diff --git a/backend/server.py b/backend/server.py
new file mode 100644
index 0000000000000000000000000000000000000000..41b0c0724a51f975b0eab96fd3ab3595ee6bc004
--- /dev/null
+++ b/backend/server.py
@@ -0,0 +1,40 @@
+from flask import Flask
+from pymongo import MongoClient
+from pprint import pprint
+import datetime
+
+app = Flask(__name__)
+
+
+@app.route('/data')
+def get_time():
+    # Return a static example document with the current timestamp.
+    return {
+        'Name': 'geek',
+        'Age': '22',
+        'Date': datetime.datetime.now(),
+        'programming': 'python'
+    }
+
+
+@app.route('/test')
+def get_info():
+    # Connect to the cluster (credentials are embedded in the URI) and
+    # read a single document from the test1.test2 collection.
+    client = MongoClient("mongodb+srv://test:123@cluster0.6q7wu5h.mongodb.net/")
+    db = client.get_database("test1")
+    collection = db.test2
+
+    obj = collection.find_one()
+    pprint(obj)
+
+    return {
+        'Name': obj["Name"],
+        'Age': obj["Age"],
+        'Date': datetime.datetime.now(),
+        'programming': obj["Programming"]
+    }
+
+
+if __name__ == '__main__':
+    app.run(debug=True)
diff --git a/backend/test/bin/Activate.ps1 b/backend/test/bin/Activate.ps1
new file mode 100644
index 0000000000000000000000000000000000000000..2fb3852c3cf1a565ccf813f876a135ecf6f99712
--- /dev/null
+++ b/backend/test/bin/Activate.ps1
@@ -0,0 +1,241 @@
+<#
+.Synopsis
+Activate a Python virtual environment for the current PowerShell session.
+
+.Description
+Pushes the python executable for a virtual environment to the front of the
+$Env:PATH environment variable and sets the prompt to signify that you are
+in a Python virtual environment. Makes use of the command line switches as
+well as the `pyvenv.cfg` file values present in the virtual environment.
+
+.Parameter VenvDir
+Path to the directory that contains the virtual environment to activate. The
+default value for this is the parent of the directory that the Activate.ps1
+script is located within.
+
+.Parameter Prompt
+The prompt prefix to display when this virtual environment is activated. By
+default, this prompt is the name of the virtual environment folder (VenvDir)
+surrounded by parentheses and followed by a single space (ie. '(.venv) ').
+
+.Example
+Activate.ps1
+Activates the Python virtual environment that contains the Activate.ps1 script.
+
+.Example
+Activate.ps1 -Verbose
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and shows extra information about the activation as it executes.
+
+.Example
+Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
+Activates the Python virtual environment located in the specified location.
+
+.Example
+Activate.ps1 -Prompt "MyPython"
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and prefixes the current prompt with the specified string (surrounded in
+parentheses) while the virtual environment is active.
+
+.Notes
+On Windows, it may be required to enable this Activate.ps1 script by setting the
+execution policy for the user.
You can do this by issuing the following PowerShell +command: + +PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser + +For more information on Execution Policies: +https://go.microsoft.com/fwlink/?LinkID=135170 + +#> +Param( + [Parameter(Mandatory = $false)] + [String] + $VenvDir, + [Parameter(Mandatory = $false)] + [String] + $Prompt +) + +<# Function declarations --------------------------------------------------- #> + +<# +.Synopsis +Remove all shell session elements added by the Activate script, including the +addition of the virtual environment's Python executable from the beginning of +the PATH variable. + +.Parameter NonDestructive +If present, do not remove this function from the global namespace for the +session. + +#> +function global:deactivate ([switch]$NonDestructive) { + # Revert to original values + + # The prior prompt: + if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) { + Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt + Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT + } + + # The prior PYTHONHOME: + if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) { + Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME + Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME + } + + # The prior PATH: + if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) { + Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH + Remove-Item -Path Env:_OLD_VIRTUAL_PATH + } + + # Just remove the VIRTUAL_ENV altogether: + if (Test-Path -Path Env:VIRTUAL_ENV) { + Remove-Item -Path env:VIRTUAL_ENV + } + + # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether: + if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) { + Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force + } + + # Leave deactivate function in the global namespace if requested: + if (-not $NonDestructive) { + Remove-Item -Path function:deactivate + } +} + +<# +.Description +Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the +given folder, and returns them in a map. + +For each line in the pyvenv.cfg file, if that line can be parsed into exactly +two strings separated by `=` (with any amount of whitespace surrounding the =) +then it is considered a `key = value` line. The left hand string is the key, +the right hand is the value. + +If the value starts with a `'` or a `"` then the first and last character is +stripped from the value before being captured. + +.Parameter ConfigDir +Path to the directory that contains the `pyvenv.cfg` file. +#> +function Get-PyVenvConfig( + [String] + $ConfigDir +) { + Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg" + + # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue). + $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue + + # An empty map will be returned if no config file is found. + $pyvenvConfig = @{ } + + if ($pyvenvConfigPath) { + + Write-Verbose "File exists, parse `key = value` lines" + $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath + + $pyvenvConfigContent | ForEach-Object { + $keyval = $PSItem -split "\s*=\s*", 2 + if ($keyval[0] -and $keyval[1]) { + $val = $keyval[1] + + # Remove extraneous quotations around a string value. 
+ if ("'""".Contains($val.Substring(0, 1))) { + $val = $val.Substring(1, $val.Length - 2) + } + + $pyvenvConfig[$keyval[0]] = $val + Write-Verbose "Adding Key: '$($keyval[0])'='$val'" + } + } + } + return $pyvenvConfig +} + + +<# Begin Activate script --------------------------------------------------- #> + +# Determine the containing directory of this script +$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition +$VenvExecDir = Get-Item -Path $VenvExecPath + +Write-Verbose "Activation script is located in path: '$VenvExecPath'" +Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)" +Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)" + +# Set values required in priority: CmdLine, ConfigFile, Default +# First, get the location of the virtual environment, it might not be +# VenvExecDir if specified on the command line. +if ($VenvDir) { + Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values" +} +else { + Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir." + $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/") + Write-Verbose "VenvDir=$VenvDir" +} + +# Next, read the `pyvenv.cfg` file to determine any required value such +# as `prompt`. +$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir + +# Next, set the prompt from the command line, or the config file, or +# just use the name of the virtual environment folder. +if ($Prompt) { + Write-Verbose "Prompt specified as argument, using '$Prompt'" +} +else { + Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value" + if ($pyvenvCfg -and $pyvenvCfg['prompt']) { + Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'" + $Prompt = $pyvenvCfg['prompt']; + } + else { + Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virutal environment)" + Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'" + $Prompt = Split-Path -Path $venvDir -Leaf + } +} + +Write-Verbose "Prompt = '$Prompt'" +Write-Verbose "VenvDir='$VenvDir'" + +# Deactivate any currently active virtual environment, but leave the +# deactivate function in place. +deactivate -nondestructive + +# Now set the environment variable VIRTUAL_ENV, used by many tools to determine +# that there is an activated venv. 
+$env:VIRTUAL_ENV = $VenvDir + +if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) { + + Write-Verbose "Setting prompt to '$Prompt'" + + # Set the prompt to include the env name + # Make sure _OLD_VIRTUAL_PROMPT is global + function global:_OLD_VIRTUAL_PROMPT { "" } + Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT + New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt + + function global:prompt { + Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) " + _OLD_VIRTUAL_PROMPT + } +} + +# Clear PYTHONHOME +if (Test-Path -Path Env:PYTHONHOME) { + Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME + Remove-Item -Path Env:PYTHONHOME +} + +# Add the venv to the PATH +Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH +$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH" diff --git a/backend/test/bin/activate b/backend/test/bin/activate new file mode 100644 index 0000000000000000000000000000000000000000..4394ee6cb0a58c32198c2b3fc77fe2d24bedd1e3 --- /dev/null +++ b/backend/test/bin/activate @@ -0,0 +1,76 @@ +# This file must be used with "source bin/activate" *from bash* +# you cannot run it directly + +deactivate () { + # reset old environment variables + if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then + PATH="${_OLD_VIRTUAL_PATH:-}" + export PATH + unset _OLD_VIRTUAL_PATH + fi + if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then + PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" + export PYTHONHOME + unset _OLD_VIRTUAL_PYTHONHOME + fi + + # This should detect bash and zsh, which have a hash command that must + # be called to get it to forget past commands. Without forgetting + # past commands the $PATH changes we made may not be respected + if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r + fi + + if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then + PS1="${_OLD_VIRTUAL_PS1:-}" + export PS1 + unset _OLD_VIRTUAL_PS1 + fi + + unset VIRTUAL_ENV + if [ ! "${1:-}" = "nondestructive" ] ; then + # Self destruct! + unset -f deactivate + fi +} + +# unset irrelevant variables +deactivate nondestructive + +VIRTUAL_ENV="/home/simon/Documents/TDDC88/backend/test" +export VIRTUAL_ENV + +_OLD_VIRTUAL_PATH="$PATH" +PATH="$VIRTUAL_ENV/bin:$PATH" +export PATH + +# unset PYTHONHOME if set +# this will fail if PYTHONHOME is set to the empty string (which is bad anyway) +# could use `if (set -u; : $PYTHONHOME) ;` in bash +if [ -n "${PYTHONHOME:-}" ] ; then + _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" + unset PYTHONHOME +fi + +if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then + _OLD_VIRTUAL_PS1="${PS1:-}" + if [ "x(test) " != x ] ; then + PS1="(test) ${PS1:-}" + else + if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then + # special case for Aspen magic directories + # see https://aspen.io/ + PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1" + else + PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1" + fi + fi + export PS1 +fi + +# This should detect bash and zsh, which have a hash command that must +# be called to get it to forget past commands. 
Without forgetting +# past commands the $PATH changes we made may not be respected +if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then + hash -r +fi diff --git a/backend/test/bin/activate.csh b/backend/test/bin/activate.csh new file mode 100644 index 0000000000000000000000000000000000000000..e32cde79b3e1444b5cb69a3b4e20809a5eba4c6b --- /dev/null +++ b/backend/test/bin/activate.csh @@ -0,0 +1,37 @@ +# This file must be used with "source bin/activate.csh" *from csh*. +# You cannot run it directly. +# Created by Davide Di Blasi <davidedb@gmail.com>. +# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com> + +alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate' + +# Unset irrelevant variables. +deactivate nondestructive + +setenv VIRTUAL_ENV "/home/simon/Documents/TDDC88/backend/test" + +set _OLD_VIRTUAL_PATH="$PATH" +setenv PATH "$VIRTUAL_ENV/bin:$PATH" + + +set _OLD_VIRTUAL_PROMPT="$prompt" + +if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then + if ("test" != "") then + set env_name = "test" + else + if (`basename "VIRTUAL_ENV"` == "__") then + # special case for Aspen magic directories + # see https://aspen.io/ + set env_name = `basename \`dirname "$VIRTUAL_ENV"\`` + else + set env_name = `basename "$VIRTUAL_ENV"` + endif + endif + set prompt = "[$env_name] $prompt" + unset env_name +endif + +alias pydoc python -m pydoc + +rehash diff --git a/backend/test/bin/activate.fish b/backend/test/bin/activate.fish new file mode 100644 index 0000000000000000000000000000000000000000..88d8f228dc50d40deb17e3b23e92b4ab1a8cb883 --- /dev/null +++ b/backend/test/bin/activate.fish @@ -0,0 +1,75 @@ +# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org) +# you cannot run it directly + +function deactivate -d "Exit virtualenv and return to normal shell environment" + # reset old environment variables + if test -n "$_OLD_VIRTUAL_PATH" + set -gx PATH $_OLD_VIRTUAL_PATH + set -e _OLD_VIRTUAL_PATH + end + if test -n "$_OLD_VIRTUAL_PYTHONHOME" + set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME + set -e _OLD_VIRTUAL_PYTHONHOME + end + + if test -n "$_OLD_FISH_PROMPT_OVERRIDE" + functions -e fish_prompt + set -e _OLD_FISH_PROMPT_OVERRIDE + functions -c _old_fish_prompt fish_prompt + functions -e _old_fish_prompt + end + + set -e VIRTUAL_ENV + if test "$argv[1]" != "nondestructive" + # Self destruct! + functions -e deactivate + end +end + +# unset irrelevant variables +deactivate nondestructive + +set -gx VIRTUAL_ENV "/home/simon/Documents/TDDC88/backend/test" + +set -gx _OLD_VIRTUAL_PATH $PATH +set -gx PATH "$VIRTUAL_ENV/bin" $PATH + +# unset PYTHONHOME if set +if set -q PYTHONHOME + set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME + set -e PYTHONHOME +end + +if test -z "$VIRTUAL_ENV_DISABLE_PROMPT" + # fish uses a function instead of an env var to generate the prompt. + + # save the current fish_prompt function as the function _old_fish_prompt + functions -c fish_prompt _old_fish_prompt + + # with the original prompt function renamed, we can override with our own. + function fish_prompt + # Save the return status of the last command + set -l old_status $status + + # Prompt override? 
+ if test -n "(test) " + printf "%s%s" "(test) " (set_color normal) + else + # ...Otherwise, prepend env + set -l _checkbase (basename "$VIRTUAL_ENV") + if test $_checkbase = "__" + # special case for Aspen magic directories + # see https://aspen.io/ + printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) + else + printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) + end + end + + # Restore the return status of the previous command. + echo "exit $old_status" | . + _old_fish_prompt + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/backend/test/bin/easy_install b/backend/test/bin/easy_install new file mode 100755 index 0000000000000000000000000000000000000000..307bd40948cd688f60c1dec035d934e48f8f94cf --- /dev/null +++ b/backend/test/bin/easy_install @@ -0,0 +1,8 @@ +#!/home/simon/Documents/TDDC88/backend/test/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from setuptools.command.easy_install import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend/test/bin/easy_install-3.8 b/backend/test/bin/easy_install-3.8 new file mode 100755 index 0000000000000000000000000000000000000000..307bd40948cd688f60c1dec035d934e48f8f94cf --- /dev/null +++ b/backend/test/bin/easy_install-3.8 @@ -0,0 +1,8 @@ +#!/home/simon/Documents/TDDC88/backend/test/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from setuptools.command.easy_install import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend/test/bin/flask b/backend/test/bin/flask new file mode 100755 index 0000000000000000000000000000000000000000..24cd60f5b699bf74c43aec471c289d3dcfd31ea0 --- /dev/null +++ b/backend/test/bin/flask @@ -0,0 +1,8 @@ +#!/home/simon/Documents/TDDC88/backend/test/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from flask.cli import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend/test/bin/pip b/backend/test/bin/pip new file mode 100755 index 0000000000000000000000000000000000000000..e04a4899cd68b26ce7df36fcd9999ce503854b61 --- /dev/null +++ b/backend/test/bin/pip @@ -0,0 +1,8 @@ +#!/home/simon/Documents/TDDC88/backend/test/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend/test/bin/pip3 b/backend/test/bin/pip3 new file mode 100755 index 0000000000000000000000000000000000000000..e04a4899cd68b26ce7df36fcd9999ce503854b61 --- /dev/null +++ b/backend/test/bin/pip3 @@ -0,0 +1,8 @@ +#!/home/simon/Documents/TDDC88/backend/test/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend/test/bin/pip3.8 b/backend/test/bin/pip3.8 new file mode 100755 index 0000000000000000000000000000000000000000..e04a4899cd68b26ce7df36fcd9999ce503854b61 --- /dev/null +++ b/backend/test/bin/pip3.8 @@ -0,0 +1,8 @@ +#!/home/simon/Documents/TDDC88/backend/test/bin/python3 +# -*- coding: utf-8 -*- +import re +import sys +from pip._internal.cli.main import main +if __name__ == '__main__': + sys.argv[0] 
= re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/backend/test/bin/python b/backend/test/bin/python new file mode 120000 index 0000000000000000000000000000000000000000..b8a0adbbb97ea11f36eb0c6b2a3c2881e96f8e26 --- /dev/null +++ b/backend/test/bin/python @@ -0,0 +1 @@ +python3 \ No newline at end of file diff --git a/backend/test/bin/python3 b/backend/test/bin/python3 new file mode 120000 index 0000000000000000000000000000000000000000..ae65fdaa12936b0d7525b090d198249fa7623e66 --- /dev/null +++ b/backend/test/bin/python3 @@ -0,0 +1 @@ +/usr/bin/python3 \ No newline at end of file diff --git a/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/INSTALLER b/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst b/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst new file mode 100644 index 0000000000000000000000000000000000000000..c37cae49ec77ad6ebb25568c1605f1fee5313cfb --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/METADATA b/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..f54bb5ca1a59f2696ed5273a531aff9088754da0 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/METADATA @@ -0,0 +1,113 @@ +Metadata-Version: 2.1 +Name: Jinja2 +Version: 3.1.2 +Summary: A very fast and expressive template engine. 
+Home-page: https://palletsprojects.com/p/jinja/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://jinja.palletsprojects.com/ +Project-URL: Changes, https://jinja.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/jinja/ +Project-URL: Issue Tracker, https://github.com/pallets/jinja/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: MarkupSafe (>=2.0) +Provides-Extra: i18n +Requires-Dist: Babel (>=2.7) ; extra == 'i18n' + +Jinja +===== + +Jinja is a fast, expressive, extensible templating engine. Special +placeholders in the template allow writing code similar to Python +syntax. Then the template is passed data to render the final document. + +It includes: + +- Template inheritance and inclusion. +- Define and import macros within templates. +- HTML templates can use autoescaping to prevent XSS from untrusted + user input. +- A sandboxed environment can safely render untrusted templates. +- AsyncIO support for generating templates and calling async + functions. +- I18N support with Babel. +- Templates are compiled to optimized Python code just-in-time and + cached, or can be compiled ahead-of-time. +- Exceptions point to the correct line in templates to make debugging + easier. +- Extensible filters, tests, functions, and even syntax. + +Jinja's philosophy is that while application logic belongs in Python if +possible, it shouldn't make the template designer's job difficult by +restricting functionality too much. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U Jinja2 + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +In A Nutshell +------------- + +.. code-block:: jinja + + {% extends "base.html" %} + {% block title %}Members{% endblock %} + {% block content %} + <ul> + {% for user in users %} + <li><a href="{{ user.url }}">{{ user.username }}</a></li> + {% endfor %} + </ul> + {% endblock %} + + +Donate +------ + +The Pallets organization develops and supports Jinja and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. 
_please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://jinja.palletsprojects.com/ +- Changes: https://jinja.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/Jinja2/ +- Source Code: https://github.com/pallets/jinja/ +- Issue Tracker: https://github.com/pallets/jinja/issues/ +- Website: https://palletsprojects.com/p/jinja/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets + + diff --git a/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/RECORD b/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..197684419828bbbdb64a32fc366c0f5dc801f268 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/RECORD @@ -0,0 +1,58 @@ +Jinja2-3.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +Jinja2-3.1.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 +Jinja2-3.1.2.dist-info/METADATA,sha256=PZ6v2SIidMNixR7MRUX9f7ZWsPwtXanknqiZUmRbh4U,3539 +Jinja2-3.1.2.dist-info/RECORD,, +Jinja2-3.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +Jinja2-3.1.2.dist-info/entry_points.txt,sha256=zRd62fbqIyfUpsRtU7EVIFyiu1tPwfgO7EvPErnxgTE,59 +Jinja2-3.1.2.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7 +jinja2/__init__.py,sha256=8vGduD8ytwgD6GDSqpYc2m3aU-T7PKOAddvVXgGr_Fs,1927 +jinja2/__pycache__/__init__.cpython-38.pyc,, +jinja2/__pycache__/_identifier.cpython-38.pyc,, +jinja2/__pycache__/async_utils.cpython-38.pyc,, +jinja2/__pycache__/bccache.cpython-38.pyc,, +jinja2/__pycache__/compiler.cpython-38.pyc,, +jinja2/__pycache__/constants.cpython-38.pyc,, +jinja2/__pycache__/debug.cpython-38.pyc,, +jinja2/__pycache__/defaults.cpython-38.pyc,, +jinja2/__pycache__/environment.cpython-38.pyc,, +jinja2/__pycache__/exceptions.cpython-38.pyc,, +jinja2/__pycache__/ext.cpython-38.pyc,, +jinja2/__pycache__/filters.cpython-38.pyc,, +jinja2/__pycache__/idtracking.cpython-38.pyc,, +jinja2/__pycache__/lexer.cpython-38.pyc,, +jinja2/__pycache__/loaders.cpython-38.pyc,, +jinja2/__pycache__/meta.cpython-38.pyc,, +jinja2/__pycache__/nativetypes.cpython-38.pyc,, +jinja2/__pycache__/nodes.cpython-38.pyc,, +jinja2/__pycache__/optimizer.cpython-38.pyc,, +jinja2/__pycache__/parser.cpython-38.pyc,, +jinja2/__pycache__/runtime.cpython-38.pyc,, +jinja2/__pycache__/sandbox.cpython-38.pyc,, +jinja2/__pycache__/tests.cpython-38.pyc,, +jinja2/__pycache__/utils.cpython-38.pyc,, +jinja2/__pycache__/visitor.cpython-38.pyc,, +jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958 +jinja2/async_utils.py,sha256=dHlbTeaxFPtAOQEYOGYh_PHcDT0rsDaUJAFDl_0XtTg,2472 +jinja2/bccache.py,sha256=mhz5xtLxCcHRAa56azOhphIAe19u1we0ojifNMClDio,14061 +jinja2/compiler.py,sha256=Gs-N8ThJ7OWK4-reKoO8Wh1ZXz95MVphBKNVf75qBr8,72172 +jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433 +jinja2/debug.py,sha256=iWJ432RadxJNnaMOPrjIDInz50UEgni3_HKuFXi2vuQ,6299 +jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267 +jinja2/environment.py,sha256=6uHIcc7ZblqOMdx_uYNKqRnnwAF0_nzbyeMP9FFtuh4,61349 +jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071 +jinja2/ext.py,sha256=ivr3P7LKbddiXDVez20EflcO3q2aHQwz9P_PgWGHVqE,31502 +jinja2/filters.py,sha256=9js1V-h2RlyW90IhLiBGLM2U-k6SCy2F4BUUMgB3K9Q,53509 
+jinja2/idtracking.py,sha256=GfNmadir4oDALVxzn3DL9YInhJDr69ebXeA2ygfuCGA,10704 +jinja2/lexer.py,sha256=DW2nX9zk-6MWp65YR2bqqj0xqCvLtD-u9NWT8AnFRxQ,29726 +jinja2/loaders.py,sha256=BfptfvTVpClUd-leMkHczdyPNYFzp_n7PKOJ98iyHOg,23207 +jinja2/meta.py,sha256=GNPEvifmSaU3CMxlbheBOZjeZ277HThOPUTf1RkppKQ,4396 +jinja2/nativetypes.py,sha256=DXgORDPRmVWgy034H0xL8eF7qYoK3DrMxs-935d0Fzk,4226 +jinja2/nodes.py,sha256=i34GPRAZexXMT6bwuf5SEyvdmS-bRCy9KMjwN5O6pjk,34550 +jinja2/optimizer.py,sha256=tHkMwXxfZkbfA1KmLcqmBMSaz7RLIvvItrJcPoXTyD8,1650 +jinja2/parser.py,sha256=nHd-DFHbiygvfaPtm9rcQXJChZG7DPsWfiEsqfwKerY,39595 +jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +jinja2/runtime.py,sha256=5CmD5BjbEJxSiDNTFBeKCaq8qU4aYD2v6q2EluyExms,33476 +jinja2/sandbox.py,sha256=Y0xZeXQnH6EX5VjaV2YixESxoepnRbW_3UeQosaBU3M,14584 +jinja2/tests.py,sha256=Am5Z6Lmfr2XaH_npIfJJ8MdXtWsbLjMULZJulTAj30E,5905 +jinja2/utils.py,sha256=u9jXESxGn8ATZNVolwmkjUVu4SA-tLgV0W7PcSfPfdQ,23965 +jinja2/visitor.py,sha256=MH14C6yq24G_KVtWzjwaI7Wg14PCJIYlWW1kpkxYak0,3568 diff --git a/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/WHEEL b/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..becc9a66ea739ba941d48a749e248761cc6e658a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt b/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..7b9666c8ea311ea0f0cfe7bed861aaa5469f92bb --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/entry_points.txt @@ -0,0 +1,2 @@ +[babel.extractors] +jinja2 = jinja2.ext:babel_extract[i18n] diff --git a/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/top_level.txt b/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..7f7afbf3bf54b346092be6a72070fcbd305ead1e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/Jinja2-3.1.2.dist-info/top_level.txt @@ -0,0 +1 @@ +jinja2 diff --git a/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/INSTALLER b/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/LICENSE.rst b/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/LICENSE.rst new file mode 100644 index 0000000000000000000000000000000000000000..9d227a0cc43c3268d15722b763bd94ad298645a1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. 
Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/METADATA b/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..bced1652399a6ca9a5285d9e12bcecc91a987fec --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/METADATA @@ -0,0 +1,93 @@ +Metadata-Version: 2.1 +Name: MarkupSafe +Version: 2.1.3 +Summary: Safely add untrusted strings to HTML/XML markup. +Home-page: https://palletsprojects.com/p/markupsafe/ +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://markupsafe.palletsprojects.com/ +Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/markupsafe/ +Project-URL: Issue Tracker, https://github.com/pallets/markupsafe/issues/ +Project-URL: Chat, https://discord.gg/pallets +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Text Processing :: Markup :: HTML +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst + +MarkupSafe +========== + +MarkupSafe implements a text object that escapes characters so it is +safe to use in HTML and XML. Characters that have special meanings are +replaced so that they display as the actual characters. This mitigates +injection attacks, meaning untrusted user input can safely be displayed +on a page. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U MarkupSafe + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +Examples +-------- + +.. 
code-block:: pycon

+    >>> from markupsafe import Markup, escape
+
+    >>> # escape replaces special characters and wraps in Markup
+    >>> escape("<script>alert(document.cookie);</script>")
+    Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
+
+    >>> # wrap in Markup to mark text "safe" and prevent escaping
+    >>> Markup("<strong>Hello</strong>")
+    Markup('<strong>Hello</strong>')
+
+    >>> escape(Markup("<strong>Hello</strong>"))
+    Markup('<strong>Hello</strong>')
+
+    >>> # Markup is a str subclass
+    >>> # methods and operators escape their arguments
+    >>> template = Markup("Hello <em>{name}</em>")
+    >>> template.format(name='"World"')
+    Markup('Hello <em>&#34;World&#34;</em>')
+
+
+Donate
+------
+
+The Pallets organization develops and supports MarkupSafe and other
+popular packages. In order to grow the community of contributors and
+users, and allow the maintainers to devote more time to the projects,
+`please donate today`_.
+
+.. _please donate today: https://palletsprojects.com/donate
+
+
+Links
+-----
+
+- Documentation: https://markupsafe.palletsprojects.com/
+- Changes: https://markupsafe.palletsprojects.com/changes/
+- PyPI Releases: https://pypi.org/project/MarkupSafe/
+- Source Code: https://github.com/pallets/markupsafe/
+- Issue Tracker: https://github.com/pallets/markupsafe/issues/
+- Chat: https://discord.gg/pallets
diff --git a/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/RECORD b/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..76fa99930436377c676cce0c44c97ea3e2f64877
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/RECORD
@@ -0,0 +1,14 @@
+MarkupSafe-2.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+MarkupSafe-2.1.3.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
+MarkupSafe-2.1.3.dist-info/METADATA,sha256=Wvvh4Tz-YtW24YagYdqrrrBdm9m-DjTdqJWhxlbU6-0,3003
+MarkupSafe-2.1.3.dist-info/RECORD,,
+MarkupSafe-2.1.3.dist-info/WHEEL,sha256=5DQRW5VdkIxH8e7ylwa-YBCXLhSZeqHiz1ZmuOscrlo,148
+MarkupSafe-2.1.3.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
+markupsafe/__init__.py,sha256=xIItqrn1Bwi7FxPJO9rCVQBG0Evewue1Tl4BV0l9xEs,10338
+markupsafe/__pycache__/__init__.cpython-38.pyc,,
+markupsafe/__pycache__/_native.cpython-38.pyc,,
+markupsafe/_native.py,sha256=GR86Qvo_GcgKmKreA1WmYN9ud17OFwkww8E-fiW-57s,1713
+markupsafe/_speedups.c,sha256=X2XvQVtIdcK4Usz70BvkzoOfjTCmQlDkkjYSn-swE0g,7083
+markupsafe/_speedups.cpython-38-x86_64-linux-gnu.so,sha256=-Dz2Ufn8AQigRhyqGFBUBTc42FMkavuNMl2X9zTBmMQ,45024
+markupsafe/_speedups.pyi,sha256=vfMCsOgbAXRNLUXkyuyonG8uEWKYU4PDqNuMaDELAYw,229
+markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/WHEEL b/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..4133c42cc8b5944c92213b6765295c90c2ae81b3
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.40.0)
+Root-Is-Purelib: false
+Tag: cp38-cp38-manylinux_2_17_x86_64
+Tag: cp38-cp38-manylinux2014_x86_64
+
diff --git a/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/top_level.txt b/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/top_level.txt
new
file mode 100644 index 0000000000000000000000000000000000000000..75bf729258f9daef77370b6df1a57940f90fc23f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/MarkupSafe-2.1.3.dist-info/top_level.txt @@ -0,0 +1 @@ +markupsafe diff --git a/backend/test/lib/python3.8/site-packages/__pycache__/easy_install.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/__pycache__/easy_install.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4485130d69f698a1604540986e82a5ac282b3b9f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/__pycache__/easy_install.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/INSTALLER b/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/LICENSE.rst b/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/LICENSE.rst new file mode 100644 index 0000000000000000000000000000000000000000..79c9825adbacb5d8c6eaee51863b8a40051d97c8 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/LICENSE.rst @@ -0,0 +1,20 @@ +Copyright 2010 Jason Kirtland + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/METADATA b/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..d181f19f385cdcf01f3bc508309b1d3e199e7146
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/METADATA
@@ -0,0 +1,63 @@
+Metadata-Version: 2.1
+Name: blinker
+Version: 1.6.2
+Summary: Fast, simple object-to-object and broadcast signaling
+Author-email: Jason Kirtland <jek@discorporate.us>
+Maintainer-email: Pallets Ecosystem <contact@palletsprojects.com>
+License: MIT License
+Project-URL: Homepage, https://blinker.readthedocs.io
+Project-URL: Documentation, https://blinker.readthedocs.io
+Project-URL: Source Code, https://github.com/pallets-eco/blinker/
+Project-URL: Issue Tracker, https://github.com/pallets-eco/blinker/issues/
+Project-URL: Chat, https://discord.gg/pallets
+Keywords: signal,emit,events,broadcast
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+
+Blinker
+=======
+
+Blinker provides a fast dispatching system that allows any number of
+interested parties to subscribe to events, or "signals".
+
+Signal receivers can subscribe to specific senders or receive signals
+sent by any sender.
+
+.. code-block:: pycon
+
+    >>> from blinker import signal
+    >>> started = signal('round-started')
+    >>> def each(round):
+    ...     print(f"Round {round}!")
+    ...
+    >>> started.connect(each)
+
+    >>> def round_two(round):
+    ...     print("This is round two.")
+    ...
+    >>> started.connect(round_two, sender=2)
+
+    >>> for round in range(1, 4):
+    ...     started.send(round)
+    ...
+    Round 1!
+    Round 2!
+    This is round two.
+    Round 3!
+ + +Links +----- + +- Documentation: https://blinker.readthedocs.io/ +- Changes: https://blinker.readthedocs.io/#changes +- PyPI Releases: https://pypi.org/project/blinker/ +- Source Code: https://github.com/pallets-eco/blinker/ +- Issue Tracker: https://github.com/pallets-eco/blinker/issues/ diff --git a/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/RECORD b/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..e573b49ba3597213eb829dae591b8aaa53c2aa8f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/RECORD @@ -0,0 +1,15 @@ +blinker-1.6.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +blinker-1.6.2.dist-info/LICENSE.rst,sha256=nrc6HzhZekqhcCXSrhvjg5Ykx5XphdTw6Xac4p-spGc,1054 +blinker-1.6.2.dist-info/METADATA,sha256=7MRskabu2wQvWIMFwgqP3w2LDt8nR5nCxH7Anu1ZrBM,1964 +blinker-1.6.2.dist-info/RECORD,, +blinker-1.6.2.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +blinker-1.6.2.dist-info/top_level.txt,sha256=2NmsENM0J2t9Z8mkjxHDmGMQj7Bm8f5ZTTYe1x1fZtM,8 +blinker/__init__.py,sha256=Ko7EbvxyCl_UewgsP8XgDJqJcHZA7EsuhG72R_zDrcY,408 +blinker/__pycache__/__init__.cpython-38.pyc,, +blinker/__pycache__/_saferef.cpython-38.pyc,, +blinker/__pycache__/_utilities.cpython-38.pyc,, +blinker/__pycache__/base.cpython-38.pyc,, +blinker/_saferef.py,sha256=kWOTIWnCY3kOb8lZP74Rbx7bR_BLVg4TjwzNCRLhKHs,9096 +blinker/_utilities.py,sha256=GPXtJzykzVotoxHC79mgFQMPJtICwpVDCCpus4_JtsA,4110 +blinker/base.py,sha256=7Y-C0ZVIe-NrrskPeqj0bLSp4R6Cpq5LrzI1DmLqMEA,20469 +blinker/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/WHEEL b/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..1f37c02f2eb2e26b306202feaccb31e522b8b169 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/top_level.txt b/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..1ff4ca551011b890b3660077a2f8d8932b03dc37 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/blinker-1.6.2.dist-info/top_level.txt @@ -0,0 +1 @@ +blinker diff --git a/backend/test/lib/python3.8/site-packages/blinker/__init__.py b/backend/test/lib/python3.8/site-packages/blinker/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..71d66d3bd40b533a0e1f3d3722a895a0d8c4e11b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/blinker/__init__.py @@ -0,0 +1,19 @@ +from blinker.base import ANY +from blinker.base import NamedSignal +from blinker.base import Namespace +from blinker.base import receiver_connected +from blinker.base import Signal +from blinker.base import signal +from blinker.base import WeakNamespace + +__all__ = [ + "ANY", + "NamedSignal", + "Namespace", + "Signal", + "WeakNamespace", + "receiver_connected", + "signal", +] + +__version__ = "1.6.2" diff --git a/backend/test/lib/python3.8/site-packages/blinker/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/blinker/__pycache__/__init__.cpython-38.pyc new file mode 100644 
index 0000000000000000000000000000000000000000..65e125188781c5b3b71c8a56dc23f1a15d164301 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/blinker/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/blinker/__pycache__/_saferef.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/blinker/__pycache__/_saferef.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a75867bc991ec146e8a8e9da189d37185954652c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/blinker/__pycache__/_saferef.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/blinker/__pycache__/_utilities.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/blinker/__pycache__/_utilities.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a8bc753a17b19857f4fa5722ae1d2f99148fc190 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/blinker/__pycache__/_utilities.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/blinker/__pycache__/base.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/blinker/__pycache__/base.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c880787b6aaacfb8f9e6280c66bf2d2971a89f29 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/blinker/__pycache__/base.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/blinker/_saferef.py b/backend/test/lib/python3.8/site-packages/blinker/_saferef.py new file mode 100644 index 0000000000000000000000000000000000000000..dcb70c1899df9c8e5b4ead958d796efde6a2929e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/blinker/_saferef.py @@ -0,0 +1,230 @@ +# extracted from Louie, http://pylouie.org/ +# updated for Python 3 +# +# Copyright (c) 2006 Patrick K. O'Brien, Mike C. Fletcher, +# Matthew R. Scott +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# * Neither the name of the <ORGANIZATION> nor the names of its +# contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+# +"""Refactored 'safe reference from dispatcher.py""" +import operator +import sys +import traceback +import weakref + + +get_self = operator.attrgetter("__self__") +get_func = operator.attrgetter("__func__") + + +def safe_ref(target, on_delete=None): + """Return a *safe* weak reference to a callable target. + + - ``target``: The object to be weakly referenced, if it's a bound + method reference, will create a BoundMethodWeakref, otherwise + creates a simple weakref. + + - ``on_delete``: If provided, will have a hard reference stored to + the callable to be called after the safe reference goes out of + scope with the reference object, (either a weakref or a + BoundMethodWeakref) as argument. + """ + try: + im_self = get_self(target) + except AttributeError: + if callable(on_delete): + return weakref.ref(target, on_delete) + else: + return weakref.ref(target) + else: + if im_self is not None: + # Turn a bound method into a BoundMethodWeakref instance. + # Keep track of these instances for lookup by disconnect(). + assert hasattr(target, "im_func") or hasattr(target, "__func__"), ( + f"safe_ref target {target!r} has im_self, but no im_func, " + "don't know how to create reference" + ) + reference = BoundMethodWeakref(target=target, on_delete=on_delete) + return reference + + +class BoundMethodWeakref: + """'Safe' and reusable weak references to instance methods. + + BoundMethodWeakref objects provide a mechanism for referencing a + bound method without requiring that the method object itself + (which is normally a transient object) is kept alive. Instead, + the BoundMethodWeakref object keeps weak references to both the + object and the function which together define the instance method. + + Attributes: + + - ``key``: The identity key for the reference, calculated by the + class's calculate_key method applied to the target instance method. + + - ``deletion_methods``: Sequence of callable objects taking single + argument, a reference to this object which will be called when + *either* the target object or target function is garbage + collected (i.e. when this object becomes invalid). These are + specified as the on_delete parameters of safe_ref calls. + + - ``weak_self``: Weak reference to the target object. + + - ``weak_func``: Weak reference to the target function. + + Class Attributes: + + - ``_all_instances``: Class attribute pointing to all live + BoundMethodWeakref objects indexed by the class's + calculate_key(target) method applied to the target objects. + This weak value dictionary is used to short-circuit creation so + that multiple references to the same (object, function) pair + produce the same BoundMethodWeakref instance. + """ + + _all_instances = weakref.WeakValueDictionary() # type: ignore[var-annotated] + + def __new__(cls, target, on_delete=None, *arguments, **named): + """Create new instance or return current instance. + + Basically this method of construction allows us to + short-circuit creation of references to already-referenced + instance methods. The key corresponding to the target is + calculated, and if there is already an existing reference, + that is returned, with its deletion_methods attribute updated. + Otherwise the new instance is created and registered in the + table of already-referenced methods. 
+ """ + key = cls.calculate_key(target) + current = cls._all_instances.get(key) + if current is not None: + current.deletion_methods.append(on_delete) + return current + else: + base = super().__new__(cls) + cls._all_instances[key] = base + base.__init__(target, on_delete, *arguments, **named) + return base + + def __init__(self, target, on_delete=None): + """Return a weak-reference-like instance for a bound method. + + - ``target``: The instance-method target for the weak reference, + must have im_self and im_func attributes and be + reconstructable via the following, which is true of built-in + instance methods:: + + target.im_func.__get__( target.im_self ) + + - ``on_delete``: Optional callback which will be called when + this weak reference ceases to be valid (i.e. either the + object or the function is garbage collected). Should take a + single argument, which will be passed a pointer to this + object. + """ + + def remove(weak, self=self): + """Set self.isDead to True when method or instance is destroyed.""" + methods = self.deletion_methods[:] + del self.deletion_methods[:] + try: + del self.__class__._all_instances[self.key] + except KeyError: + pass + for function in methods: + try: + if callable(function): + function(self) + except Exception: + try: + traceback.print_exc() + except AttributeError: + e = sys.exc_info()[1] + print( + f"Exception during saferef {self} " + f"cleanup function {function}: {e}" + ) + + self.deletion_methods = [on_delete] + self.key = self.calculate_key(target) + im_self = get_self(target) + im_func = get_func(target) + self.weak_self = weakref.ref(im_self, remove) + self.weak_func = weakref.ref(im_func, remove) + self.self_name = str(im_self) + self.func_name = str(im_func.__name__) + + @classmethod + def calculate_key(cls, target): + """Calculate the reference key for this reference. + + Currently this is a two-tuple of the id()'s of the target + object and the target function respectively. + """ + return (id(get_self(target)), id(get_func(target))) + + def __str__(self): + """Give a friendly representation of the object.""" + return "{}({}.{})".format( + self.__class__.__name__, + self.self_name, + self.func_name, + ) + + __repr__ = __str__ + + def __hash__(self): + return hash((self.self_name, self.key)) + + def __nonzero__(self): + """Whether we are still a valid reference.""" + return self() is not None + + def __eq__(self, other): + """Compare with another reference.""" + if not isinstance(other, self.__class__): + return operator.eq(self.__class__, type(other)) + return operator.eq(self.key, other.key) + + def __call__(self): + """Return a strong reference to the bound method. + + If the target cannot be retrieved, then will return None, + otherwise returns a bound instance method for our object and + function. + + Note: You may call this method any number of times, as it does + not invalidate the reference. 
+ """ + target = self.weak_self() + if target is not None: + function = self.weak_func() + if function is not None: + return function.__get__(target) + return None diff --git a/backend/test/lib/python3.8/site-packages/blinker/_utilities.py b/backend/test/lib/python3.8/site-packages/blinker/_utilities.py new file mode 100644 index 0000000000000000000000000000000000000000..068d94cec22937bf15e670e83fa9112700635c9c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/blinker/_utilities.py @@ -0,0 +1,142 @@ +from __future__ import annotations + +import asyncio +import inspect +import sys +import typing as t +from functools import partial +from weakref import ref + +from blinker._saferef import BoundMethodWeakref + +IdentityType = t.Union[t.Tuple[int, int], str, int] + + +class _symbol: + def __init__(self, name): + """Construct a new named symbol.""" + self.__name__ = self.name = name + + def __reduce__(self): + return symbol, (self.name,) + + def __repr__(self): + return self.name + + +_symbol.__name__ = "symbol" + + +class symbol: + """A constant symbol. + + >>> symbol('foo') is symbol('foo') + True + >>> symbol('foo') + foo + + A slight refinement of the MAGICCOOKIE=object() pattern. The primary + advantage of symbol() is its repr(). They are also singletons. + + Repeated calls of symbol('name') will all return the same instance. + + """ + + symbols = {} # type: ignore[var-annotated] + + def __new__(cls, name): + try: + return cls.symbols[name] + except KeyError: + return cls.symbols.setdefault(name, _symbol(name)) + + +def hashable_identity(obj: object) -> IdentityType: + if hasattr(obj, "__func__"): + return (id(obj.__func__), id(obj.__self__)) # type: ignore[attr-defined] + elif hasattr(obj, "im_func"): + return (id(obj.im_func), id(obj.im_self)) # type: ignore[attr-defined] + elif isinstance(obj, (int, str)): + return obj + else: + return id(obj) + + +WeakTypes = (ref, BoundMethodWeakref) + + +class annotatable_weakref(ref): + """A weakref.ref that supports custom instance attributes.""" + + receiver_id: t.Optional[IdentityType] + sender_id: t.Optional[IdentityType] + + +def reference( # type: ignore[no-untyped-def] + object, callback=None, **annotations +) -> annotatable_weakref: + """Return an annotated weak ref.""" + if callable(object): + weak = callable_reference(object, callback) + else: + weak = annotatable_weakref(object, callback) + for key, value in annotations.items(): + setattr(weak, key, value) + return weak # type: ignore[no-any-return] + + +def callable_reference(object, callback=None): + """Return an annotated weak ref, supporting bound instance methods.""" + if hasattr(object, "im_self") and object.im_self is not None: + return BoundMethodWeakref(target=object, on_delete=callback) + elif hasattr(object, "__self__") and object.__self__ is not None: + return BoundMethodWeakref(target=object, on_delete=callback) + return annotatable_weakref(object, callback) + + +class lazy_property: + """A @property that is only evaluated once.""" + + def __init__(self, deferred): + self._deferred = deferred + self.__doc__ = deferred.__doc__ + + def __get__(self, obj, cls): + if obj is None: + return self + value = self._deferred(obj) + setattr(obj, self._deferred.__name__, value) + return value + + +def is_coroutine_function(func: t.Any) -> bool: + # Python < 3.8 does not correctly determine partially wrapped + # coroutine functions are coroutine functions, hence the need for + # this to exist. Code taken from CPython. 
+ if sys.version_info >= (3, 8): + return asyncio.iscoroutinefunction(func) + else: + # Note that there is something special about the AsyncMock + # such that it isn't determined as a coroutine function + # without an explicit check. + try: + from unittest.mock import AsyncMock # type: ignore[attr-defined] + + if isinstance(func, AsyncMock): + return True + except ImportError: + # Not testing, no asynctest to import + pass + + while inspect.ismethod(func): + func = func.__func__ + while isinstance(func, partial): + func = func.func + if not inspect.isfunction(func): + return False + + if func.__code__.co_flags & inspect.CO_COROUTINE: + return True + + acic = asyncio.coroutines._is_coroutine # type: ignore[attr-defined] + return getattr(func, "_is_coroutine", None) is acic diff --git a/backend/test/lib/python3.8/site-packages/blinker/base.py b/backend/test/lib/python3.8/site-packages/blinker/base.py new file mode 100644 index 0000000000000000000000000000000000000000..80e24e21c10386baad8dedf17c4756f34f7e8230 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/blinker/base.py @@ -0,0 +1,551 @@ +"""Signals and events. + +A small implementation of signals, inspired by a snippet of Django signal +API client code seen in a blog post. Signals are first-class objects and +each manages its own receivers and message emission. + +The :func:`signal` function provides singleton behavior for named signals. + +""" +from __future__ import annotations + +import typing as t +from collections import defaultdict +from contextlib import contextmanager +from warnings import warn +from weakref import WeakValueDictionary + +from blinker._utilities import annotatable_weakref +from blinker._utilities import hashable_identity +from blinker._utilities import IdentityType +from blinker._utilities import is_coroutine_function +from blinker._utilities import lazy_property +from blinker._utilities import reference +from blinker._utilities import symbol +from blinker._utilities import WeakTypes + +if t.TYPE_CHECKING: + import typing_extensions as te + + T_callable = t.TypeVar("T_callable", bound=t.Callable[..., t.Any]) + + T = t.TypeVar("T") + P = te.ParamSpec("P") + + AsyncWrapperType = t.Callable[[t.Callable[P, T]], t.Callable[P, t.Awaitable[T]]] + SyncWrapperType = t.Callable[[t.Callable[P, t.Awaitable[T]]], t.Callable[P, T]] + +ANY = symbol("ANY") +ANY.__doc__ = 'Token for "any sender".' +ANY_ID = 0 + + +class Signal: + """A notification emitter.""" + + #: An :obj:`ANY` convenience synonym, allows ``Signal.ANY`` + #: without an additional import. + ANY = ANY + + @lazy_property + def receiver_connected(self) -> Signal: + """Emitted after each :meth:`connect`. + + The signal sender is the signal instance, and the :meth:`connect` + arguments are passed through: *receiver*, *sender*, and *weak*. + + .. versionadded:: 1.2 + + """ + return Signal(doc="Emitted after a receiver connects.") + + @lazy_property + def receiver_disconnected(self) -> Signal: + """Emitted after :meth:`disconnect`. + + The sender is the signal instance, and the :meth:`disconnect` arguments + are passed through: *receiver* and *sender*. + + Note, this signal is emitted **only** when :meth:`disconnect` is + called explicitly. + + The disconnect signal can not be emitted by an automatic disconnect + (due to a weakly referenced receiver or sender going out of scope), + as the receiver and/or sender instances are no longer available for + use at the time this signal would be emitted. 
+ + An alternative approach is available by subscribing to + :attr:`receiver_connected` and setting up a custom weakref cleanup + callback on weak receivers and senders. + + .. versionadded:: 1.2 + + """ + return Signal(doc="Emitted after a receiver disconnects.") + + def __init__(self, doc: str | None = None) -> None: + """ + :param doc: optional. If provided, will be assigned to the signal's + __doc__ attribute. + + """ + if doc: + self.__doc__ = doc + #: A mapping of connected receivers. + #: + #: The values of this mapping are not meaningful outside of the + #: internal :class:`Signal` implementation, however the boolean value + #: of the mapping is useful as an extremely efficient check to see if + #: any receivers are connected to the signal. + self.receivers: dict[IdentityType, t.Callable | annotatable_weakref] = {} + self.is_muted = False + self._by_receiver: dict[IdentityType, set[IdentityType]] = defaultdict(set) + self._by_sender: dict[IdentityType, set[IdentityType]] = defaultdict(set) + self._weak_senders: dict[IdentityType, annotatable_weakref] = {} + + def connect( + self, receiver: T_callable, sender: t.Any = ANY, weak: bool = True + ) -> T_callable: + """Connect *receiver* to signal events sent by *sender*. + + :param receiver: A callable. Will be invoked by :meth:`send` with + `sender=` as a single positional argument and any ``kwargs`` that + were provided to a call to :meth:`send`. + + :param sender: Any object or :obj:`ANY`, defaults to ``ANY``. + Restricts notifications delivered to *receiver* to only those + :meth:`send` emissions sent by *sender*. If ``ANY``, the receiver + will always be notified. A *receiver* may be connected to + multiple *sender* values on the same Signal through multiple calls + to :meth:`connect`. + + :param weak: If true, the Signal will hold a weakref to *receiver* + and automatically disconnect when *receiver* goes out of scope or + is garbage collected. Defaults to True. + + """ + receiver_id = hashable_identity(receiver) + receiver_ref: T_callable | annotatable_weakref + + if weak: + receiver_ref = reference(receiver, self._cleanup_receiver) + receiver_ref.receiver_id = receiver_id + else: + receiver_ref = receiver + sender_id: IdentityType + if sender is ANY: + sender_id = ANY_ID + else: + sender_id = hashable_identity(sender) + + self.receivers.setdefault(receiver_id, receiver_ref) + self._by_sender[sender_id].add(receiver_id) + self._by_receiver[receiver_id].add(sender_id) + del receiver_ref + + if sender is not ANY and sender_id not in self._weak_senders: + # wire together a cleanup for weakref-able senders + try: + sender_ref = reference(sender, self._cleanup_sender) + sender_ref.sender_id = sender_id + except TypeError: + pass + else: + self._weak_senders.setdefault(sender_id, sender_ref) + del sender_ref + + # broadcast this connection. if receivers raise, disconnect. 
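+        # Two emissions can happen here: this signal's own
+        # receiver_connected and the deprecated module-level
+        # receiver_connected. A TypeError raised by either listener
+        # rolls the connection back before re-raising.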
+ if "receiver_connected" in self.__dict__ and self.receiver_connected.receivers: + try: + self.receiver_connected.send( + self, receiver=receiver, sender=sender, weak=weak + ) + except TypeError as e: + self.disconnect(receiver, sender) + raise e + if receiver_connected.receivers and self is not receiver_connected: + try: + receiver_connected.send( + self, receiver_arg=receiver, sender_arg=sender, weak_arg=weak + ) + except TypeError as e: + self.disconnect(receiver, sender) + raise e + return receiver + + def connect_via( + self, sender: t.Any, weak: bool = False + ) -> t.Callable[[T_callable], T_callable]: + """Connect the decorated function as a receiver for *sender*. + + :param sender: Any object or :obj:`ANY`. The decorated function + will only receive :meth:`send` emissions sent by *sender*. If + ``ANY``, the receiver will always be notified. A function may be + decorated multiple times with differing *sender* values. + + :param weak: If true, the Signal will hold a weakref to the + decorated function and automatically disconnect when *receiver* + goes out of scope or is garbage collected. Unlike + :meth:`connect`, this defaults to False. + + The decorated function will be invoked by :meth:`send` with + `sender=` as a single positional argument and any ``kwargs`` that + were provided to the call to :meth:`send`. + + + .. versionadded:: 1.1 + + """ + + def decorator(fn: T_callable) -> T_callable: + self.connect(fn, sender, weak) + return fn + + return decorator + + @contextmanager + def connected_to( + self, receiver: t.Callable, sender: t.Any = ANY + ) -> t.Generator[None, None, None]: + """Execute a block with the signal temporarily connected to *receiver*. + + :param receiver: a receiver callable + :param sender: optional, a sender to filter on + + This is a context manager for use in the ``with`` statement. It can + be useful in unit tests. *receiver* is connected to the signal for + the duration of the ``with`` block, and will be disconnected + automatically when exiting the block: + + .. code-block:: python + + with on_ready.connected_to(receiver): + # do stuff + on_ready.send(123) + + .. versionadded:: 1.1 + + """ + self.connect(receiver, sender=sender, weak=False) + try: + yield None + except Exception as e: + self.disconnect(receiver) + raise e + else: + self.disconnect(receiver) + + @contextmanager + def muted(self) -> t.Generator[None, None, None]: + """Context manager for temporarily disabling signal. + Useful for test purposes. + """ + self.is_muted = True + try: + yield None + except Exception as e: + raise e + finally: + self.is_muted = False + + def temporarily_connected_to( + self, receiver: t.Callable, sender: t.Any = ANY + ) -> t.ContextManager[None]: + """An alias for :meth:`connected_to`. + + :param receiver: a receiver callable + :param sender: optional, a sender to filter on + + .. versionadded:: 0.9 + + .. versionchanged:: 1.1 + Renamed to :meth:`connected_to`. ``temporarily_connected_to`` was + deprecated in 1.2 and will be removed in a subsequent version. + + """ + warn( + "temporarily_connected_to is deprecated; use connected_to instead.", + DeprecationWarning, + ) + return self.connected_to(receiver, sender) + + def send( + self, + *sender: t.Any, + _async_wrapper: AsyncWrapperType | None = None, + **kwargs: t.Any, + ) -> list[tuple[t.Callable, t.Any]]: + """Emit this signal on behalf of *sender*, passing on ``kwargs``. + + Returns a list of 2-tuples, pairing receivers with their return + value. The ordering of receiver notification is undefined. 
+ + :param sender: Any object or ``None``. If omitted, synonymous + with ``None``. Only accepts one positional argument. + :param _async_wrapper: A callable that should wrap a coroutine + receiver and run it when called synchronously. + + :param kwargs: Data to be sent to receivers. + """ + if self.is_muted: + return [] + + sender = self._extract_sender(sender) + results = [] + for receiver in self.receivers_for(sender): + if is_coroutine_function(receiver): + if _async_wrapper is None: + raise RuntimeError("Cannot send to a coroutine function") + receiver = _async_wrapper(receiver) + result = receiver(sender, **kwargs) # type: ignore[call-arg] + results.append((receiver, result)) + return results + + async def send_async( + self, + *sender: t.Any, + _sync_wrapper: SyncWrapperType | None = None, + **kwargs: t.Any, + ) -> list[tuple[t.Callable, t.Any]]: + """Emit this signal on behalf of *sender*, passing on ``kwargs``. + + Returns a list of 2-tuples, pairing receivers with their return + value. The ordering of receiver notification is undefined. + + :param sender: Any object or ``None``. If omitted, synonymous + with ``None``. Only accepts one positional argument. + :param _sync_wrapper: A callable that should wrap a synchronous + receiver and run it when awaited. + + :param kwargs: Data to be sent to receivers. + """ + if self.is_muted: + return [] + + sender = self._extract_sender(sender) + results = [] + for receiver in self.receivers_for(sender): + if not is_coroutine_function(receiver): + if _sync_wrapper is None: + raise RuntimeError("Cannot send to a non-coroutine function") + receiver = _sync_wrapper(receiver) # type: ignore[arg-type] + result = await receiver(sender, **kwargs) # type: ignore[call-arg, misc] + results.append((receiver, result)) + return results + + def _extract_sender(self, sender: t.Any) -> t.Any: + if not self.receivers: + # Ensure correct signature even on no-op sends, disable with -O + # for lowest possible cost. + if __debug__ and sender and len(sender) > 1: + raise TypeError( + f"send() accepts only one positional argument, {len(sender)} given" + ) + return [] + + # Using '*sender' rather than 'sender=None' allows 'sender' to be + # used as a keyword argument- i.e. it's an invisible name in the + # function signature. + if len(sender) == 0: + sender = None + elif len(sender) > 1: + raise TypeError( + f"send() accepts only one positional argument, {len(sender)} given" + ) + else: + sender = sender[0] + return sender + + def has_receivers_for(self, sender: t.Any) -> bool: + """True if there is probably a receiver for *sender*. + + Performs an optimistic check only. Does not guarantee that all + weakly referenced receivers are still alive. See + :meth:`receivers_for` for a stronger search. 
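+
+        A cheap guard before an expensive send, with ``sig`` as an
+        illustrative name::
+
+            if sig.has_receivers_for(sender):
+                sig.send(sender)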
+ + """ + if not self.receivers: + return False + if self._by_sender[ANY_ID]: + return True + if sender is ANY: + return False + return hashable_identity(sender) in self._by_sender + + def receivers_for( + self, sender: t.Any + ) -> t.Generator[t.Callable | annotatable_weakref, None, None]: + """Iterate all live receivers listening for *sender*.""" + # TODO: test receivers_for(ANY) + if self.receivers: + sender_id = hashable_identity(sender) + if sender_id in self._by_sender: + ids = self._by_sender[ANY_ID] | self._by_sender[sender_id] + else: + ids = self._by_sender[ANY_ID].copy() + for receiver_id in ids: + receiver = self.receivers.get(receiver_id) + if receiver is None: + continue + if isinstance(receiver, WeakTypes): + strong = receiver() + if strong is None: + self._disconnect(receiver_id, ANY_ID) + continue + receiver = strong + yield receiver # type: ignore[misc] + + def disconnect(self, receiver: t.Callable, sender: t.Any = ANY) -> None: + """Disconnect *receiver* from this signal's events. + + :param receiver: a previously :meth:`connected<connect>` callable + + :param sender: a specific sender to disconnect from, or :obj:`ANY` + to disconnect from all senders. Defaults to ``ANY``. + + """ + sender_id: IdentityType + if sender is ANY: + sender_id = ANY_ID + else: + sender_id = hashable_identity(sender) + receiver_id = hashable_identity(receiver) + self._disconnect(receiver_id, sender_id) + + if ( + "receiver_disconnected" in self.__dict__ + and self.receiver_disconnected.receivers + ): + self.receiver_disconnected.send(self, receiver=receiver, sender=sender) + + def _disconnect(self, receiver_id: IdentityType, sender_id: IdentityType) -> None: + if sender_id == ANY_ID: + if self._by_receiver.pop(receiver_id, False): + for bucket in self._by_sender.values(): + bucket.discard(receiver_id) + self.receivers.pop(receiver_id, None) + else: + self._by_sender[sender_id].discard(receiver_id) + self._by_receiver[receiver_id].discard(sender_id) + + def _cleanup_receiver(self, receiver_ref: annotatable_weakref) -> None: + """Disconnect a receiver from all senders.""" + self._disconnect(t.cast(IdentityType, receiver_ref.receiver_id), ANY_ID) + + def _cleanup_sender(self, sender_ref: annotatable_weakref) -> None: + """Disconnect all receivers from a sender.""" + sender_id = t.cast(IdentityType, sender_ref.sender_id) + assert sender_id != ANY_ID + self._weak_senders.pop(sender_id, None) + for receiver_id in self._by_sender.pop(sender_id, ()): + self._by_receiver[receiver_id].discard(sender_id) + + def _cleanup_bookkeeping(self) -> None: + """Prune unused sender/receiver bookkeeping. Not threadsafe. + + Connecting & disconnecting leave behind a small amount of bookkeeping + for the receiver and sender values. Typical workloads using Blinker, + for example in most web apps, Flask, CLI scripts, etc., are not + adversely affected by this bookkeeping. + + With a long-running Python process performing dynamic signal routing + with high volume- e.g. connecting to function closures, "senders" are + all unique object instances, and doing all of this over and over- you + may see memory usage will grow due to extraneous bookkeeping. (An empty + set() for each stale sender/receiver pair.) + + This method will prune that bookkeeping away, with the caveat that such + pruning is not threadsafe. The risk is that cleanup of a fully + disconnected receiver/sender pair occurs while another thread is + connecting that same pair. 
If you are in the highly dynamic, unique + receiver/sender situation that has lead you to this method, that + failure mode is perhaps not a big deal for you. + """ + for mapping in (self._by_sender, self._by_receiver): + for _id, bucket in list(mapping.items()): + if not bucket: + mapping.pop(_id, None) + + def _clear_state(self) -> None: + """Throw away all signal state. Useful for unit tests.""" + self._weak_senders.clear() + self.receivers.clear() + self._by_sender.clear() + self._by_receiver.clear() + + +receiver_connected = Signal( + """\ +Sent by a :class:`Signal` after a receiver connects. + +:argument: the Signal that was connected to +:keyword receiver_arg: the connected receiver +:keyword sender_arg: the sender to connect to +:keyword weak_arg: true if the connection to receiver_arg is a weak reference + +.. deprecated:: 1.2 + +As of 1.2, individual signals have their own private +:attr:`~Signal.receiver_connected` and +:attr:`~Signal.receiver_disconnected` signals with a slightly simplified +call signature. This global signal is planned to be removed in 1.6. + +""" +) + + +class NamedSignal(Signal): + """A named generic notification emitter.""" + + def __init__(self, name: str, doc: str | None = None) -> None: + Signal.__init__(self, doc) + + #: The name of this signal. + self.name = name + + def __repr__(self) -> str: + base = Signal.__repr__(self) + return f"{base[:-1]}; {self.name!r}>" + + +class Namespace(dict): + """A mapping of signal names to signals.""" + + def signal(self, name: str, doc: str | None = None) -> NamedSignal: + """Return the :class:`NamedSignal` *name*, creating it if required. + + Repeated calls to this function will return the same signal object. + + """ + try: + return self[name] # type: ignore[no-any-return] + except KeyError: + result = self.setdefault(name, NamedSignal(name, doc)) + return result # type: ignore[no-any-return] + + +class WeakNamespace(WeakValueDictionary): + """A weak mapping of signal names to signals. + + Automatically cleans up unused Signals when the last reference goes out + of scope. This namespace implementation exists for a measure of legacy + compatibility with Blinker <= 1.2, and may be dropped in the future. + + .. versionadded:: 1.3 + + """ + + def signal(self, name: str, doc: str | None = None) -> NamedSignal: + """Return the :class:`NamedSignal` *name*, creating it if required. + + Repeated calls to this function will return the same signal object. + + """ + try: + return self[name] # type: ignore[no-any-return] + except KeyError: + result = self.setdefault(name, NamedSignal(name, doc)) + return result # type: ignore[no-any-return] + + +signal = Namespace().signal diff --git a/backend/test/lib/python3.8/site-packages/blinker/py.typed b/backend/test/lib/python3.8/site-packages/blinker/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/bson/__init__.py b/backend/test/lib/python3.8/site-packages/bson/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4379b947949be42ad766be83cde32efa5aa2f10c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/__init__.py @@ -0,0 +1,1459 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""BSON (Binary JSON) encoding and decoding. + +The mapping from Python types to BSON types is as follows: + +======================================= ============= =================== +Python Type BSON Type Supported Direction +======================================= ============= =================== +None null both +bool boolean both +int [#int]_ int32 / int64 py -> bson +`bson.int64.Int64` int64 both +float number (real) both +str string both +list array both +dict / `SON` object both +datetime.datetime [#dt]_ [#dt2]_ date both +`bson.regex.Regex` regex both +compiled re [#re]_ regex py -> bson +`bson.binary.Binary` binary both +`bson.objectid.ObjectId` oid both +`bson.dbref.DBRef` dbref both +None undefined bson -> py +`bson.code.Code` code both +str symbol bson -> py +bytes [#bytes]_ binary both +======================================= ============= =================== + +.. [#int] A Python int will be saved as a BSON int32 or BSON int64 depending + on its size. A BSON int32 will always decode to a Python int. A BSON + int64 will always decode to a :class:`~bson.int64.Int64`. +.. [#dt] datetime.datetime instances will be rounded to the nearest + millisecond when saved +.. [#dt2] all datetime.datetime instances are treated as *naive*. clients + should always use UTC. +.. [#re] :class:`~bson.regex.Regex` instances and regular expression + objects from ``re.compile()`` are both saved as BSON regular expressions. + BSON regular expressions are decoded as :class:`~bson.regex.Regex` + instances. +.. [#bytes] The bytes type is encoded as BSON binary with + subtype 0. It will be decoded back to bytes. +""" +import datetime +import itertools +import os +import re +import struct +import sys +import uuid +from codecs import utf_8_decode as _utf_8_decode +from codecs import utf_8_encode as _utf_8_encode +from collections import abc as _abc +from typing import ( + IO, + TYPE_CHECKING, + Any, + BinaryIO, + Callable, + Dict, + Generator, + Iterator, + List, + Mapping, + MutableMapping, + NoReturn, + Optional, + Sequence, + Tuple, + Type, + TypeVar, + Union, + cast, + overload, +) + +from bson.binary import ( + ALL_UUID_SUBTYPES, + CSHARP_LEGACY, + JAVA_LEGACY, + OLD_UUID_SUBTYPE, + STANDARD, + UUID_SUBTYPE, + Binary, + UuidRepresentation, +) +from bson.code import Code +from bson.codec_options import ( + DEFAULT_CODEC_OPTIONS, + CodecOptions, + DatetimeConversion, + _raw_document_class, +) +from bson.datetime_ms import ( + EPOCH_AWARE, + EPOCH_NAIVE, + DatetimeMS, + _datetime_to_millis, + _millis_to_datetime, + utc, +) +from bson.dbref import DBRef +from bson.decimal128 import Decimal128 +from bson.errors import InvalidBSON, InvalidDocument, InvalidStringData +from bson.int64 import Int64 +from bson.max_key import MaxKey +from bson.min_key import MinKey +from bson.objectid import ObjectId +from bson.regex import Regex +from bson.son import RE_TYPE, SON +from bson.timestamp import Timestamp + +# Import some modules for type-checking only. 
+if TYPE_CHECKING: + from bson.typings import _DocumentType, _ReadableBuffer + +try: + from bson import _cbson # type: ignore[attr-defined] + + _USE_C = True +except ImportError: + _USE_C = False + +__all__ = [ + "ALL_UUID_SUBTYPES", + "CSHARP_LEGACY", + "JAVA_LEGACY", + "OLD_UUID_SUBTYPE", + "STANDARD", + "UUID_SUBTYPE", + "Binary", + "UuidRepresentation", + "Code", + "DEFAULT_CODEC_OPTIONS", + "CodecOptions", + "DBRef", + "Decimal128", + "InvalidBSON", + "InvalidDocument", + "InvalidStringData", + "Int64", + "MaxKey", + "MinKey", + "ObjectId", + "Regex", + "RE_TYPE", + "SON", + "Timestamp", + "utc", + "EPOCH_AWARE", + "EPOCH_NAIVE", + "BSONNUM", + "BSONSTR", + "BSONOBJ", + "BSONARR", + "BSONBIN", + "BSONUND", + "BSONOID", + "BSONBOO", + "BSONDAT", + "BSONNUL", + "BSONRGX", + "BSONREF", + "BSONCOD", + "BSONSYM", + "BSONCWS", + "BSONINT", + "BSONTIM", + "BSONLON", + "BSONDEC", + "BSONMIN", + "BSONMAX", + "get_data_and_view", + "gen_list_name", + "encode", + "decode", + "decode_all", + "decode_iter", + "decode_file_iter", + "is_valid", + "BSON", + "has_c", + "DatetimeConversion", + "DatetimeMS", +] + +BSONNUM = b"\x01" # Floating point +BSONSTR = b"\x02" # UTF-8 string +BSONOBJ = b"\x03" # Embedded document +BSONARR = b"\x04" # Array +BSONBIN = b"\x05" # Binary +BSONUND = b"\x06" # Undefined +BSONOID = b"\x07" # ObjectId +BSONBOO = b"\x08" # Boolean +BSONDAT = b"\x09" # UTC Datetime +BSONNUL = b"\x0A" # Null +BSONRGX = b"\x0B" # Regex +BSONREF = b"\x0C" # DBRef +BSONCOD = b"\x0D" # Javascript code +BSONSYM = b"\x0E" # Symbol +BSONCWS = b"\x0F" # Javascript code with scope +BSONINT = b"\x10" # 32bit int +BSONTIM = b"\x11" # Timestamp +BSONLON = b"\x12" # 64bit int +BSONDEC = b"\x13" # Decimal128 +BSONMIN = b"\xFF" # Min key +BSONMAX = b"\x7F" # Max key + + +_UNPACK_FLOAT_FROM = struct.Struct("<d").unpack_from +_UNPACK_INT = struct.Struct("<i").unpack +_UNPACK_INT_FROM = struct.Struct("<i").unpack_from +_UNPACK_LENGTH_SUBTYPE_FROM = struct.Struct("<iB").unpack_from +_UNPACK_LONG_FROM = struct.Struct("<q").unpack_from +_UNPACK_TIMESTAMP_FROM = struct.Struct("<II").unpack_from + + +def get_data_and_view(data: Any) -> Tuple[Any, memoryview]: + if isinstance(data, (bytes, bytearray)): + return data, memoryview(data) + view = memoryview(data) + return view.tobytes(), view + + +def _raise_unknown_type(element_type: int, element_name: str) -> NoReturn: + """Unknown type helper.""" + raise InvalidBSON( + "Detected unknown BSON type {!r} for fieldname '{}'. 
Are " + "you using the latest driver version?".format(chr(element_type).encode(), element_name) + ) + + +def _get_int( + data: Any, view: Any, position: int, dummy0: Any, dummy1: Any, dummy2: Any +) -> Tuple[int, int]: + """Decode a BSON int32 to python int.""" + return _UNPACK_INT_FROM(data, position)[0], position + 4 + + +def _get_c_string(data: Any, view: Any, position: int, opts: CodecOptions) -> Tuple[str, int]: + """Decode a BSON 'C' string to python str.""" + end = data.index(b"\x00", position) + return _utf_8_decode(view[position:end], opts.unicode_decode_error_handler, True)[0], end + 1 + + +def _get_float( + data: Any, view: Any, position: int, dummy0: Any, dummy1: Any, dummy2: Any +) -> Tuple[float, int]: + """Decode a BSON double to python float.""" + return _UNPACK_FLOAT_FROM(data, position)[0], position + 8 + + +def _get_string( + data: Any, view: Any, position: int, obj_end: int, opts: CodecOptions, dummy: Any +) -> Tuple[str, int]: + """Decode a BSON string to python str.""" + length = _UNPACK_INT_FROM(data, position)[0] + position += 4 + if length < 1 or obj_end - position < length: + raise InvalidBSON("invalid string length") + end = position + length - 1 + if data[end] != 0: + raise InvalidBSON("invalid end of string") + return _utf_8_decode(view[position:end], opts.unicode_decode_error_handler, True)[0], end + 1 + + +def _get_object_size(data: Any, position: int, obj_end: int) -> Tuple[int, int]: + """Validate and return a BSON document's size.""" + try: + obj_size = _UNPACK_INT_FROM(data, position)[0] + except struct.error as exc: + raise InvalidBSON(str(exc)) + end = position + obj_size - 1 + if data[end] != 0: + raise InvalidBSON("bad eoo") + if end >= obj_end: + raise InvalidBSON("invalid object length") + # If this is the top-level document, validate the total size too. + if position == 0 and obj_size != obj_end: + raise InvalidBSON("invalid object length") + return obj_size, end + + +def _get_object( + data: Any, view: Any, position: int, obj_end: int, opts: CodecOptions, dummy: Any +) -> Tuple[Any, int]: + """Decode a BSON subdocument to opts.document_class or bson.dbref.DBRef.""" + obj_size, end = _get_object_size(data, position, obj_end) + if _raw_document_class(opts.document_class): + return (opts.document_class(data[position : end + 1], opts), position + obj_size) + + obj = _elements_to_dict(data, view, position + 4, end, opts) + + position += obj_size + # If DBRef validation fails, return a normal doc. + if ( + isinstance(obj.get("$ref"), str) + and "$id" in obj + and isinstance(obj.get("$db"), (str, type(None))) + ): + return (DBRef(obj.pop("$ref"), obj.pop("$id", None), obj.pop("$db", None), obj), position) + return obj, position + + +def _get_array( + data: Any, view: Any, position: int, obj_end: int, opts: CodecOptions, element_name: str +) -> Tuple[Any, int]: + """Decode a BSON array to python list.""" + size = _UNPACK_INT_FROM(data, position)[0] + end = position + size - 1 + if data[end] != 0: + raise InvalidBSON("bad eoo") + + position += 4 + end -= 1 + result: List[Any] = [] + + # Avoid doing global and attribute lookups in the loop. + append = result.append + index = data.index + getter = _ELEMENT_GETTER + decoder_map = opts.type_registry._decoder_map + + while position < end: + element_type = data[position] + # Just skip the keys. 
+ position = index(b"\x00", position) + 1 + try: + value, position = getter[element_type]( + data, view, position, obj_end, opts, element_name + ) + except KeyError: + _raise_unknown_type(element_type, element_name) + + if decoder_map: + custom_decoder = decoder_map.get(type(value)) + if custom_decoder is not None: + value = custom_decoder(value) + + append(value) + + if position != end + 1: + raise InvalidBSON("bad array length") + return result, position + 1 + + +def _get_binary( + data: Any, view: Any, position: int, obj_end: int, opts: CodecOptions, dummy1: Any +) -> Tuple[Union[Binary, uuid.UUID], int]: + """Decode a BSON binary to bson.binary.Binary or python UUID.""" + length, subtype = _UNPACK_LENGTH_SUBTYPE_FROM(data, position) + position += 5 + if subtype == 2: + length2 = _UNPACK_INT_FROM(data, position)[0] + position += 4 + if length2 != length - 4: + raise InvalidBSON("invalid binary (st 2) - lengths don't match!") + length = length2 + end = position + length + if length < 0 or end > obj_end: + raise InvalidBSON("bad binary object length") + + # Convert UUID subtypes to native UUIDs. + if subtype in ALL_UUID_SUBTYPES: + uuid_rep = opts.uuid_representation + binary_value = Binary(data[position:end], subtype) + if ( + (uuid_rep == UuidRepresentation.UNSPECIFIED) + or (subtype == UUID_SUBTYPE and uuid_rep != STANDARD) + or (subtype == OLD_UUID_SUBTYPE and uuid_rep == STANDARD) + ): + return binary_value, end + return binary_value.as_uuid(uuid_rep), end + + # Decode subtype 0 to 'bytes'. + if subtype == 0: + value = data[position:end] + else: + value = Binary(data[position:end], subtype) + + return value, end + + +def _get_oid( + data: Any, view: Any, position: int, dummy0: Any, dummy1: Any, dummy2: Any +) -> Tuple[ObjectId, int]: + """Decode a BSON ObjectId to bson.objectid.ObjectId.""" + end = position + 12 + return ObjectId(data[position:end]), end + + +def _get_boolean( + data: Any, view: Any, position: int, dummy0: Any, dummy1: Any, dummy2: Any +) -> Tuple[bool, int]: + """Decode a BSON true/false to python True/False.""" + end = position + 1 + boolean_byte = data[position:end] + if boolean_byte == b"\x00": + return False, end + elif boolean_byte == b"\x01": + return True, end + raise InvalidBSON("invalid boolean value: %r" % boolean_byte) + + +def _get_date( + data: Any, view: Any, position: int, dummy0: int, opts: CodecOptions, dummy1: Any +) -> Tuple[Union[datetime.datetime, DatetimeMS], int]: + """Decode a BSON datetime to python datetime.datetime.""" + return _millis_to_datetime(_UNPACK_LONG_FROM(data, position)[0], opts), position + 8 + + +def _get_code( + data: Any, view: Any, position: int, obj_end: int, opts: CodecOptions, element_name: str +) -> Tuple[Code, int]: + """Decode a BSON code to bson.code.Code.""" + code, position = _get_string(data, view, position, obj_end, opts, element_name) + return Code(code), position + + +def _get_code_w_scope( + data: Any, view: Any, position: int, obj_end: int, opts: CodecOptions, element_name: str +) -> Tuple[Code, int]: + """Decode a BSON code_w_scope to bson.code.Code.""" + code_end = position + _UNPACK_INT_FROM(data, position)[0] + code, position = _get_string(data, view, position + 4, code_end, opts, element_name) + scope, position = _get_object(data, view, position, code_end, opts, element_name) + if position != code_end: + raise InvalidBSON("scope outside of javascript code boundaries") + return Code(code, scope), position + + +def _get_regex( + data: Any, view: Any, position: int, dummy0: Any, opts: CodecOptions, dummy1: 
Any +) -> Tuple[Regex, int]: + """Decode a BSON regex to bson.regex.Regex or a python pattern object.""" + pattern, position = _get_c_string(data, view, position, opts) + bson_flags, position = _get_c_string(data, view, position, opts) + bson_re = Regex(pattern, bson_flags) + return bson_re, position + + +def _get_ref( + data: Any, view: Any, position: int, obj_end: int, opts: CodecOptions, element_name: str +) -> Tuple[DBRef, int]: + """Decode (deprecated) BSON DBPointer to bson.dbref.DBRef.""" + collection, position = _get_string(data, view, position, obj_end, opts, element_name) + oid, position = _get_oid(data, view, position, obj_end, opts, element_name) + return DBRef(collection, oid), position + + +def _get_timestamp( + data: Any, view: Any, position: int, dummy0: Any, dummy1: Any, dummy2: Any +) -> Tuple[Timestamp, int]: + """Decode a BSON timestamp to bson.timestamp.Timestamp.""" + inc, timestamp = _UNPACK_TIMESTAMP_FROM(data, position) + return Timestamp(timestamp, inc), position + 8 + + +def _get_int64( + data: Any, view: Any, position: int, dummy0: Any, dummy1: Any, dummy2: Any +) -> Tuple[Int64, int]: + """Decode a BSON int64 to bson.int64.Int64.""" + return Int64(_UNPACK_LONG_FROM(data, position)[0]), position + 8 + + +def _get_decimal128( + data: Any, view: Any, position: int, dummy0: Any, dummy1: Any, dummy2: Any +) -> Tuple[Decimal128, int]: + """Decode a BSON decimal128 to bson.decimal128.Decimal128.""" + end = position + 16 + return Decimal128.from_bid(data[position:end]), end + + +# Each decoder function's signature is: +# - data: bytes +# - view: memoryview that references `data` +# - position: int, beginning of object in 'data' to decode +# - obj_end: int, end of object to decode in 'data' if variable-length type +# - opts: a CodecOptions +_ELEMENT_GETTER: Dict[int, Callable[..., Tuple[Any, int]]] = { + ord(BSONNUM): _get_float, + ord(BSONSTR): _get_string, + ord(BSONOBJ): _get_object, + ord(BSONARR): _get_array, + ord(BSONBIN): _get_binary, + ord(BSONUND): lambda u, v, w, x, y, z: (None, w), # Deprecated undefined + ord(BSONOID): _get_oid, + ord(BSONBOO): _get_boolean, + ord(BSONDAT): _get_date, + ord(BSONNUL): lambda u, v, w, x, y, z: (None, w), + ord(BSONRGX): _get_regex, + ord(BSONREF): _get_ref, # Deprecated DBPointer + ord(BSONCOD): _get_code, + ord(BSONSYM): _get_string, # Deprecated symbol + ord(BSONCWS): _get_code_w_scope, + ord(BSONINT): _get_int, + ord(BSONTIM): _get_timestamp, + ord(BSONLON): _get_int64, + ord(BSONDEC): _get_decimal128, + ord(BSONMIN): lambda u, v, w, x, y, z: (MinKey(), w), + ord(BSONMAX): lambda u, v, w, x, y, z: (MaxKey(), w), +} + + +if _USE_C: + + def _element_to_dict( + data: Any, + view: Any, + position: int, + obj_end: int, + opts: CodecOptions, + raw_array: bool = False, + ) -> Any: + return _cbson._element_to_dict(data, position, obj_end, opts, raw_array) + +else: + + def _element_to_dict( + data: Any, + view: Any, + position: int, + obj_end: int, + opts: CodecOptions, + raw_array: bool = False, + ) -> Any: + """Decode a single key, value pair.""" + element_type = data[position] + position += 1 + element_name, position = _get_c_string(data, view, position, opts) + if raw_array and element_type == ord(BSONARR): + _, end = _get_object_size(data, position, len(data)) + return element_name, view[position : end + 1], end + 1 + try: + value, position = _ELEMENT_GETTER[element_type]( + data, view, position, obj_end, opts, element_name + ) + except KeyError: + _raise_unknown_type(element_type, element_name) + + if 
opts.type_registry._decoder_map: + custom_decoder = opts.type_registry._decoder_map.get(type(value)) + if custom_decoder is not None: + value = custom_decoder(value) + + return element_name, value, position + + +_T = TypeVar("_T", bound=MutableMapping[Any, Any]) + + +def _raw_to_dict( + data: Any, position: int, obj_end: int, opts: CodecOptions, result: _T, raw_array: bool = False +) -> _T: + data, view = get_data_and_view(data) + return _elements_to_dict(data, view, position, obj_end, opts, result, raw_array=raw_array) + + +def _elements_to_dict( + data: Any, + view: Any, + position: int, + obj_end: int, + opts: CodecOptions, + result: Any = None, + raw_array: bool = False, +) -> Any: + """Decode a BSON document into result.""" + if result is None: + result = opts.document_class() + end = obj_end - 1 + while position < end: + key, value, position = _element_to_dict( + data, view, position, obj_end, opts, raw_array=raw_array + ) + result[key] = value + if position != obj_end: + raise InvalidBSON("bad object or element length") + return result + + +def _bson_to_dict(data: Any, opts: CodecOptions) -> Any: + """Decode a BSON string to document_class.""" + data, view = get_data_and_view(data) + try: + if _raw_document_class(opts.document_class): + return opts.document_class(data, opts) + _, end = _get_object_size(data, 0, len(data)) + return _elements_to_dict(data, view, 4, end, opts) + except InvalidBSON: + raise + except Exception: + # Change exception type to InvalidBSON but preserve traceback. + _, exc_value, exc_tb = sys.exc_info() + raise InvalidBSON(str(exc_value)).with_traceback(exc_tb) + + +if _USE_C: + _bson_to_dict = _cbson._bson_to_dict # noqa: F811 + + +_PACK_FLOAT = struct.Struct("<d").pack +_PACK_INT = struct.Struct("<i").pack +_PACK_LENGTH_SUBTYPE = struct.Struct("<iB").pack +_PACK_LONG = struct.Struct("<q").pack +_PACK_TIMESTAMP = struct.Struct("<II").pack +_LIST_NAMES = tuple((str(i) + "\x00").encode("utf8") for i in range(1000)) + + +def gen_list_name() -> Generator[bytes, None, None]: + """Generate "keys" for encoded lists in the sequence + b"0\x00", b"1\x00", b"2\x00", ... + + The first 1000 keys are returned from a pre-built cache. All + subsequent keys are generated on the fly. 
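+
+    A sketch of the first few values::
+
+        names = gen_list_name()
+        assert next(names) == b"0\x00"
+        assert next(names) == b"1\x00"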
+ """ + yield from _LIST_NAMES + + counter = itertools.count(1000) + while True: + yield (str(next(counter)) + "\x00").encode("utf8") + + +def _make_c_string_check(string: Union[str, bytes]) -> bytes: + """Make a 'C' string, checking for embedded NUL characters.""" + if isinstance(string, bytes): + if b"\x00" in string: + raise InvalidDocument("BSON keys / regex patterns must not contain a NUL character") + try: + _utf_8_decode(string, None, True) + return string + b"\x00" + except UnicodeError: + raise InvalidStringData("strings in documents must be valid UTF-8: %r" % string) + else: + if "\x00" in string: + raise InvalidDocument("BSON keys / regex patterns must not contain a NUL character") + return _utf_8_encode(string)[0] + b"\x00" + + +def _make_c_string(string: Union[str, bytes]) -> bytes: + """Make a 'C' string.""" + if isinstance(string, bytes): + try: + _utf_8_decode(string, None, True) + return string + b"\x00" + except UnicodeError: + raise InvalidStringData("strings in documents must be valid UTF-8: %r" % string) + else: + return _utf_8_encode(string)[0] + b"\x00" + + +def _make_name(string: str) -> bytes: + """Make a 'C' string suitable for a BSON key.""" + if "\x00" in string: + raise InvalidDocument("BSON keys must not contain a NUL character") + return _utf_8_encode(string)[0] + b"\x00" + + +def _encode_float(name: bytes, value: float, dummy0: Any, dummy1: Any) -> bytes: + """Encode a float.""" + return b"\x01" + name + _PACK_FLOAT(value) + + +def _encode_bytes(name: bytes, value: bytes, dummy0: Any, dummy1: Any) -> bytes: + """Encode a python bytes.""" + # Python3 special case. Store 'bytes' as BSON binary subtype 0. + return b"\x05" + name + _PACK_INT(len(value)) + b"\x00" + value + + +def _encode_mapping(name: bytes, value: Any, check_keys: bool, opts: CodecOptions) -> bytes: + """Encode a mapping type.""" + if _raw_document_class(value): + return b"\x03" + name + value.raw + data = b"".join([_element_to_bson(key, val, check_keys, opts) for key, val in value.items()]) + return b"\x03" + name + _PACK_INT(len(data) + 5) + data + b"\x00" + + +def _encode_dbref(name: bytes, value: DBRef, check_keys: bool, opts: CodecOptions) -> bytes: + """Encode bson.dbref.DBRef.""" + buf = bytearray(b"\x03" + name + b"\x00\x00\x00\x00") + begin = len(buf) - 4 + + buf += _name_value_to_bson(b"$ref\x00", value.collection, check_keys, opts) + buf += _name_value_to_bson(b"$id\x00", value.id, check_keys, opts) + if value.database is not None: + buf += _name_value_to_bson(b"$db\x00", value.database, check_keys, opts) + for key, val in value._DBRef__kwargs.items(): + buf += _element_to_bson(key, val, check_keys, opts) + + buf += b"\x00" + buf[begin : begin + 4] = _PACK_INT(len(buf) - begin) + return bytes(buf) + + +def _encode_list(name: bytes, value: Sequence[Any], check_keys: bool, opts: CodecOptions) -> bytes: + """Encode a list/tuple.""" + lname = gen_list_name() + data = b"".join([_name_value_to_bson(next(lname), item, check_keys, opts) for item in value]) + return b"\x04" + name + _PACK_INT(len(data) + 5) + data + b"\x00" + + +def _encode_text(name: bytes, value: str, dummy0: Any, dummy1: Any) -> bytes: + """Encode a python str.""" + bvalue = _utf_8_encode(value)[0] + return b"\x02" + name + _PACK_INT(len(bvalue) + 1) + bvalue + b"\x00" + + +def _encode_binary(name: bytes, value: Binary, dummy0: Any, dummy1: Any) -> bytes: + """Encode bson.binary.Binary.""" + subtype = value.subtype + if subtype == 2: + value = _PACK_INT(len(value)) + value # type: ignore + return b"\x05" + name + 
_PACK_LENGTH_SUBTYPE(len(value), subtype) + value + + +def _encode_uuid(name: bytes, value: uuid.UUID, dummy: Any, opts: CodecOptions) -> bytes: + """Encode uuid.UUID.""" + uuid_representation = opts.uuid_representation + binval = Binary.from_uuid(value, uuid_representation=uuid_representation) + return _encode_binary(name, binval, dummy, opts) + + +def _encode_objectid(name: bytes, value: ObjectId, dummy: Any, dummy1: Any) -> bytes: + """Encode bson.objectid.ObjectId.""" + return b"\x07" + name + value.binary + + +def _encode_bool(name: bytes, value: bool, dummy0: Any, dummy1: Any) -> bytes: + """Encode a python boolean (True/False).""" + return b"\x08" + name + (value and b"\x01" or b"\x00") + + +def _encode_datetime(name: bytes, value: datetime.datetime, dummy0: Any, dummy1: Any) -> bytes: + """Encode datetime.datetime.""" + millis = _datetime_to_millis(value) + return b"\x09" + name + _PACK_LONG(millis) + + +def _encode_datetime_ms(name: bytes, value: DatetimeMS, dummy0: Any, dummy1: Any) -> bytes: + """Encode datetime.datetime.""" + millis = int(value) + return b"\x09" + name + _PACK_LONG(millis) + + +def _encode_none(name: bytes, dummy0: Any, dummy1: Any, dummy2: Any) -> bytes: + """Encode python None.""" + return b"\x0A" + name + + +def _encode_regex(name: bytes, value: Regex, dummy0: Any, dummy1: Any) -> bytes: + """Encode a python regex or bson.regex.Regex.""" + flags = value.flags + # Python 3 common case + if flags == re.UNICODE: + return b"\x0B" + name + _make_c_string_check(value.pattern) + b"u\x00" + elif flags == 0: + return b"\x0B" + name + _make_c_string_check(value.pattern) + b"\x00" + else: + sflags = b"" + if flags & re.IGNORECASE: + sflags += b"i" + if flags & re.LOCALE: + sflags += b"l" + if flags & re.MULTILINE: + sflags += b"m" + if flags & re.DOTALL: + sflags += b"s" + if flags & re.UNICODE: + sflags += b"u" + if flags & re.VERBOSE: + sflags += b"x" + sflags += b"\x00" + return b"\x0B" + name + _make_c_string_check(value.pattern) + sflags + + +def _encode_code(name: bytes, value: Code, dummy: Any, opts: CodecOptions) -> bytes: + """Encode bson.code.Code.""" + cstring = _make_c_string(value) + cstrlen = len(cstring) + if value.scope is None: + return b"\x0D" + name + _PACK_INT(cstrlen) + cstring + scope = _dict_to_bson(value.scope, False, opts, False) + full_length = _PACK_INT(8 + cstrlen + len(scope)) + return b"\x0F" + name + full_length + _PACK_INT(cstrlen) + cstring + scope + + +def _encode_int(name: bytes, value: int, dummy0: Any, dummy1: Any) -> bytes: + """Encode a python int.""" + if -2147483648 <= value <= 2147483647: + return b"\x10" + name + _PACK_INT(value) + else: + try: + return b"\x12" + name + _PACK_LONG(value) + except struct.error: + raise OverflowError("BSON can only handle up to 8-byte ints") + + +def _encode_timestamp(name: bytes, value: Any, dummy0: Any, dummy1: Any) -> bytes: + """Encode bson.timestamp.Timestamp.""" + return b"\x11" + name + _PACK_TIMESTAMP(value.inc, value.time) + + +def _encode_long(name: bytes, value: Any, dummy0: Any, dummy1: Any) -> bytes: + """Encode a bson.int64.Int64.""" + try: + return b"\x12" + name + _PACK_LONG(value) + except struct.error: + raise OverflowError("BSON can only handle up to 8-byte ints") + + +def _encode_decimal128(name: bytes, value: Decimal128, dummy0: Any, dummy1: Any) -> bytes: + """Encode bson.decimal128.Decimal128.""" + return b"\x13" + name + value.bid + + +def _encode_minkey(name: bytes, dummy0: Any, dummy1: Any, dummy2: Any) -> bytes: + """Encode bson.min_key.MinKey.""" + return b"\xFF" + 
name + + +def _encode_maxkey(name: bytes, dummy0: Any, dummy1: Any, dummy2: Any) -> bytes: + """Encode bson.max_key.MaxKey.""" + return b"\x7F" + name + + +# Each encoder function's signature is: +# - name: utf-8 bytes +# - value: a Python data type, e.g. a Python int for _encode_int +# - check_keys: bool, whether to check for invalid names +# - opts: a CodecOptions +_ENCODERS = { + bool: _encode_bool, + bytes: _encode_bytes, + datetime.datetime: _encode_datetime, + DatetimeMS: _encode_datetime_ms, + dict: _encode_mapping, + float: _encode_float, + int: _encode_int, + list: _encode_list, + str: _encode_text, + tuple: _encode_list, + type(None): _encode_none, + uuid.UUID: _encode_uuid, + Binary: _encode_binary, + Int64: _encode_long, + Code: _encode_code, + DBRef: _encode_dbref, + MaxKey: _encode_maxkey, + MinKey: _encode_minkey, + ObjectId: _encode_objectid, + Regex: _encode_regex, + RE_TYPE: _encode_regex, + SON: _encode_mapping, + Timestamp: _encode_timestamp, + Decimal128: _encode_decimal128, + # Special case. This will never be looked up directly. + _abc.Mapping: _encode_mapping, +} + + +_MARKERS = { + 5: _encode_binary, + 7: _encode_objectid, + 11: _encode_regex, + 13: _encode_code, + 17: _encode_timestamp, + 18: _encode_long, + 100: _encode_dbref, + 127: _encode_maxkey, + 255: _encode_minkey, +} + + +_BUILT_IN_TYPES = tuple(t for t in _ENCODERS) + + +def _name_value_to_bson( + name: bytes, + value: Any, + check_keys: bool, + opts: CodecOptions, + in_custom_call: bool = False, + in_fallback_call: bool = False, +) -> bytes: + """Encode a single name, value pair.""" + + was_integer_overflow = False + + # First see if the type is already cached. KeyError will only ever + # happen once per subtype. + try: + return _ENCODERS[type(value)](name, value, check_keys, opts) # type: ignore + except KeyError: + pass + except OverflowError: + if not isinstance(value, int): + raise + + # Give the fallback_encoder a chance + was_integer_overflow = True + + # Second, fall back to trying _type_marker. This has to be done + # before the loop below since users could subclass one of our + # custom types that subclasses a python built-in (e.g. Binary) + marker = getattr(value, "_type_marker", None) + if isinstance(marker, int) and marker in _MARKERS: + func = _MARKERS[marker] + # Cache this type for faster subsequent lookup. + _ENCODERS[type(value)] = func + return func(name, value, check_keys, opts) # type: ignore + + # Third, check if a type encoder is registered for this type. + # Note that subtypes of registered custom types are not auto-encoded. + if not in_custom_call and opts.type_registry._encoder_map: + custom_encoder = opts.type_registry._encoder_map.get(type(value)) + if custom_encoder is not None: + return _name_value_to_bson( + name, custom_encoder(value), check_keys, opts, in_custom_call=True + ) + + # Fourth, test each base type. This will only happen once for + # a subtype of a supported base type. Unlike in the C-extensions, this + # is done after trying the custom type encoder because checking for each + # subtype is expensive. + for base in _BUILT_IN_TYPES: + if not was_integer_overflow and isinstance(value, base): + func = _ENCODERS[base] + # Cache this type for faster subsequent lookup. + _ENCODERS[type(value)] = func + return func(name, value, check_keys, opts) # type: ignore + + # As a last resort, try using the fallback encoder, if the user has + # provided one. 
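+    # (The in_fallback_call flag prevents infinite recursion when the
+    # fallback encoder itself returns a value no encoder can handle.)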
+ fallback_encoder = opts.type_registry._fallback_encoder + if not in_fallback_call and fallback_encoder is not None: + return _name_value_to_bson( + name, fallback_encoder(value), check_keys, opts, in_fallback_call=True + ) + + if was_integer_overflow: + raise OverflowError("BSON can only handle up to 8-byte ints") + raise InvalidDocument(f"cannot encode object: {value!r}, of type: {type(value)!r}") + + +def _element_to_bson(key: Any, value: Any, check_keys: bool, opts: CodecOptions) -> bytes: + """Encode a single key, value pair.""" + if not isinstance(key, str): + raise InvalidDocument(f"documents must have only string keys, key was {key!r}") + if check_keys: + if key.startswith("$"): + raise InvalidDocument(f"key {key!r} must not start with '$'") + if "." in key: + raise InvalidDocument(f"key {key!r} must not contain '.'") + + name = _make_name(key) + return _name_value_to_bson(name, value, check_keys, opts) + + +def _dict_to_bson(doc: Any, check_keys: bool, opts: CodecOptions, top_level: bool = True) -> bytes: + """Encode a document to BSON.""" + if _raw_document_class(doc): + return cast(bytes, doc.raw) + try: + elements = [] + if top_level and "_id" in doc: + elements.append(_name_value_to_bson(b"_id\x00", doc["_id"], check_keys, opts)) + for key, value in doc.items(): + if not top_level or key != "_id": + elements.append(_element_to_bson(key, value, check_keys, opts)) + except AttributeError: + raise TypeError(f"encoder expected a mapping type but got: {doc!r}") + + encoded = b"".join(elements) + return _PACK_INT(len(encoded) + 5) + encoded + b"\x00" + + +if _USE_C: + _dict_to_bson = _cbson._dict_to_bson # noqa: F811 + + +_CODEC_OPTIONS_TYPE_ERROR = TypeError("codec_options must be an instance of CodecOptions") + + +def encode( + document: Mapping[str, Any], + check_keys: bool = False, + codec_options: CodecOptions = DEFAULT_CODEC_OPTIONS, +) -> bytes: + """Encode a document to BSON. + + A document can be any mapping type (like :class:`dict`). + + Raises :class:`TypeError` if `document` is not a mapping type, + or contains keys that are not instances of :class:`str`. Raises + :class:`~bson.errors.InvalidDocument` if `document` cannot be + converted to :class:`BSON`. + + :Parameters: + - `document`: mapping type representing a document + - `check_keys` (optional): check if keys start with '$' or + contain '.', raising :class:`~bson.errors.InvalidDocument` in + either case + - `codec_options` (optional): An instance of + :class:`~bson.codec_options.CodecOptions`. + + .. versionadded:: 3.9 + """ + if not isinstance(codec_options, CodecOptions): + raise _CODEC_OPTIONS_TYPE_ERROR + + return _dict_to_bson(document, check_keys, codec_options) + + +@overload +def decode(data: "_ReadableBuffer", codec_options: None = None) -> Dict[str, Any]: + ... + + +@overload +def decode( + data: "_ReadableBuffer", codec_options: "CodecOptions[_DocumentType]" +) -> "_DocumentType": + ... + + +def decode( + data: "_ReadableBuffer", codec_options: "Optional[CodecOptions[_DocumentType]]" = None +) -> Union[Dict[str, Any], "_DocumentType"]: + """Decode BSON to a document. + + By default, returns a BSON document represented as a Python + :class:`dict`. To use a different :class:`MutableMapping` class, + configure a :class:`~bson.codec_options.CodecOptions`:: + + >>> import collections # From Python standard library. 
+ >>> import bson + >>> from bson.codec_options import CodecOptions + >>> data = bson.encode({'a': 1}) + >>> decoded_doc = bson.decode(data) + <type 'dict'> + >>> options = CodecOptions(document_class=collections.OrderedDict) + >>> decoded_doc = bson.decode(data, codec_options=options) + >>> type(decoded_doc) + <class 'collections.OrderedDict'> + + :Parameters: + - `data`: the BSON to decode. Any bytes-like object that implements + the buffer protocol. + - `codec_options` (optional): An instance of + :class:`~bson.codec_options.CodecOptions`. + + .. versionadded:: 3.9 + """ + opts: CodecOptions = codec_options or DEFAULT_CODEC_OPTIONS + if not isinstance(opts, CodecOptions): + raise _CODEC_OPTIONS_TYPE_ERROR + + return _bson_to_dict(data, opts) + + +def _decode_all( + data: "_ReadableBuffer", opts: "CodecOptions[_DocumentType]" +) -> "List[_DocumentType]": + """Decode a BSON data to multiple documents.""" + data, view = get_data_and_view(data) + data_len = len(data) + docs: "List[_DocumentType]" = [] + position = 0 + end = data_len - 1 + use_raw = _raw_document_class(opts.document_class) + try: + while position < end: + obj_size = _UNPACK_INT_FROM(data, position)[0] + if data_len - position < obj_size: + raise InvalidBSON("invalid object size") + obj_end = position + obj_size - 1 + if data[obj_end] != 0: + raise InvalidBSON("bad eoo") + if use_raw: + docs.append(opts.document_class(data[position : obj_end + 1], opts)) # type: ignore + else: + docs.append(_elements_to_dict(data, view, position + 4, obj_end, opts)) + position += obj_size + return docs + except InvalidBSON: + raise + except Exception: + # Change exception type to InvalidBSON but preserve traceback. + _, exc_value, exc_tb = sys.exc_info() + raise InvalidBSON(str(exc_value)).with_traceback(exc_tb) + + +if _USE_C: + _decode_all = _cbson._decode_all # noqa: F811 + + +@overload +def decode_all(data: "_ReadableBuffer", codec_options: None = None) -> "List[Dict[str, Any]]": + ... + + +@overload +def decode_all( + data: "_ReadableBuffer", codec_options: "CodecOptions[_DocumentType]" +) -> "List[_DocumentType]": + ... + + +def decode_all( + data: "_ReadableBuffer", codec_options: "Optional[CodecOptions[_DocumentType]]" = None +) -> "Union[List[Dict[str, Any]], List[_DocumentType]]": + """Decode BSON data to multiple documents. + + `data` must be a bytes-like object implementing the buffer protocol that + provides concatenated, valid, BSON-encoded documents. + + :Parameters: + - `data`: BSON data + - `codec_options` (optional): An instance of + :class:`~bson.codec_options.CodecOptions`. + + .. versionchanged:: 3.9 + Supports bytes-like objects that implement the buffer protocol. + + .. versionchanged:: 3.0 + Removed `compile_re` option: PyMongo now always represents BSON regular + expressions as :class:`~bson.regex.Regex` objects. Use + :meth:`~bson.regex.Regex.try_compile` to attempt to convert from a + BSON regular expression to a Python regular expression object. + + Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with + `codec_options`. + """ + if codec_options is None: + return _decode_all(data, DEFAULT_CODEC_OPTIONS) + + if not isinstance(codec_options, CodecOptions): + raise _CODEC_OPTIONS_TYPE_ERROR + + return _decode_all(data, codec_options) + + +def _decode_selective(rawdoc: Any, fields: Any, codec_options: Any) -> Mapping[Any, Any]: + if _raw_document_class(codec_options.document_class): + # If document_class is RawBSONDocument, use vanilla dictionary for + # decoding command response. 
+ doc = {} + else: + # Else, use the specified document_class. + doc = codec_options.document_class() + for key, value in rawdoc.items(): + if key in fields: + if fields[key] == 1: + doc[key] = _bson_to_dict(rawdoc.raw, codec_options)[key] + else: + doc[key] = _decode_selective(value, fields[key], codec_options) + else: + doc[key] = value + return doc + + +def _array_of_documents_to_buffer(view: memoryview) -> bytes: + # Extract the raw bytes of each document. + position = 0 + _, end = _get_object_size(view, position, len(view)) + position += 4 + buffers: List[memoryview] = [] + append = buffers.append + while position < end - 1: + # Just skip the keys. + while view[position] != 0: + position += 1 + position += 1 + obj_size, _ = _get_object_size(view, position, end) + append(view[position : position + obj_size]) + position += obj_size + if position != end: + raise InvalidBSON("bad object or element length") + return b"".join(buffers) + + +if _USE_C: + _array_of_documents_to_buffer = _cbson._array_of_documents_to_buffer # noqa: F811 + + +def _convert_raw_document_lists_to_streams(document: Any) -> None: + """Convert raw array of documents to a stream of BSON documents.""" + cursor = document.get("cursor") + if not cursor: + return + for key in ("firstBatch", "nextBatch"): + batch = cursor.get(key) + if not batch: + continue + data = _array_of_documents_to_buffer(batch) + if data: + cursor[key] = [data] + else: + cursor[key] = [] + + +def _decode_all_selective(data: Any, codec_options: CodecOptions, fields: Any) -> List[Any]: + """Decode BSON data to a single document while using user-provided + custom decoding logic. + + `data` must be a string representing a valid, BSON-encoded document. + + :Parameters: + - `data`: BSON data + - `codec_options`: An instance of + :class:`~bson.codec_options.CodecOptions` with user-specified type + decoders. If no decoders are found, this method is the same as + ``decode_all``. + - `fields`: Map of document namespaces where data that needs + to be custom decoded lives or None. For example, to custom decode a + list of objects in 'field1.subfield1', the specified value should be + ``{'field1': {'subfield1': 1}}``. If ``fields`` is an empty map or + None, this method is the same as ``decode_all``. + + :Returns: + - `document_list`: Single-member list containing the decoded document. + + .. versionadded:: 3.8 + """ + if not codec_options.type_registry._decoder_map: + return decode_all(data, codec_options) + + if not fields: + return decode_all(data, codec_options.with_options(type_registry=None)) + + # Decode documents for internal use. + from bson.raw_bson import RawBSONDocument + + internal_codec_options: CodecOptions[RawBSONDocument] = codec_options.with_options( + document_class=RawBSONDocument, type_registry=None + ) + _doc = _bson_to_dict(data, internal_codec_options) + return [ + _decode_selective( + _doc, + fields, + codec_options, + ) + ] + + +@overload +def decode_iter(data: bytes, codec_options: None = None) -> "Iterator[Dict[str, Any]]": + ... + + +@overload +def decode_iter( + data: bytes, codec_options: "CodecOptions[_DocumentType]" +) -> "Iterator[_DocumentType]": + ... + + +def decode_iter( + data: bytes, codec_options: "Optional[CodecOptions[_DocumentType]]" = None +) -> "Union[Iterator[Dict[str, Any]], Iterator[_DocumentType]]": + """Decode BSON data to multiple documents as a generator. + + Works similarly to the decode_all function, but yields one document at a + time. 
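+
+    A minimal sketch, with ``payload`` as an illustrative name for two
+    concatenated BSON documents::
+
+        payload = bson.encode({"a": 1}) + bson.encode({"b": 2})
+        for doc in bson.decode_iter(payload):
+            print(doc)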
+ + `data` must be a string of concatenated, valid, BSON-encoded + documents. + + :Parameters: + - `data`: BSON data + - `codec_options` (optional): An instance of + :class:`~bson.codec_options.CodecOptions`. + + .. versionchanged:: 3.0 + Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with + `codec_options`. + + .. versionadded:: 2.8 + """ + opts = codec_options or DEFAULT_CODEC_OPTIONS + if not isinstance(opts, CodecOptions): + raise _CODEC_OPTIONS_TYPE_ERROR + + position = 0 + end = len(data) - 1 + while position < end: + obj_size = _UNPACK_INT_FROM(data, position)[0] + elements = data[position : position + obj_size] + position += obj_size + + yield _bson_to_dict(elements, opts) + + +@overload +def decode_file_iter( + file_obj: Union[BinaryIO, IO], codec_options: None = None +) -> "Iterator[Dict[str, Any]]": + ... + + +@overload +def decode_file_iter( + file_obj: Union[BinaryIO, IO], codec_options: "CodecOptions[_DocumentType]" +) -> "Iterator[_DocumentType]": + ... + + +def decode_file_iter( + file_obj: Union[BinaryIO, IO], codec_options: "Optional[CodecOptions[_DocumentType]]" = None +) -> "Union[Iterator[Dict[str, Any]], Iterator[_DocumentType]]": + """Decode bson data from a file to multiple documents as a generator. + + Works similarly to the decode_all function, but reads from the file object + in chunks and parses bson in chunks, yielding one document at a time. + + :Parameters: + - `file_obj`: A file object containing BSON data. + - `codec_options` (optional): An instance of + :class:`~bson.codec_options.CodecOptions`. + + .. versionchanged:: 3.0 + Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with + `codec_options`. + + .. versionadded:: 2.8 + """ + opts = codec_options or DEFAULT_CODEC_OPTIONS + while True: + # Read size of next object. + size_data = file_obj.read(4) + if not size_data: + break # Finished with file normally. + elif len(size_data) != 4: + raise InvalidBSON("cut off in middle of objsize") + obj_size = _UNPACK_INT_FROM(size_data, 0)[0] - 4 + elements = size_data + file_obj.read(max(0, obj_size)) + yield _bson_to_dict(elements, opts) + + +def is_valid(bson: bytes) -> bool: + """Check that the given string represents valid :class:`BSON` data. + + Raises :class:`TypeError` if `bson` is not an instance of + :class:`bytes`. Returns ``True`` + if `bson` is valid :class:`BSON`, ``False`` otherwise. + + :Parameters: + - `bson`: the data to be validated + """ + if not isinstance(bson, bytes): + raise TypeError("BSON data must be an instance of a subclass of bytes") + + try: + _bson_to_dict(bson, DEFAULT_CODEC_OPTIONS) + return True + except Exception: + return False + + +class BSON(bytes): + """BSON (Binary JSON) data. + + .. warning:: Using this class to encode and decode BSON adds a performance + cost. For better performance use the module level functions + :func:`encode` and :func:`decode` instead. + """ + + @classmethod + def encode( + cls: Type["BSON"], + document: Mapping[str, Any], + check_keys: bool = False, + codec_options: CodecOptions = DEFAULT_CODEC_OPTIONS, + ) -> "BSON": + """Encode a document to a new :class:`BSON` instance. + + A document can be any mapping type (like :class:`dict`). + + Raises :class:`TypeError` if `document` is not a mapping type, + or contains keys that are not instances of + :class:`str'. Raises :class:`~bson.errors.InvalidDocument` + if `document` cannot be converted to :class:`BSON`. 
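A streaming sketch for `decode_file_iter` and `is_valid` above, using an in-memory file object:

import io
import bson

buf = io.BytesIO(b"".join(bson.encode({"i": i}) for i in range(2)))
for doc in bson.decode_file_iter(buf):
    print(doc["i"])  # 0, then 1; only the 4-byte size is read ahead
assert bson.is_valid(bson.encode({}))
assert not bson.is_valid(b"\x00")  # bytes, but not valid BSON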
+
+        :Parameters:
+          - `document`: mapping type representing a document
+          - `check_keys` (optional): check if keys start with '$' or
+            contain '.', raising :class:`~bson.errors.InvalidDocument` in
+            either case
+          - `codec_options` (optional): An instance of
+            :class:`~bson.codec_options.CodecOptions`.
+
+        .. versionchanged:: 3.0
+           Replaced `uuid_subtype` option with `codec_options`.
+        """
+        return cls(encode(document, check_keys, codec_options))
+
+    def decode(self, codec_options: "CodecOptions[_DocumentType]" = DEFAULT_CODEC_OPTIONS) -> "_DocumentType":  # type: ignore[override,assignment]
+        """Decode this BSON data.
+
+        By default, returns a BSON document represented as a Python
+        :class:`dict`. To use a different :class:`MutableMapping` class,
+        configure a :class:`~bson.codec_options.CodecOptions`::
+
+          >>> import collections  # From Python standard library.
+          >>> import bson
+          >>> from bson.codec_options import CodecOptions
+          >>> data = bson.BSON.encode({'a': 1})
+          >>> decoded_doc = bson.BSON(data).decode()
+          >>> type(decoded_doc)
+          <class 'dict'>
+          >>> options = CodecOptions(document_class=collections.OrderedDict)
+          >>> decoded_doc = bson.BSON(data).decode(codec_options=options)
+          >>> type(decoded_doc)
+          <class 'collections.OrderedDict'>
+
+        :Parameters:
+          - `codec_options` (optional): An instance of
+            :class:`~bson.codec_options.CodecOptions`.
+
+        .. versionchanged:: 3.0
+           Removed `compile_re` option: PyMongo now always represents BSON
+           regular expressions as :class:`~bson.regex.Regex` objects. Use
+           :meth:`~bson.regex.Regex.try_compile` to attempt to convert from a
+           BSON regular expression to a Python regular expression object.
+
+           Replaced `as_class`, `tz_aware`, and `uuid_subtype` options with
+           `codec_options`.
+        """
+        return decode(self, codec_options)
+
+
+def has_c() -> bool:
+    """Is the C extension installed?"""
+    return _USE_C
+
+
+def _after_fork() -> None:
+    """Release the ObjectId lock in the child process after a fork."""
+    if ObjectId._inc_lock.locked():
+        ObjectId._inc_lock.release()
+
+
+if hasattr(os, "register_at_fork"):
+    # This runs in the same thread in which fork was called. Forking inside
+    # a critical region on this thread would break the lock state, but that
+    # is fine: fork is never called directly from a critical region.
+ os.register_at_fork(after_in_child=_after_fork) diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..08565bdeb36940297ee7d9aafd2bb90cd6b6d84b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/_helpers.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/_helpers.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0f17fd7f487bcc4cfd62621e79cc99892b8df0ac Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/_helpers.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/binary.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/binary.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..367f3e4cabc9dc0078b4a2e192ec0e4f35b94631 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/binary.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/code.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/code.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9cf4a33eddebcbf7077c7918c1b21d76cbb3eef5 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/code.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/codec_options.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/codec_options.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7e896a882af691d61b1bfefe4a9eab44ebb47493 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/codec_options.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/datetime_ms.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/datetime_ms.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2dc6eacf7b04305cf77062cbe8cfdc7afa2560d8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/datetime_ms.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/dbref.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/dbref.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..719aa6cf79f9199d537bea1752a1a2480bd5eb55 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/dbref.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/decimal128.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/decimal128.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a148501f9cd626ed099ea7e29ea10f5f3e235885 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/decimal128.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/errors.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/errors.cpython-38.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..9b24620b7cf23cfac564492e52583ede837a77de Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/errors.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/int64.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/int64.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3f7c55559b76059a7df4d0f5e4ee7271c17eed99 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/int64.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/json_util.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/json_util.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..629b8b178c7509512131153ff6c28f1a95c85674 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/json_util.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/max_key.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/max_key.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e754085ceb3f02907aeb2dda58e96fffd0c1f59 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/max_key.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/min_key.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/min_key.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..86c529efb3fdf3ae5b5688ff99843527939b37f6 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/min_key.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/objectid.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/objectid.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6dbf0aa919e7b99e040fe8ceb80387e2e225c2de Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/objectid.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/raw_bson.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/raw_bson.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ddc2b1108c71f6426a0d2ca8c351181abd70befc Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/raw_bson.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/regex.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/regex.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..86733aea7d23f2b70ebaf4d7372fa82d72947e0d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/regex.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/son.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/son.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..87226cf5e393b84738968fc61b6e98af171ec4fa Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/son.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/timestamp.cpython-38.pyc 
b/backend/test/lib/python3.8/site-packages/bson/__pycache__/timestamp.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bee89719c5d5ee51a63ca80bdc91e304c39d4fb6 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/timestamp.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/typings.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/typings.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dced80762e665e183d353332d984f7be99ee4905 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/typings.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/__pycache__/tz_util.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/bson/__pycache__/tz_util.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f09160bad1003913b2a1e7e08abab932e8396c1b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/__pycache__/tz_util.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/bson/_cbson.cpython-38-x86_64-linux-gnu.so b/backend/test/lib/python3.8/site-packages/bson/_cbson.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000000000000000000000000000000000000..be4c952b208e770989308addead4f27a24249599 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/bson/_cbson.cpython-38-x86_64-linux-gnu.so differ diff --git a/backend/test/lib/python3.8/site-packages/bson/_cbsonmodule.c b/backend/test/lib/python3.8/site-packages/bson/_cbsonmodule.c new file mode 100644 index 0000000000000000000000000000000000000000..ce5c36bf9367b786e485e0214e51acd221793a0d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/_cbsonmodule.c @@ -0,0 +1,3264 @@ +/* + * Copyright 2009-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * This file contains C implementations of some of the functions + * needed by the bson module. If possible, these implementations + * should be used to speed up BSON encoding and decoding. + */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#include "datetime.h" + +#include "buffer.h" +#include "time64.h" + +#define _CBSON_MODULE +#include "_cbsonmodule.h" + +/* New module state and initialization code. 
+ * See the module-initialization-and-state + * section in the following doc: + * http://docs.python.org/release/3.1.3/howto/cporting.html + * which references the following pep: + * http://www.python.org/dev/peps/pep-3121/ + * */ +struct module_state { + PyObject* Binary; + PyObject* Code; + PyObject* ObjectId; + PyObject* DBRef; + PyObject* Regex; + PyObject* UUID; + PyObject* Timestamp; + PyObject* MinKey; + PyObject* MaxKey; + PyObject* UTC; + PyTypeObject* REType; + PyObject* BSONInt64; + PyObject* Decimal128; + PyObject* Mapping; + PyObject* DatetimeMS; + PyObject* _min_datetime_ms; + PyObject* _max_datetime_ms; + PyObject* _type_marker_str; + PyObject* _flags_str; + PyObject* _pattern_str; + PyObject* _encoder_map_str; + PyObject* _decoder_map_str; + PyObject* _fallback_encoder_str; + PyObject* _raw_str; + PyObject* _subtype_str; + PyObject* _binary_str; + PyObject* _scope_str; + PyObject* _inc_str; + PyObject* _time_str; + PyObject* _bid_str; + PyObject* _replace_str; + PyObject* _astimezone_str; + PyObject* _id_str; + PyObject* _dollar_ref_str; + PyObject* _dollar_id_str; + PyObject* _dollar_db_str; + PyObject* _tzinfo_str; + PyObject* _as_doc_str; + PyObject* _utcoffset_str; + PyObject* _from_uuid_str; + PyObject* _as_uuid_str; + PyObject* _from_bid_str; +}; + +#define GETSTATE(m) ((struct module_state*)PyModule_GetState(m)) + +/* Maximum number of regex flags */ +#define FLAGS_SIZE 7 + +/* Default UUID representation type code. */ +#define PYTHON_LEGACY 3 + +/* Other UUID representations. */ +#define STANDARD 4 +#define JAVA_LEGACY 5 +#define CSHARP_LEGACY 6 +#define UNSPECIFIED 0 + +#define BSON_MAX_SIZE 2147483647 +/* The smallest possible BSON document, i.e. "{}" */ +#define BSON_MIN_SIZE 5 + +/* Datetime codec options */ +#define DATETIME 1 +#define DATETIME_CLAMP 2 +#define DATETIME_MS 3 +#define DATETIME_AUTO 4 + +/* Converts integer to its string representation in decimal notation. 
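The DATETIME_* codes above mirror `bson.codec_options.DatetimeConversion` on the Python side; a hedged sketch (this assumes a PyMongo new enough to ship `bson.datetime_ms`, which this vendored copy appears to include):

import bson
from bson.codec_options import CodecOptions, DatetimeConversion
from bson.datetime_ms import DatetimeMS

opts = CodecOptions(datetime_conversion=DatetimeConversion.DATETIME_AUTO)
data = bson.encode({"dt": DatetimeMS(2**60)})  # far outside datetime range
assert isinstance(bson.decode(data, codec_options=opts)["dt"], DatetimeMS)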
*/ +extern int cbson_long_long_to_str(long long num, char* str, size_t size) { + // Buffer should fit 64-bit signed integer + if (size < 21) { + PyErr_Format( + PyExc_RuntimeError, + "Buffer too small to hold long long: %d < 21", size); + return -1; + } + int index = 0; + int sign = 1; + // Convert to unsigned to handle -LLONG_MIN overflow + unsigned long long absNum; + // Handle the case of 0 + if (num == 0) { + str[index++] = '0'; + str[index] = '\0'; + return 0; + } + // Handle negative numbers + if (num < 0) { + sign = -1; + absNum = 0ULL - (unsigned long long)num; + } else { + absNum = (unsigned long long)num; + } + // Convert the number to string + unsigned long long digit; + while (absNum > 0) { + digit = absNum % 10ULL; + str[index++] = (char)digit + '0'; // Convert digit to character + absNum /= 10; + } + // Add minus sign if negative + if (sign == -1) { + str[index++] = '-'; + } + str[index] = '\0'; // Null terminator + // Reverse the string + int start = 0; + int end = index - 1; + while (start < end) { + char temp = str[start]; + str[start++] = str[end]; + str[end--] = temp; + } + return 0; +} + +static PyObject* _test_long_long_to_str(PyObject* self, PyObject* args) { + // Test extreme values + Py_ssize_t maxNum = PY_SSIZE_T_MAX; + Py_ssize_t minNum = PY_SSIZE_T_MIN; + Py_ssize_t num; + char str_1[BUF_SIZE]; + char str_2[BUF_SIZE]; + int res = LL2STR(str_1, (long long)minNum); + if (res == -1) { + return NULL; + } + INT2STRING(str_2, (long long)minNum); + if (strcmp(str_1, str_2) != 0) { + PyErr_Format( + PyExc_RuntimeError, + "LL2STR != INT2STRING: %s != %s", str_1, str_2); + return NULL; + } + LL2STR(str_1, (long long)maxNum); + INT2STRING(str_2, (long long)maxNum); + if (strcmp(str_1, str_2) != 0) { + PyErr_Format( + PyExc_RuntimeError, + "LL2STR != INT2STRING: %s != %s", str_1, str_2); + return NULL; + } + + // Test common values + for (num = 0; num < 10000; num++) { + char str_1[BUF_SIZE]; + char str_2[BUF_SIZE]; + LL2STR(str_1, (long long)num); + INT2STRING(str_2, (long long)num); + if (strcmp(str_1, str_2) != 0) { + PyErr_Format( + PyExc_RuntimeError, + "LL2STR != INT2STRING: %s != %s", str_1, str_2); + return NULL; + } + } + + return args; +} + +/* Get an error class from the bson.errors module. + * + * Returns a new ref */ +static PyObject* _error(char* name) { + PyObject* error; + PyObject* errors = PyImport_ImportModule("bson.errors"); + if (!errors) { + return NULL; + } + error = PyObject_GetAttrString(errors, name); + Py_DECREF(errors); + return error; +} + +/* Safely downcast from Py_ssize_t to int, setting an + * exception and returning -1 on error. */ +static int +_downcast_and_check(Py_ssize_t size, uint8_t extra) { + if (size > BSON_MAX_SIZE || ((BSON_MAX_SIZE - extra) < size)) { + PyObject* InvalidStringData = _error("InvalidStringData"); + if (InvalidStringData) { + PyErr_SetString(InvalidStringData, + "String length must be <= 2147483647"); + Py_DECREF(InvalidStringData); + } + return -1; + } + return (int)size + extra; +} + +static PyObject* elements_to_dict(PyObject* self, const char* string, + unsigned max, + const codec_options_t* options); + +static int _write_element_to_buffer(PyObject* self, buffer_t buffer, + int type_byte, PyObject* value, + unsigned char check_keys, + const codec_options_t* options, + unsigned char in_custom_call, + unsigned char in_fallback_call); + +/* Write a RawBSONDocument to the buffer. + * Returns the number of bytes written or 0 on failure. 
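The decimal-conversion helper `cbson_long_long_to_str` above builds digits in reverse from the unsigned magnitude (so negating LLONG_MIN cannot overflow) and then reverses the buffer; the same algorithm in Python, purely as an illustration:

def long_long_to_str(num: int) -> str:
    if num == 0:
        return "0"
    # Python ints cannot overflow, so -num is safe even for -2**63.
    sign, mag = ("-", -num) if num < 0 else ("", num)
    digits = []
    while mag:
        digits.append(chr(ord("0") + mag % 10))
        mag //= 10
    return sign + "".join(reversed(digits))

assert long_long_to_str(-2**63) == "-9223372036854775808"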
+ */ +static int write_raw_doc(buffer_t buffer, PyObject* raw, PyObject* _raw); + +/* Date stuff */ +static PyObject* datetime_from_millis(long long millis) { + /* To encode a datetime instance like datetime(9999, 12, 31, 23, 59, 59, 999999) + * we follow these steps: + * 1. Calculate a timestamp in seconds: 253402300799 + * 2. Multiply that by 1000: 253402300799000 + * 3. Add in microseconds divided by 1000 253402300799999 + * + * (Note: BSON doesn't support microsecond accuracy, hence the rounding.) + * + * To decode we could do: + * 1. Get seconds: timestamp / 1000: 253402300799 + * 2. Get micros: (timestamp % 1000) * 1000: 999000 + * Resulting in datetime(9999, 12, 31, 23, 59, 59, 999000) -- the expected result + * + * Now what if the we encode (1, 1, 1, 1, 1, 1, 111111)? + * 1. and 2. gives: -62135593139000 + * 3. Gives us: -62135593138889 + * + * Now decode: + * 1. Gives us: -62135593138 + * 2. Gives us: -889000 + * Resulting in datetime(1, 1, 1, 1, 1, 2, 15888216) -- an invalid result + * + * If instead to decode we do: + * diff = ((millis % 1000) + 1000) % 1000: 111 + * seconds = (millis - diff) / 1000: -62135593139 + * micros = diff * 1000 111000 + * Resulting in datetime(1, 1, 1, 1, 1, 1, 111000) -- the expected result + */ + int diff = (int)(((millis % 1000) + 1000) % 1000); + int microseconds = diff * 1000; + Time64_T seconds = (millis - diff) / 1000; + struct TM timeinfo; + cbson_gmtime64_r(&seconds, &timeinfo); + + return PyDateTime_FromDateAndTime(timeinfo.tm_year + 1900, + timeinfo.tm_mon + 1, + timeinfo.tm_mday, + timeinfo.tm_hour, + timeinfo.tm_min, + timeinfo.tm_sec, + microseconds); +} + +static long long millis_from_datetime(PyObject* datetime) { + struct TM timeinfo; + long long millis; + + timeinfo.tm_year = PyDateTime_GET_YEAR(datetime) - 1900; + timeinfo.tm_mon = PyDateTime_GET_MONTH(datetime) - 1; + timeinfo.tm_mday = PyDateTime_GET_DAY(datetime); + timeinfo.tm_hour = PyDateTime_DATE_GET_HOUR(datetime); + timeinfo.tm_min = PyDateTime_DATE_GET_MINUTE(datetime); + timeinfo.tm_sec = PyDateTime_DATE_GET_SECOND(datetime); + + millis = cbson_timegm64(&timeinfo) * 1000; + millis += PyDateTime_DATE_GET_MICROSECOND(datetime) / 1000; + return millis; +} + +/* Extended-range datetime, returns a DatetimeMS object with millis */ +static PyObject* datetime_ms_from_millis(PyObject* self, long long millis){ + // Allocate a new DatetimeMS object. + struct module_state *state = GETSTATE(self); + + PyObject* dt; + PyObject* ll_millis; + + if (!(ll_millis = PyLong_FromLongLong(millis))){ + return NULL; + } + dt = PyObject_CallFunctionObjArgs(state->DatetimeMS, ll_millis, NULL); + Py_DECREF(ll_millis); + return dt; +} + +/* Extended-range datetime, takes a DatetimeMS object and extracts the long long value. */ +static int millis_from_datetime_ms(PyObject* dt, long long* out){ + PyObject* ll_millis; + long long millis; + + if (!(ll_millis = PyNumber_Long(dt))){ + return 0; + } + millis = PyLong_AsLongLong(ll_millis); + Py_DECREF(ll_millis); + if (millis == -1 && PyErr_Occurred()) { /* Overflow */ + PyErr_SetString(PyExc_OverflowError, + "MongoDB datetimes can only handle up to 8-byte ints"); + return 0; + } + *out = millis; + return 1; +} + +/* Just make this compatible w/ the old API. 
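The worked arithmetic in the comment above is a floor-division trick that keeps negative millisecond values (pre-1970 datetimes) decoding correctly; restated in Python as an illustration of the math, not the library's implementation:

import datetime

def dt_from_millis(millis: int) -> datetime.datetime:
    diff = ((millis % 1000) + 1000) % 1000      # always in [0, 1000)
    seconds, micros = (millis - diff) // 1000, diff * 1000
    return datetime.datetime(1970, 1, 1) + datetime.timedelta(
        seconds=seconds, microseconds=micros)

# The example from the comment: encoding datetime(1, 1, 1, 1, 1, 1, 111111)
assert dt_from_millis(-62135593138889) == datetime.datetime(1, 1, 1, 1, 1, 1, 111000)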
*/ +int buffer_write_bytes(buffer_t buffer, const char* data, int size) { + if (pymongo_buffer_write(buffer, data, size)) { + return 0; + } + return 1; +} + +int buffer_write_double(buffer_t buffer, double data) { + double data_le = BSON_DOUBLE_TO_LE(data); + return buffer_write_bytes(buffer, (const char*)&data_le, 8); +} + +int buffer_write_int32(buffer_t buffer, int32_t data) { + uint32_t data_le = BSON_UINT32_TO_LE(data); + return buffer_write_bytes(buffer, (const char*)&data_le, 4); +} + +int buffer_write_int64(buffer_t buffer, int64_t data) { + uint64_t data_le = BSON_UINT64_TO_LE(data); + return buffer_write_bytes(buffer, (const char*)&data_le, 8); +} + +void buffer_write_int32_at_position(buffer_t buffer, + int position, + int32_t data) { + uint32_t data_le = BSON_UINT32_TO_LE(data); + memcpy(pymongo_buffer_get_buffer(buffer) + position, &data_le, 4); +} + +static int write_unicode(buffer_t buffer, PyObject* py_string) { + int size; + const char* data; + PyObject* encoded = PyUnicode_AsUTF8String(py_string); + if (!encoded) { + return 0; + } + data = PyBytes_AS_STRING(encoded); + if (!data) + goto unicodefail; + + if ((size = _downcast_and_check(PyBytes_GET_SIZE(encoded), 1)) == -1) + goto unicodefail; + + if (!buffer_write_int32(buffer, (int32_t)size)) + goto unicodefail; + + if (!buffer_write_bytes(buffer, data, size)) + goto unicodefail; + + Py_DECREF(encoded); + return 1; + +unicodefail: + Py_DECREF(encoded); + return 0; +} + +/* returns 0 on failure */ +static int write_string(buffer_t buffer, PyObject* py_string) { + int size; + const char* data; + if (PyUnicode_Check(py_string)){ + return write_unicode(buffer, py_string); + } + data = PyBytes_AsString(py_string); + if (!data) { + return 0; + } + + if ((size = _downcast_and_check(PyBytes_Size(py_string), 1)) == -1) + return 0; + + if (!buffer_write_int32(buffer, (int32_t)size)) { + return 0; + } + if (!buffer_write_bytes(buffer, data, size)) { + return 0; + } + return 1; +} + +/* + * Are we in the main interpreter or a sub-interpreter? + * Useful for deciding if we can use cached pure python + * types in mod_wsgi. + */ +static int +_in_main_interpreter(void) { + static PyInterpreterState* main_interpreter = NULL; + PyInterpreterState* interpreter; + + if (main_interpreter == NULL) { + interpreter = PyInterpreterState_Head(); + + while (PyInterpreterState_Next(interpreter)) + interpreter = PyInterpreterState_Next(interpreter); + + main_interpreter = interpreter; + } + + return (main_interpreter == PyThreadState_Get()->interp); +} + +/* + * Get a reference to a pure python type. If we are in the + * main interpreter return the cached object, otherwise import + * the object we need and return it instead. + */ +static PyObject* +_get_object(PyObject* object, char* module_name, char* object_name) { + if (_in_main_interpreter()) { + Py_XINCREF(object); + return object; + } else { + PyObject* imported = NULL; + PyObject* module = PyImport_ImportModule(module_name); + if (!module) + return NULL; + imported = PyObject_GetAttrString(module, object_name); + Py_DECREF(module); + return imported; + } +} + +/* Load a Python object to cache. + * + * Returns non-zero on failure. */ +static int _load_object(PyObject** object, char* module_name, char* object_name) { + PyObject* module; + + module = PyImport_ImportModule(module_name); + if (!module) { + return 1; + } + + *object = PyObject_GetAttrString(module, object_name); + Py_DECREF(module); + + return (*object) ? 0 : 2; +} + +/* Load all Python objects to cache. 
+ * + * Returns non-zero on failure. */ +static int _load_python_objects(PyObject* module) { + PyObject* empty_string = NULL; + PyObject* re_compile = NULL; + PyObject* compiled = NULL; + struct module_state *state = GETSTATE(module); + + /* Cache commonly used attribute names to improve performance. */ + if (!((state->_type_marker_str = PyUnicode_FromString("_type_marker")) && + (state->_flags_str = PyUnicode_FromString("flags")) && + (state->_pattern_str = PyUnicode_FromString("pattern")) && + (state->_encoder_map_str = PyUnicode_FromString("_encoder_map")) && + (state->_decoder_map_str = PyUnicode_FromString("_decoder_map")) && + (state->_fallback_encoder_str = PyUnicode_FromString("_fallback_encoder")) && + (state->_raw_str = PyUnicode_FromString("raw")) && + (state->_subtype_str = PyUnicode_FromString("subtype")) && + (state->_binary_str = PyUnicode_FromString("binary")) && + (state->_scope_str = PyUnicode_FromString("scope")) && + (state->_inc_str = PyUnicode_FromString("inc")) && + (state->_time_str = PyUnicode_FromString("time")) && + (state->_bid_str = PyUnicode_FromString("bid")) && + (state->_replace_str = PyUnicode_FromString("replace")) && + (state->_astimezone_str = PyUnicode_FromString("astimezone")) && + (state->_id_str = PyUnicode_FromString("_id")) && + (state->_dollar_ref_str = PyUnicode_FromString("$ref")) && + (state->_dollar_id_str = PyUnicode_FromString("$id")) && + (state->_dollar_db_str = PyUnicode_FromString("$db")) && + (state->_tzinfo_str = PyUnicode_FromString("tzinfo")) && + (state->_as_doc_str = PyUnicode_FromString("as_doc")) && + (state->_utcoffset_str = PyUnicode_FromString("utcoffset")) && + (state->_from_uuid_str = PyUnicode_FromString("from_uuid")) && + (state->_as_uuid_str = PyUnicode_FromString("as_uuid")) && + (state->_from_bid_str = PyUnicode_FromString("from_bid")))) { + return 1; + } + + if (_load_object(&state->Binary, "bson.binary", "Binary") || + _load_object(&state->Code, "bson.code", "Code") || + _load_object(&state->ObjectId, "bson.objectid", "ObjectId") || + _load_object(&state->DBRef, "bson.dbref", "DBRef") || + _load_object(&state->Timestamp, "bson.timestamp", "Timestamp") || + _load_object(&state->MinKey, "bson.min_key", "MinKey") || + _load_object(&state->MaxKey, "bson.max_key", "MaxKey") || + _load_object(&state->UTC, "bson.tz_util", "utc") || + _load_object(&state->Regex, "bson.regex", "Regex") || + _load_object(&state->BSONInt64, "bson.int64", "Int64") || + _load_object(&state->Decimal128, "bson.decimal128", "Decimal128") || + _load_object(&state->UUID, "uuid", "UUID") || + _load_object(&state->Mapping, "collections.abc", "Mapping") || + _load_object(&state->DatetimeMS, "bson.datetime_ms", "DatetimeMS") || + _load_object(&state->_min_datetime_ms, "bson.datetime_ms", "_min_datetime_ms") || + _load_object(&state->_max_datetime_ms, "bson.datetime_ms", "_max_datetime_ms")) { + return 1; + } + /* Reload our REType hack too. */ + empty_string = PyBytes_FromString(""); + if (empty_string == NULL) { + state->REType = NULL; + return 1; + } + + if (_load_object(&re_compile, "re", "compile")) { + state->REType = NULL; + Py_DECREF(empty_string); + return 1; + } + + compiled = PyObject_CallFunction(re_compile, "O", empty_string); + Py_DECREF(re_compile); + if (compiled == NULL) { + state->REType = NULL; + Py_DECREF(empty_string); + return 1; + } + Py_INCREF(Py_TYPE(compiled)); + state->REType = Py_TYPE(compiled); + Py_DECREF(empty_string); + Py_DECREF(compiled); + return 0; +} + +/* + * Get the _type_marker from an Object. 
+ * + * Return the type marker, 0 if there is no marker, or -1 on failure. + */ +static long _type_marker(PyObject* object, PyObject* _type_marker_str) { + PyObject* type_marker = NULL; + long type = 0; + + if (PyObject_HasAttr(object, _type_marker_str)) { + type_marker = PyObject_GetAttr(object, _type_marker_str); + if (type_marker == NULL) { + return -1; + } + } + + /* + * Python objects with broken __getattr__ implementations could return + * arbitrary types for a call to PyObject_GetAttrString. For example + * pymongo.database.Database returns a new Collection instance for + * __getattr__ calls with names that don't match an existing attribute + * or method. In some cases "value" could be a subtype of something + * we know how to serialize. Make a best effort to encode these types. + */ + if (type_marker && PyLong_CheckExact(type_marker)) { + type = PyLong_AsLong(type_marker); + Py_DECREF(type_marker); + } else { + Py_XDECREF(type_marker); + } + + return type; +} + +/* Fill out a type_registry_t* from a TypeRegistry object. + * + * Return 1 on success. options->document_class is a new reference. + * Return 0 on failure. + */ +int cbson_convert_type_registry(PyObject* registry_obj, type_registry_t* registry, PyObject* _encoder_map_str, PyObject* _decoder_map_str, PyObject* _fallback_encoder_str) { + registry->encoder_map = NULL; + registry->decoder_map = NULL; + registry->fallback_encoder = NULL; + registry->registry_obj = NULL; + + registry->encoder_map = PyObject_GetAttr(registry_obj, _encoder_map_str); + if (registry->encoder_map == NULL) { + goto fail; + } + registry->is_encoder_empty = (PyDict_Size(registry->encoder_map) == 0); + + registry->decoder_map = PyObject_GetAttr(registry_obj, _decoder_map_str); + if (registry->decoder_map == NULL) { + goto fail; + } + registry->is_decoder_empty = (PyDict_Size(registry->decoder_map) == 0); + + registry->fallback_encoder = PyObject_GetAttr(registry_obj, _fallback_encoder_str); + if (registry->fallback_encoder == NULL) { + goto fail; + } + registry->has_fallback_encoder = (registry->fallback_encoder != Py_None); + + registry->registry_obj = registry_obj; + Py_INCREF(registry->registry_obj); + return 1; + +fail: + Py_XDECREF(registry->encoder_map); + Py_XDECREF(registry->decoder_map); + Py_XDECREF(registry->fallback_encoder); + return 0; +} + +/* Fill out a codec_options_t* from a CodecOptions object. + * + * Return 1 on success. options->document_class is a new reference. + * Return 0 on failure. 
+ */ +int convert_codec_options(PyObject* self, PyObject* options_obj, codec_options_t* options) { + PyObject* type_registry_obj = NULL; + struct module_state *state = GETSTATE(self); + long type_marker; + + options->unicode_decode_error_handler = NULL; + + if (!PyArg_ParseTuple(options_obj, "ObbzOOb", + &options->document_class, + &options->tz_aware, + &options->uuid_rep, + &options->unicode_decode_error_handler, + &options->tzinfo, + &type_registry_obj, + &options->datetime_conversion)) { + return 0; + } + + type_marker = _type_marker(options->document_class, + state->_type_marker_str); + if (type_marker < 0) { + return 0; + } + + if (!cbson_convert_type_registry(type_registry_obj, + &options->type_registry, state->_encoder_map_str, state->_decoder_map_str, state->_fallback_encoder_str)) { + return 0; + } + + options->is_raw_bson = (101 == type_marker); + options->options_obj = options_obj; + + Py_INCREF(options->options_obj); + Py_INCREF(options->document_class); + Py_INCREF(options->tzinfo); + + return 1; +} + +void destroy_codec_options(codec_options_t* options) { + Py_CLEAR(options->document_class); + Py_CLEAR(options->tzinfo); + Py_CLEAR(options->options_obj); + Py_CLEAR(options->type_registry.registry_obj); + Py_CLEAR(options->type_registry.encoder_map); + Py_CLEAR(options->type_registry.decoder_map); + Py_CLEAR(options->type_registry.fallback_encoder); +} + +static int write_element_to_buffer(PyObject* self, buffer_t buffer, + int type_byte, PyObject* value, + unsigned char check_keys, + const codec_options_t* options, + unsigned char in_custom_call, + unsigned char in_fallback_call) { + int result = 0; + if(Py_EnterRecursiveCall(" while encoding an object to BSON ")) { + return 0; + } + result = _write_element_to_buffer(self, buffer, type_byte, + value, check_keys, options, + in_custom_call, in_fallback_call); + Py_LeaveRecursiveCall(); + return result; +} + +static void +_set_cannot_encode(PyObject* value) { + if (PyLong_Check(value)) { + if ((PyLong_AsLongLong(value) == -1) && PyErr_Occurred()) { + return PyErr_SetString(PyExc_OverflowError, + "MongoDB can only handle up to 8-byte ints"); + } + } + + PyObject* type = NULL; + PyObject* InvalidDocument = _error("InvalidDocument"); + if (InvalidDocument == NULL) { + goto error; + } + + type = PyObject_Type(value); + if (type == NULL) { + goto error; + } + PyErr_Format(InvalidDocument, "cannot encode object: %R, of type: %R", + value, type); +error: + Py_XDECREF(type); + Py_XDECREF(InvalidDocument); +} + +/* + * Encode a builtin Python regular expression or our custom Regex class. + * + * Sets exception and returns 0 on failure. + */ +static int _write_regex_to_buffer( + buffer_t buffer, int type_byte, PyObject* value, PyObject* _flags_str, PyObject* _pattern_str) { + + PyObject* py_flags; + PyObject* py_pattern; + PyObject* encoded_pattern; + PyObject* decoded_pattern; + long int_flags; + char flags[FLAGS_SIZE]; + char check_utf8 = 0; + const char* pattern_data; + int pattern_length, flags_length; + + /* + * Both the builtin re type and our Regex class have attributes + * "flags" and "pattern". 
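`write_element_to_buffer` above wraps the real writer in `Py_EnterRecursiveCall`, which surfaces in Python as a `RecursionError` for self-referential documents; a sketch of the observable behavior:

import bson

doc = {}
doc["self"] = doc  # a cycle no encoder can terminate
try:
    bson.encode(doc)
except RecursionError:
    print("cycle rejected by the recursion guard")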
+ */ + py_flags = PyObject_GetAttr(value, _flags_str); + if (!py_flags) { + return 0; + } + int_flags = PyLong_AsLong(py_flags); + Py_DECREF(py_flags); + if (int_flags == -1 && PyErr_Occurred()) { + return 0; + } + py_pattern = PyObject_GetAttr(value, _pattern_str); + if (!py_pattern) { + return 0; + } + + if (PyUnicode_Check(py_pattern)) { + encoded_pattern = PyUnicode_AsUTF8String(py_pattern); + Py_DECREF(py_pattern); + if (!encoded_pattern) { + return 0; + } + } else { + encoded_pattern = py_pattern; + check_utf8 = 1; + } + + if (!(pattern_data = PyBytes_AsString(encoded_pattern))) { + Py_DECREF(encoded_pattern); + return 0; + } + if ((pattern_length = _downcast_and_check(PyBytes_Size(encoded_pattern), 0)) == -1) { + Py_DECREF(encoded_pattern); + return 0; + } + + if (strlen(pattern_data) != (size_t) pattern_length){ + PyObject* InvalidDocument = _error("InvalidDocument"); + if (InvalidDocument) { + PyErr_SetString(InvalidDocument, + "regex patterns must not contain the NULL byte"); + Py_DECREF(InvalidDocument); + } + Py_DECREF(encoded_pattern); + return 0; + } + + if (check_utf8) { + decoded_pattern = PyUnicode_DecodeUTF8(pattern_data, (Py_ssize_t) pattern_length, NULL); + if (decoded_pattern == NULL) { + PyErr_Clear(); + PyObject* InvalidStringData = _error("InvalidStringData"); + if (InvalidStringData) { + PyErr_SetString(InvalidStringData, + "regex patterns must be valid UTF-8"); + Py_DECREF(InvalidStringData); + } + Py_DECREF(encoded_pattern); + return 0; + } + Py_DECREF(decoded_pattern); + } + + if (!buffer_write_bytes(buffer, pattern_data, pattern_length + 1)) { + Py_DECREF(encoded_pattern); + return 0; + } + Py_DECREF(encoded_pattern); + + flags[0] = 0; + + if (int_flags & 2) { + STRCAT(flags, FLAGS_SIZE, "i"); + } + if (int_flags & 4) { + STRCAT(flags, FLAGS_SIZE, "l"); + } + if (int_flags & 8) { + STRCAT(flags, FLAGS_SIZE, "m"); + } + if (int_flags & 16) { + STRCAT(flags, FLAGS_SIZE, "s"); + } + if (int_flags & 32) { + STRCAT(flags, FLAGS_SIZE, "u"); + } + if (int_flags & 64) { + STRCAT(flags, FLAGS_SIZE, "x"); + } + flags_length = (int)strlen(flags) + 1; + if (!buffer_write_bytes(buffer, flags, flags_length)) { + return 0; + } + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x0B; + return 1; +} + +/* Write a single value to the buffer (also write its type_byte, for which + * space has already been reserved. + * + * returns 0 on failure */ +static int _write_element_to_buffer(PyObject* self, buffer_t buffer, + int type_byte, PyObject* value, + unsigned char check_keys, + const codec_options_t* options, + unsigned char in_custom_call, + unsigned char in_fallback_call) { + struct module_state *state = GETSTATE(self); + PyObject* mapping_type; + PyObject* new_value = NULL; + int retval; + PyObject* uuid_type; + int is_list; + /* + * Don't use PyObject_IsInstance for our custom types. It causes + * problems with python sub interpreters. Our custom types should + * have a _type_marker attribute, which we can switch on instead. 
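The STRCAT chain above maps Python `re` flag bits to BSON's flag characters; the numeric values 2, 4, 8, 16, 32, 64 it tests are exactly `re.I`, `re.L`, `re.M`, `re.S`, `re.U`, `re.X`, which can be restated in Python:

import re

def bson_regex_flags(int_flags: int) -> str:
    pairs = [(re.I, "i"), (re.L, "l"), (re.M, "m"),
             (re.S, "s"), (re.U, "u"), (re.X, "x")]
    return "".join(ch for flag, ch in pairs if int_flags & flag)

assert bson_regex_flags(re.I | re.M) == "im"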
+ */ + long type = _type_marker(value, state->_type_marker_str); + if (type < 0) { + return 0; + } + + switch (type) { + case 5: + { + /* Binary */ + PyObject* subtype_object; + char subtype; + const char* data; + int size; + + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x05; + subtype_object = PyObject_GetAttr(value, state->_subtype_str); + if (!subtype_object) { + return 0; + } + subtype = (char)PyLong_AsLong(subtype_object); + if (subtype == -1) { + Py_DECREF(subtype_object); + return 0; + } + size = _downcast_and_check(PyBytes_Size(value), 0); + if (size == -1) { + Py_DECREF(subtype_object); + return 0; + } + + Py_DECREF(subtype_object); + if (subtype == 2) { + int other_size = _downcast_and_check(PyBytes_Size(value), 4); + if (other_size == -1) + return 0; + if (!buffer_write_int32(buffer, other_size)) { + return 0; + } + if (!buffer_write_bytes(buffer, &subtype, 1)) { + return 0; + } + } + if (!buffer_write_int32(buffer, size)) { + return 0; + } + if (subtype != 2) { + if (!buffer_write_bytes(buffer, &subtype, 1)) { + return 0; + } + } + data = PyBytes_AsString(value); + if (!data) { + return 0; + } + if (!buffer_write_bytes(buffer, data, size)) { + return 0; + } + return 1; + } + case 7: + { + /* ObjectId */ + const char* data; + PyObject* pystring = PyObject_GetAttr(value, state->_binary_str); + if (!pystring) { + return 0; + } + data = PyBytes_AsString(pystring); + if (!data) { + Py_DECREF(pystring); + return 0; + } + if (!buffer_write_bytes(buffer, data, 12)) { + Py_DECREF(pystring); + return 0; + } + Py_DECREF(pystring); + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x07; + return 1; + } + case 11: + { + /* Regex */ + return _write_regex_to_buffer(buffer, type_byte, value, state->_flags_str, state->_pattern_str); + } + case 13: + { + /* Code */ + int start_position, + length_location, + length; + + PyObject* scope = PyObject_GetAttr(value, state->_scope_str); + if (!scope) { + return 0; + } + + if (scope == Py_None) { + Py_DECREF(scope); + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x0D; + return write_string(buffer, value); + } + + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x0F; + + start_position = pymongo_buffer_get_position(buffer); + /* save space for length */ + length_location = pymongo_buffer_save_space(buffer, 4); + if (length_location == -1) { + Py_DECREF(scope); + return 0; + } + + if (!write_string(buffer, value)) { + Py_DECREF(scope); + return 0; + } + + if (!write_dict(self, buffer, scope, 0, options, 0)) { + Py_DECREF(scope); + return 0; + } + Py_DECREF(scope); + + length = pymongo_buffer_get_position(buffer) - start_position; + buffer_write_int32_at_position( + buffer, length_location, (int32_t)length); + return 1; + } + case 17: + { + /* Timestamp */ + PyObject* obj; + unsigned long i; + + obj = PyObject_GetAttr(value, state->_inc_str); + if (!obj) { + return 0; + } + i = PyLong_AsUnsignedLong(obj); + Py_DECREF(obj); + if (i == (unsigned long)-1 && PyErr_Occurred()) { + return 0; + } + if (!buffer_write_int32(buffer, (int32_t)i)) { + return 0; + } + + obj = PyObject_GetAttr(value, state->_time_str); + if (!obj) { + return 0; + } + i = PyLong_AsUnsignedLong(obj); + Py_DECREF(obj); + if (i == (unsigned long)-1 && PyErr_Occurred()) { + return 0; + } + if (!buffer_write_int32(buffer, (int32_t)i)) { + return 0; + } + + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x11; + return 1; + } + case 18: + { + /* Int64 */ + const long long ll = PyLong_AsLongLong(value); + if (PyErr_Occurred()) { /* Overflow */ + 
PyErr_SetString(PyExc_OverflowError, + "MongoDB can only handle up to 8-byte ints"); + return 0; + } + if (!buffer_write_int64(buffer, (int64_t)ll)) { + return 0; + } + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x12; + return 1; + } + case 19: + { + /* Decimal128 */ + const char* data; + PyObject* pystring = PyObject_GetAttr(value, state->_bid_str); + if (!pystring) { + return 0; + } + data = PyBytes_AsString(pystring); + if (!data) { + Py_DECREF(pystring); + return 0; + } + if (!buffer_write_bytes(buffer, data, 16)) { + Py_DECREF(pystring); + return 0; + } + Py_DECREF(pystring); + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x13; + return 1; + } + case 100: + { + /* DBRef */ + PyObject* as_doc = PyObject_CallMethodObjArgs(value, state->_as_doc_str, NULL); + if (!as_doc) { + return 0; + } + if (!write_dict(self, buffer, as_doc, 0, options, 0)) { + Py_DECREF(as_doc); + return 0; + } + Py_DECREF(as_doc); + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x03; + return 1; + } + case 101: + { + /* RawBSONDocument */ + if (!write_raw_doc(buffer, value, state->_raw_str)) { + return 0; + } + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x03; + return 1; + } + case 255: + { + /* MinKey */ + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0xFF; + return 1; + } + case 127: + { + /* MaxKey */ + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x7F; + return 1; + } + } + + /* No _type_marker attribute or not one of our types. */ + + if (PyBool_Check(value)) { + const char c = (value == Py_True) ? 0x01 : 0x00; + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x08; + return buffer_write_bytes(buffer, &c, 1); + } + else if (PyLong_Check(value)) { + const long long long_long_value = PyLong_AsLongLong(value); + if (long_long_value == -1 && PyErr_Occurred()) { + /* Ignore error and give the fallback_encoder a chance. 
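The case labels in the switch above (5, 7, 11, 13, 17, 18, 19, 100, 101, 255, 127) come from the private `_type_marker` class attribute that PyMongo's BSON wrapper types expose; observable from Python:

from bson.objectid import ObjectId
from bson.int64 import Int64
from bson.raw_bson import RawBSONDocument

print(ObjectId._type_marker, Int64._type_marker, RawBSONDocument._type_marker)
# 7 18 101 -- the values _write_element_to_buffer switches on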
*/ + PyErr_Clear(); + } else if (-2147483648LL <= long_long_value && long_long_value <= 2147483647LL) { + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x10; + return buffer_write_int32(buffer, (int32_t)long_long_value); + } else { + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x12; + return buffer_write_int64(buffer, (int64_t)long_long_value); + } + } else if (PyFloat_Check(value)) { + const double d = PyFloat_AsDouble(value); + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x01; + return buffer_write_double(buffer, d); + } else if (value == Py_None) { + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x0A; + return 1; + } else if (PyDict_Check(value)) { + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x03; + return write_dict(self, buffer, value, check_keys, options, 0); + } else if ((is_list = PyList_Check(value)) || PyTuple_Check(value)) { + Py_ssize_t items, i; + int start_position, + length_location, + length; + char zero = 0; + + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x04; + start_position = pymongo_buffer_get_position(buffer); + + /* save space for length */ + length_location = pymongo_buffer_save_space(buffer, 4); + if (length_location == -1) { + return 0; + } + if (is_list) { + items = PyList_Size(value); + } else { + items = PyTuple_Size(value); + } + if (items > BSON_MAX_SIZE) { + PyObject* BSONError = _error("BSONError"); + if (BSONError) { + PyErr_SetString(BSONError, + "Too many items to serialize."); + Py_DECREF(BSONError); + } + return 0; + } + for(i = 0; i < items; i++) { + int list_type_byte = pymongo_buffer_save_space(buffer, 1); + char name[BUF_SIZE]; + PyObject* item_value; + + if (list_type_byte == -1) { + return 0; + } + int res = LL2STR(name, (long long)i); + if (res == -1) { + return 0; + } + if (!buffer_write_bytes(buffer, name, (int)strlen(name) + 1)) { + return 0; + } + if (is_list) { + item_value = PyList_GET_ITEM(value, i); + } else { + item_value = PyTuple_GET_ITEM(value, i); + } + if (!item_value) { + return 0; + } + if (!write_element_to_buffer(self, buffer, list_type_byte, + item_value, check_keys, options, + 0, 0)) { + return 0; + } + } + + /* write null byte and fill in length */ + if (!buffer_write_bytes(buffer, &zero, 1)) { + return 0; + } + length = pymongo_buffer_get_position(buffer) - start_position; + buffer_write_int32_at_position( + buffer, length_location, (int32_t)length); + return 1; + /* Python3 special case. Store bytes as BSON binary subtype 0. 
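The range check above picks BSON int32 (type byte 0x10) for values that fit in 32 bits and int64 (0x12) otherwise; since the element type byte is the fifth byte of a one-element document (right after the 4-byte length), this is easy to see from Python:

import bson

def elem_type_byte(value):
    return bson.encode({"n": value})[4]

assert elem_type_byte(1) == 0x10        # int32
assert elem_type_byte(2**31) == 0x12    # int64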
*/ + } else if (PyBytes_Check(value)) { + char subtype = 0; + int size; + const char* data = PyBytes_AS_STRING(value); + if (!data) + return 0; + if ((size = _downcast_and_check(PyBytes_GET_SIZE(value), 0)) == -1) + return 0; + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x05; + if (!buffer_write_int32(buffer, (int32_t)size)) { + return 0; + } + if (!buffer_write_bytes(buffer, &subtype, 1)) { + return 0; + } + if (!buffer_write_bytes(buffer, data, size)) { + return 0; + } + return 1; + } else if (PyUnicode_Check(value)) { + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x02; + return write_unicode(buffer, value); + } else if (PyDateTime_Check(value)) { + long long millis; + PyObject* utcoffset = PyObject_CallMethodObjArgs(value, state->_utcoffset_str , NULL); + if (utcoffset == NULL) + return 0; + if (utcoffset != Py_None) { + PyObject* result = PyNumber_Subtract(value, utcoffset); + Py_DECREF(utcoffset); + if (!result) { + return 0; + } + millis = millis_from_datetime(result); + Py_DECREF(result); + } else { + millis = millis_from_datetime(value); + } + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x09; + return buffer_write_int64(buffer, (int64_t)millis); + } else if (PyObject_TypeCheck(value, (PyTypeObject *) state->DatetimeMS)) { + long long millis; + if (!millis_from_datetime_ms(value, &millis)) { + return 0; + } + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x09; + return buffer_write_int64(buffer, (int64_t)millis); + } else if (PyObject_TypeCheck(value, state->REType)) { + return _write_regex_to_buffer(buffer, type_byte, value, state->_flags_str, state->_pattern_str); + } + + /* + * Try Mapping and UUID last since we have to import + * them if we're in a sub-interpreter. + */ + mapping_type = _get_object(state->Mapping, "collections.abc", "Mapping"); + if (mapping_type && PyObject_IsInstance(value, mapping_type)) { + Py_DECREF(mapping_type); + /* PyObject_IsInstance returns -1 on error */ + if (PyErr_Occurred()) { + return 0; + } + *(pymongo_buffer_get_buffer(buffer) + type_byte) = 0x03; + return write_dict(self, buffer, value, check_keys, options, 0); + } + + uuid_type = _get_object(state->UUID, "uuid", "UUID"); + if (uuid_type && PyObject_IsInstance(value, uuid_type)) { + PyObject* binary_type = NULL; + PyObject* binary_value = NULL; + PyObject *uuid_rep_obj = NULL; + int result; + + Py_DECREF(uuid_type); + /* PyObject_IsInstance returns -1 on error */ + if (PyErr_Occurred()) { + return 0; + } + + binary_type = _get_object(state->Binary, "bson", "Binary"); + if (binary_type == NULL) { + return 0; + } + + if (!(uuid_rep_obj = PyLong_FromLong(options->uuid_rep))) { + return 0; + } + binary_value = PyObject_CallMethodObjArgs(binary_type, state->_from_uuid_str, value, uuid_rep_obj, NULL); + Py_DECREF(uuid_rep_obj); + + if (binary_value == NULL) { + Py_DECREF(binary_type); + return 0; + } + + result = _write_element_to_buffer(self, buffer, + type_byte, binary_value, + check_keys, options, + in_custom_call, + in_fallback_call); + Py_DECREF(binary_type); + Py_DECREF(binary_value); + return result; + } + Py_XDECREF(mapping_type); + Py_XDECREF(uuid_type); + + /* Try a custom encoder if one is provided and we have not already + * attempted to use a type encoder. 
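The UUID branch above routes through `Binary.from_uuid` with the configured `uuid_rep`; from Python, encoding a bare `uuid.UUID` therefore needs a `uuid_representation` (with the default UNSPECIFIED, PyMongo 4 refuses to encode it). A sketch using the standard representation:

import uuid
import bson
from bson.binary import UuidRepresentation
from bson.codec_options import CodecOptions

u = uuid.uuid4()
opts = CodecOptions(uuid_representation=UuidRepresentation.STANDARD)
data = bson.encode({"u": u}, codec_options=opts)
assert bson.decode(data, codec_options=opts)["u"] == u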
*/ + if (!in_custom_call && !options->type_registry.is_encoder_empty) { + PyObject* value_type = NULL; + PyObject* converter = NULL; + value_type = PyObject_Type(value); + if (value_type == NULL) { + return 0; + } + converter = PyDict_GetItem(options->type_registry.encoder_map, value_type); + Py_XDECREF(value_type); + if (converter != NULL) { + /* Transform types that have a registered converter. + * A new reference is created upon transformation. */ + new_value = PyObject_CallFunctionObjArgs(converter, value, NULL); + if (new_value == NULL) { + return 0; + } + retval = write_element_to_buffer(self, buffer, type_byte, new_value, + check_keys, options, 1, 0); + Py_XDECREF(new_value); + return retval; + } + } + + /* Try the fallback encoder if one is provided and we have not already + * attempted to use the fallback encoder. */ + if (!in_fallback_call && options->type_registry.has_fallback_encoder) { + new_value = PyObject_CallFunctionObjArgs( + options->type_registry.fallback_encoder, value, NULL); + if (new_value == NULL) { + // propagate any exception raised by the callback + return 0; + } + retval = write_element_to_buffer(self, buffer, type_byte, new_value, + check_keys, options, 0, 1); + Py_XDECREF(new_value); + return retval; + } + + /* We can't determine value's type. Fail. */ + _set_cannot_encode(value); + return 0; +} + +static int check_key_name(const char* name, int name_length) { + + if (name_length > 0 && name[0] == '$') { + PyObject* InvalidDocument = _error("InvalidDocument"); + if (InvalidDocument) { + PyObject* errmsg = PyUnicode_FromFormat( + "key '%s' must not start with '$'", name); + if (errmsg) { + PyErr_SetObject(InvalidDocument, errmsg); + Py_DECREF(errmsg); + } + Py_DECREF(InvalidDocument); + } + return 0; + } + if (strchr(name, '.')) { + PyObject* InvalidDocument = _error("InvalidDocument"); + if (InvalidDocument) { + PyObject* errmsg = PyUnicode_FromFormat( + "key '%s' must not contain '.'", name); + if (errmsg) { + PyErr_SetObject(InvalidDocument, errmsg); + Py_DECREF(errmsg); + } + Py_DECREF(InvalidDocument); + } + return 0; + } + return 1; +} + +/* Write a (key, value) pair to the buffer. + * + * Returns 0 on failure */ +int write_pair(PyObject* self, buffer_t buffer, const char* name, int name_length, + PyObject* value, unsigned char check_keys, + const codec_options_t* options, unsigned char allow_id) { + int type_byte; + + /* Don't write any _id elements unless we're explicitly told to - + * _id has to be written first so we do so, but don't bother + * deleting it from the dictionary being written. 
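A user-level view of the fallback-encoder path above: when a value matches no type encoder, the registered fallback gets one chance to convert it before InvalidDocument is raised. A sketch using plain `str` as the fallback:

import bson
from bson.codec_options import CodecOptions, TypeRegistry

opts = CodecOptions(type_registry=TypeRegistry(fallback_encoder=str))
data = bson.encode({"x": complex(1, 2)}, codec_options=opts)
assert bson.decode(data)["x"] == "(1+2j)"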
*/ + if (!allow_id && strcmp(name, "_id") == 0) { + return 1; + } + + type_byte = pymongo_buffer_save_space(buffer, 1); + if (type_byte == -1) { + return 0; + } + if (check_keys && !check_key_name(name, name_length)) { + return 0; + } + if (!buffer_write_bytes(buffer, name, name_length + 1)) { + return 0; + } + if (!write_element_to_buffer(self, buffer, type_byte, + value, check_keys, options, 0, 0)) { + return 0; + } + return 1; +} + +int decode_and_write_pair(PyObject* self, buffer_t buffer, + PyObject* key, PyObject* value, + unsigned char check_keys, + const codec_options_t* options, + unsigned char top_level) { + PyObject* encoded; + const char* data; + int size; + if (PyUnicode_Check(key)) { + encoded = PyUnicode_AsUTF8String(key); + if (!encoded) { + return 0; + } + if (!(data = PyBytes_AS_STRING(encoded))) { + Py_DECREF(encoded); + return 0; + } + if ((size = _downcast_and_check(PyBytes_GET_SIZE(encoded), 1)) == -1) { + Py_DECREF(encoded); + return 0; + } + if (strlen(data) != (size_t)(size - 1)) { + PyObject* InvalidDocument = _error("InvalidDocument"); + if (InvalidDocument) { + PyErr_SetString(InvalidDocument, + "Key names must not contain the NULL byte"); + Py_DECREF(InvalidDocument); + } + Py_DECREF(encoded); + return 0; + } + } else { + PyObject* InvalidDocument = _error("InvalidDocument"); + if (InvalidDocument) { + PyObject* repr = PyObject_Repr(key); + if (repr) { + PyObject* errmsg = PyUnicode_FromString( + "documents must have only string keys, key was "); + if (errmsg) { + PyObject* error = PyUnicode_Concat(errmsg, repr); + if (error) { + PyErr_SetObject(InvalidDocument, error); + Py_DECREF(error); + } + Py_DECREF(errmsg); + Py_DECREF(repr); + } else { + Py_DECREF(repr); + } + } + Py_DECREF(InvalidDocument); + } + return 0; + } + + /* If top_level is True, don't allow writing _id here - it was already written. */ + if (!write_pair(self, buffer, data, + size - 1, value, check_keys, options, !top_level)) { + Py_DECREF(encoded); + return 0; + } + + Py_DECREF(encoded); + return 1; +} + + +/* Write a RawBSONDocument to the buffer. + * Returns the number of bytes written or 0 on failure. 
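`check_key_name` above is what the `check_keys=True` flag reaches from Python:

import bson
from bson.errors import InvalidDocument

try:
    bson.encode({"$bad": 1}, check_keys=True)
except InvalidDocument as exc:
    print(exc)  # key '$bad' must not start with '$'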
+ */ +static int write_raw_doc(buffer_t buffer, PyObject* raw, PyObject* _raw_str) { + char* bytes; + Py_ssize_t len; + int len_int; + int bytes_written = 0; + PyObject* bytes_obj = NULL; + + bytes_obj = PyObject_GetAttr(raw, _raw_str); + if (!bytes_obj) { + goto fail; + } + + if (-1 == PyBytes_AsStringAndSize(bytes_obj, &bytes, &len)) { + goto fail; + } + len_int = _downcast_and_check(len, 0); + if (-1 == len_int) { + goto fail; + } + if (!buffer_write_bytes(buffer, bytes, len_int)) { + goto fail; + } + bytes_written = len_int; +fail: + Py_XDECREF(bytes_obj); + return bytes_written; +} + +/* returns the number of bytes written or 0 on failure */ +int write_dict(PyObject* self, buffer_t buffer, + PyObject* dict, unsigned char check_keys, + const codec_options_t* options, unsigned char top_level) { + PyObject* key; + PyObject* iter; + char zero = 0; + int length; + int length_location; + struct module_state *state = GETSTATE(self); + PyObject* mapping_type; + long type_marker; + int is_dict = PyDict_Check(dict); + + if (!is_dict) { + /* check for RawBSONDocument */ + type_marker = _type_marker(dict, state->_type_marker_str); + if (type_marker < 0) { + return 0; + } + + if (101 == type_marker) { + return write_raw_doc(buffer, dict, state->_raw_str); + } + + mapping_type = _get_object(state->Mapping, "collections.abc", "Mapping"); + + if (mapping_type) { + if (!PyObject_IsInstance(dict, mapping_type)) { + PyObject* repr; + Py_DECREF(mapping_type); + if ((repr = PyObject_Repr(dict))) { + PyObject* errmsg = PyUnicode_FromString( + "encoder expected a mapping type but got: "); + if (errmsg) { + PyObject* error = PyUnicode_Concat(errmsg, repr); + if (error) { + PyErr_SetObject(PyExc_TypeError, error); + Py_DECREF(error); + } + Py_DECREF(errmsg); + Py_DECREF(repr); + } + else { + Py_DECREF(repr); + } + } else { + PyErr_SetString(PyExc_TypeError, + "encoder expected a mapping type"); + } + + return 0; + } + Py_DECREF(mapping_type); + /* PyObject_IsInstance returns -1 on error */ + if (PyErr_Occurred()) { + return 0; + } + } + } + + length_location = pymongo_buffer_save_space(buffer, 4); + if (length_location == -1) { + return 0; + } + + /* Write _id first if this is a top level doc. */ + if (top_level) { + /* + * If "dict" is a defaultdict we don't want to call + * PyObject_GetItem on it. That would **create** + * an _id where one didn't previously exist (PYTHON-871). + */ + if (is_dict) { + /* PyDict_GetItem returns a borrowed reference. */ + PyObject* _id = PyDict_GetItem(dict, state->_id_str); + if (_id) { + if (!write_pair(self, buffer, "_id", 3, + _id, check_keys, options, 1)) { + return 0; + } + } + } else if (PyMapping_HasKey(dict, state->_id_str)) { + PyObject* _id = PyObject_GetItem(dict, state->_id_str); + if (!_id) { + return 0; + } + if (!write_pair(self, buffer, "_id", 3, + _id, check_keys, options, 1)) { + Py_DECREF(_id); + return 0; + } + /* PyObject_GetItem returns a new reference. 
*/ + Py_DECREF(_id); + } + } + + if (is_dict) { + PyObject* value; + Py_ssize_t pos = 0; + while (PyDict_Next(dict, &pos, &key, &value)) { + if (!decode_and_write_pair(self, buffer, key, value, + check_keys, options, top_level)) { + return 0; + } + } + } else { + iter = PyObject_GetIter(dict); + if (iter == NULL) { + return 0; + } + while ((key = PyIter_Next(iter)) != NULL) { + PyObject* value = PyObject_GetItem(dict, key); + if (!value) { + PyErr_SetObject(PyExc_KeyError, key); + Py_DECREF(key); + Py_DECREF(iter); + return 0; + } + if (!decode_and_write_pair(self, buffer, key, value, + check_keys, options, top_level)) { + Py_DECREF(key); + Py_DECREF(value); + Py_DECREF(iter); + return 0; + } + Py_DECREF(key); + Py_DECREF(value); + } + Py_DECREF(iter); + if (PyErr_Occurred()) { + return 0; + } + } + + /* write null byte and fill in length */ + if (!buffer_write_bytes(buffer, &zero, 1)) { + return 0; + } + length = pymongo_buffer_get_position(buffer) - length_location; + buffer_write_int32_at_position( + buffer, length_location, (int32_t)length); + return length; +} + +static PyObject* _cbson_dict_to_bson(PyObject* self, PyObject* args) { + PyObject* dict; + PyObject* result; + unsigned char check_keys; + unsigned char top_level = 1; + PyObject* options_obj; + codec_options_t options; + buffer_t buffer; + PyObject* raw_bson_document_bytes_obj; + long type_marker; + struct module_state *state = GETSTATE(self); + + if (!(PyArg_ParseTuple(args, "ObO|b", &dict, &check_keys, + &options_obj, &top_level) && + convert_codec_options(self, options_obj, &options))) { + return NULL; + } + + /* check for RawBSONDocument */ + type_marker = _type_marker(dict, state->_type_marker_str); + if (type_marker < 0) { + destroy_codec_options(&options); + return NULL; + } else if (101 == type_marker) { + destroy_codec_options(&options); + raw_bson_document_bytes_obj = PyObject_GetAttr(dict, state->_raw_str); + if (NULL == raw_bson_document_bytes_obj) { + return NULL; + } + return raw_bson_document_bytes_obj; + } + + buffer = pymongo_buffer_new(); + if (!buffer) { + destroy_codec_options(&options); + return NULL; + } + + if (!write_dict(self, buffer, dict, check_keys, &options, top_level)) { + destroy_codec_options(&options); + pymongo_buffer_free(buffer); + return NULL; + } + + /* objectify buffer */ + result = Py_BuildValue("y#", pymongo_buffer_get_buffer(buffer), + (Py_ssize_t)pymongo_buffer_get_position(buffer)); + destroy_codec_options(&options); + pymongo_buffer_free(buffer); + return result; +} + +/* + * Hook for optional decoding BSON documents to DBRef. + */ +static PyObject *_dbref_hook(PyObject* self, PyObject* value) { + struct module_state *state = GETSTATE(self); + PyObject* dbref = NULL; + PyObject* dbref_type = NULL; + PyObject* ref = NULL; + PyObject* id = NULL; + PyObject* database = NULL; + PyObject* ret = NULL; + int db_present = 0; + + /* Decoding for DBRefs */ + if (PyMapping_HasKey(value, state->_dollar_ref_str) && PyMapping_HasKey(value, state->_dollar_id_str)) { /* DBRef */ + ref = PyObject_GetItem(value, state->_dollar_ref_str); + /* PyObject_GetItem returns NULL to indicate error. */ + if (!ref) { + goto invalid; + } + id = PyObject_GetItem(value, state->_dollar_id_str); + /* PyObject_GetItem returns NULL to indicate error. 
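+ *
+ * For context, this hook is what turns a subdocument carrying both $ref
+ * and $id into bson.dbref.DBRef rather than a plain dict. A public-API
+ * sketch (extra fields beyond $ref/$id/$db become DBRef kwargs):
+ *
+ *   import bson
+ *   from bson.dbref import DBRef
+ *   data = bson.encode({"link": DBRef("things", 42)})
+ *   assert bson.decode(data)["link"] == DBRef("things", 42)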
*/ + if (!id) { + goto invalid; + } + + if (PyMapping_HasKey(value, state->_dollar_db_str)) { + database = PyObject_GetItem(value, state->_dollar_db_str); + if (!database) { + goto invalid; + } + db_present = 1; + } else { + database = Py_None; + Py_INCREF(database); + } + + // check types + if (!(PyUnicode_Check(ref) && (database == Py_None || PyUnicode_Check(database)))) { + ret = value; + goto invalid; + } + + PyMapping_DelItem(value, state->_dollar_ref_str); + PyMapping_DelItem(value, state->_dollar_id_str); + if (db_present) { + PyMapping_DelItem(value, state->_dollar_db_str); + } + + if ((dbref_type = _get_object(state->DBRef, "bson.dbref", "DBRef"))) { + dbref = PyObject_CallFunctionObjArgs(dbref_type, ref, id, database, value, NULL); + Py_DECREF(value); + ret = dbref; + } + } else { + ret = value; + } +invalid: + Py_XDECREF(dbref_type); + Py_XDECREF(ref); + Py_XDECREF(id); + Py_XDECREF(database); + return ret; +} + +static PyObject* get_value(PyObject* self, PyObject* name, const char* buffer, + unsigned* position, unsigned char type, + unsigned max, const codec_options_t* options, int raw_array) { + struct module_state *state = GETSTATE(self); + PyObject* value = NULL; + switch (type) { + case 1: + { + double d; + if (max < 8) { + goto invalid; + } + memcpy(&d, buffer + *position, 8); + value = PyFloat_FromDouble(BSON_DOUBLE_FROM_LE(d)); + *position += 8; + break; + } + case 2: + case 14: + { + uint32_t value_length; + if (max < 4) { + goto invalid; + } + memcpy(&value_length, buffer + *position, 4); + value_length = BSON_UINT32_FROM_LE(value_length); + /* Encoded string length + string */ + if (!value_length || max < value_length || max < 4 + value_length) { + goto invalid; + } + *position += 4; + /* Strings must end in \0 */ + if (buffer[*position + value_length - 1]) { + goto invalid; + } + value = PyUnicode_DecodeUTF8( + buffer + *position, value_length - 1, + options->unicode_decode_error_handler); + if (!value) { + goto invalid; + } + *position += value_length; + break; + } + case 3: + { + uint32_t size; + + if (max < 4) { + goto invalid; + } + memcpy(&size, buffer + *position, 4); + size = BSON_UINT32_FROM_LE(size); + if (size < BSON_MIN_SIZE || max < size) { + goto invalid; + } + /* Check for bad eoo */ + if (buffer[*position + size - 1]) { + goto invalid; + } + + if (options->is_raw_bson) { + value = PyObject_CallFunction( + options->document_class, "y#O", + buffer + *position, (Py_ssize_t)size, options->options_obj); + if (!value) { + goto invalid; + } + *position += size; + break; + } + + value = elements_to_dict(self, buffer + *position + 4, + size - 5, options); + if (!value) { + goto invalid; + } + + /* Hook for DBRefs */ + value = _dbref_hook(self, value); + if (!value) { + goto invalid; + } + + *position += size; + break; + } + case 4: + { + uint32_t size, end; + + if (max < 4) { + goto invalid; + } + memcpy(&size, buffer + *position, 4); + size = BSON_UINT32_FROM_LE(size); + if (size < BSON_MIN_SIZE || max < size) { + goto invalid; + } + + end = *position + size - 1; + /* Check for bad eoo */ + if (buffer[end]) { + goto invalid; + } + + if (raw_array != 0) { + // Treat it as a binary buffer. 
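+ // (raw_array is an internal fast path: callers that want the array's
+ // encoded bytes get them without eager parsing.) For normal decoding,
+ // recall that BSON stores arrays as documents keyed "0", "1", ...,
+ // which is why the loop below can skip each key and parse only the
+ // values. A public-API sketch of the round trip:
+ //
+ //   import bson
+ //   assert bson.decode(bson.encode({"a": [10, 11]}))["a"] == [10, 11]
+ //   # on the wire, "a" is the subdocument {"0": 10, "1": 11}, type 0x04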
+ value = PyBytes_FromStringAndSize(buffer + *position, size); + *position += size; + break; + } + + *position += 4; + + value = PyList_New(0); + if (!value) { + goto invalid; + } + while (*position < end) { + PyObject* to_append; + + unsigned char bson_type = (unsigned char)buffer[(*position)++]; + + size_t key_size = strlen(buffer + *position); + if (max < key_size) { + Py_DECREF(value); + goto invalid; + } + /* just skip the key, they're in order. */ + *position += (unsigned)key_size + 1; + if (Py_EnterRecursiveCall(" while decoding a list value")) { + Py_DECREF(value); + goto invalid; + } + to_append = get_value(self, name, buffer, position, bson_type, + max - (unsigned)key_size, options, raw_array); + Py_LeaveRecursiveCall(); + if (!to_append) { + Py_DECREF(value); + goto invalid; + } + if (PyList_Append(value, to_append) < 0) { + Py_DECREF(value); + Py_DECREF(to_append); + goto invalid; + } + Py_DECREF(to_append); + } + if (*position != end) { + goto invalid; + } + (*position)++; + break; + } + case 5: + { + PyObject* data; + PyObject* st; + PyObject* type_to_create; + uint32_t length, length2; + unsigned char subtype; + + if (max < 5) { + goto invalid; + } + memcpy(&length, buffer + *position, 4); + length = BSON_UINT32_FROM_LE(length); + if (max < length) { + goto invalid; + } + + subtype = (unsigned char)buffer[*position + 4]; + *position += 5; + if (subtype == 2) { + if (length < 4) { + goto invalid; + } + memcpy(&length2, buffer + *position, 4); + length2 = BSON_UINT32_FROM_LE(length2); + if (length2 != length - 4) { + goto invalid; + } + } + /* Python3 special case. Decode BSON binary subtype 0 to bytes. */ + if (subtype == 0) { + value = PyBytes_FromStringAndSize(buffer + *position, length); + *position += length; + break; + } + if (subtype == 2) { + data = PyBytes_FromStringAndSize(buffer + *position + 4, length - 4); + } else { + data = PyBytes_FromStringAndSize(buffer + *position, length); + } + if (!data) { + goto invalid; + } + /* Encode as UUID or Binary based on options->uuid_rep */ + if (subtype == 3 || subtype == 4) { + PyObject* binary_type = NULL; + PyObject* binary_value = NULL; + char uuid_rep = options->uuid_rep; + + /* UUID should always be 16 bytes */ + if (length != 16) { + goto uuiderror; + } + + binary_type = _get_object(state->Binary, "bson", "Binary"); + if (binary_type == NULL) { + goto uuiderror; + } + + binary_value = PyObject_CallFunction(binary_type, "(Oi)", data, subtype); + if (binary_value == NULL) { + goto uuiderror; + } + + if ((uuid_rep == UNSPECIFIED) || + (subtype == 4 && uuid_rep != STANDARD) || + (subtype == 3 && uuid_rep == STANDARD)) { + value = binary_value; + Py_INCREF(value); + } else { + PyObject *uuid_rep_obj = PyLong_FromLong(uuid_rep); + if (!uuid_rep_obj) { + goto uuiderror; + } + value = PyObject_CallMethodObjArgs(binary_value, state->_as_uuid_str, uuid_rep_obj, NULL); + Py_DECREF(uuid_rep_obj); + } + + uuiderror: + Py_XDECREF(binary_type); + Py_XDECREF(binary_value); + Py_DECREF(data); + if (!value) { + goto invalid; + } + *position += length; + break; + } + + st = PyLong_FromLong(subtype); + if (!st) { + Py_DECREF(data); + goto invalid; + } + if ((type_to_create = _get_object(state->Binary, "bson.binary", "Binary"))) { + value = PyObject_CallFunctionObjArgs(type_to_create, data, st, NULL); + Py_DECREF(type_to_create); + } + Py_DECREF(st); + Py_DECREF(data); + if (!value) { + goto invalid; + } + *position += length; + break; + } + case 6: + case 10: + { + value = Py_None; + Py_INCREF(value); + break; + } + case 7: + { + PyObject* 
objectid_type; + if (max < 12) { + goto invalid; + } + if ((objectid_type = _get_object(state->ObjectId, "bson.objectid", "ObjectId"))) { + value = PyObject_CallFunction(objectid_type, "y#", + buffer + *position, (Py_ssize_t)12); + Py_DECREF(objectid_type); + } + *position += 12; + break; + } + case 8: + { + char boolean_raw = buffer[(*position)++]; + if (0 == boolean_raw) { + value = Py_False; + } else if (1 == boolean_raw) { + value = Py_True; + } else { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_Format(InvalidBSON, "invalid boolean value: %x", boolean_raw); + Py_DECREF(InvalidBSON); + } + return NULL; + } + Py_INCREF(value); + break; + } + case 9: + { + PyObject* utc_type; + PyObject* naive; + PyObject* replace; + PyObject* args; + PyObject* kwargs; + PyObject* astimezone; + int64_t millis; + if (max < 8) { + goto invalid; + } + memcpy(&millis, buffer + *position, 8); + millis = (int64_t)BSON_UINT64_FROM_LE(millis); + *position += 8; + + if (options->datetime_conversion == DATETIME_MS){ + value = datetime_ms_from_millis(self, millis); + break; + } + + int dt_clamp = options->datetime_conversion == DATETIME_CLAMP; + int dt_auto = options->datetime_conversion == DATETIME_AUTO; + + + if (dt_clamp || dt_auto){ + PyObject *min_millis_fn = _get_object(state->_min_datetime_ms, "bson.datetime_ms", "_min_datetime_ms"); + PyObject *max_millis_fn = _get_object(state->_max_datetime_ms, "bson.datetime_ms", "_max_datetime_ms"); + PyObject *min_millis_fn_res; + PyObject *max_millis_fn_res; + int64_t min_millis; + int64_t max_millis; + + if (min_millis_fn == NULL || max_millis_fn == NULL) { + Py_XDECREF(min_millis_fn); + Py_XDECREF(max_millis_fn); + goto invalid; + } + + if (options->tz_aware){ + PyObject* tzinfo = options->tzinfo; + if (tzinfo == Py_None) { + // Default to UTC. + utc_type = _get_object(state->UTC, "bson.tz_util", "utc"); + tzinfo = utc_type; + } + min_millis_fn_res = PyObject_CallFunctionObjArgs(min_millis_fn, tzinfo, NULL); + max_millis_fn_res = PyObject_CallFunctionObjArgs(max_millis_fn, tzinfo, NULL); + } else { + min_millis_fn_res = PyObject_CallObject(min_millis_fn, NULL); + max_millis_fn_res = PyObject_CallObject(max_millis_fn, NULL); + } + + Py_DECREF(min_millis_fn); + Py_DECREF(max_millis_fn); + + if (!min_millis_fn_res || !max_millis_fn_res){ + Py_XDECREF(min_millis_fn_res); + Py_XDECREF(max_millis_fn_res); + goto invalid; + } + + min_millis = PyLong_AsLongLong(min_millis_fn_res); + max_millis = PyLong_AsLongLong(max_millis_fn_res); + + if ((min_millis == -1 || max_millis == -1) && PyErr_Occurred()) + { + // min/max_millis check + goto invalid; + } + + if (dt_clamp) { + if (millis < min_millis) { + millis = min_millis; + } else if (millis > max_millis) { + millis = max_millis; + } + // Continues from here to return a datetime. + } else { // dt_auto + if (millis < min_millis || millis > max_millis){ + value = datetime_ms_from_millis(self, millis); + break; // Out-of-range so done. + } + } + } + + naive = datetime_from_millis(millis); + if (!options->tz_aware) { /* In the naive case, we're done here. 
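+ *
+ * A sketch of how the conversion modes above surface in Python (assumes
+ * pymongo 4.3+, which introduced DatetimeConversion and DatetimeMS):
+ *
+ *   from bson import encode, decode
+ *   from bson.codec_options import CodecOptions, DatetimeConversion
+ *   from bson.datetime_ms import DatetimeMS
+ *
+ *   opts = CodecOptions(datetime_conversion=DatetimeConversion.DATETIME_MS)
+ *   ms = decode(encode({"t": DatetimeMS(1)}), codec_options=opts)["t"]
+ *   assert ms == DatetimeMS(1)
+ *
+ * DATETIME_AUTO falls back to DatetimeMS only for values outside
+ * datetime.datetime's range; DATETIME_CLAMP pins such values to the
+ * representable minimum or maximum instead, as the branches above
+ * implement.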
*/ + value = naive; + break; + } + + if (!naive) { + goto invalid; + } + replace = PyObject_GetAttr(naive, state->_replace_str); + Py_DECREF(naive); + if (!replace) { + goto invalid; + } + args = PyTuple_New(0); + if (!args) { + Py_DECREF(replace); + goto invalid; + } + kwargs = PyDict_New(); + if (!kwargs) { + Py_DECREF(replace); + Py_DECREF(args); + goto invalid; + } + utc_type = _get_object(state->UTC, "bson.tz_util", "utc"); + if (!utc_type || PyDict_SetItem(kwargs, state->_tzinfo_str, utc_type) == -1) { + Py_DECREF(replace); + Py_DECREF(args); + Py_DECREF(kwargs); + Py_XDECREF(utc_type); + goto invalid; + } + Py_XDECREF(utc_type); + value = PyObject_Call(replace, args, kwargs); + if (!value) { + Py_DECREF(replace); + Py_DECREF(args); + Py_DECREF(kwargs); + goto invalid; + } + + /* convert to local time */ + if (options->tzinfo != Py_None) { + astimezone = PyObject_GetAttr(value, state->_astimezone_str); + Py_DECREF(value); + if (!astimezone) { + Py_DECREF(replace); + Py_DECREF(args); + Py_DECREF(kwargs); + goto invalid; + } + value = PyObject_CallFunctionObjArgs(astimezone, options->tzinfo, NULL); + Py_DECREF(astimezone); + } + + Py_DECREF(replace); + Py_DECREF(args); + Py_DECREF(kwargs); + break; + } + case 11: + { + PyObject* regex_class; + PyObject* pattern; + int flags; + size_t flags_length, i; + size_t pattern_length = strlen(buffer + *position); + if (pattern_length > BSON_MAX_SIZE || max < pattern_length) { + goto invalid; + } + pattern = PyUnicode_DecodeUTF8( + buffer + *position, pattern_length, + options->unicode_decode_error_handler); + if (!pattern) { + goto invalid; + } + *position += (unsigned)pattern_length + 1; + flags_length = strlen(buffer + *position); + if (flags_length > BSON_MAX_SIZE || + (BSON_MAX_SIZE - pattern_length) < flags_length) { + Py_DECREF(pattern); + goto invalid; + } + if (max < pattern_length + flags_length) { + Py_DECREF(pattern); + goto invalid; + } + flags = 0; + for (i = 0; i < flags_length; i++) { + if (buffer[*position + i] == 'i') { + flags |= 2; + } else if (buffer[*position + i] == 'l') { + flags |= 4; + } else if (buffer[*position + i] == 'm') { + flags |= 8; + } else if (buffer[*position + i] == 's') { + flags |= 16; + } else if (buffer[*position + i] == 'u') { + flags |= 32; + } else if (buffer[*position + i] == 'x') { + flags |= 64; + } + } + *position += (unsigned)flags_length + 1; + + regex_class = _get_object(state->Regex, "bson.regex", "Regex"); + if (regex_class) { + value = PyObject_CallFunction(regex_class, + "Oi", pattern, flags); + Py_DECREF(regex_class); + } + Py_DECREF(pattern); + break; + } + case 12: + { + uint32_t coll_length; + PyObject* collection; + PyObject* id = NULL; + PyObject* objectid_type; + PyObject* dbref_type; + + if (max < 4) { + goto invalid; + } + memcpy(&coll_length, buffer + *position, 4); + coll_length = BSON_UINT32_FROM_LE(coll_length); + /* Encoded string length + string + 12 byte ObjectId */ + if (!coll_length || max < coll_length || max < 4 + coll_length + 12) { + goto invalid; + } + *position += 4; + /* Strings must end in \0 */ + if (buffer[*position + coll_length - 1]) { + goto invalid; + } + + collection = PyUnicode_DecodeUTF8( + buffer + *position, coll_length - 1, + options->unicode_decode_error_handler); + if (!collection) { + goto invalid; + } + *position += coll_length; + + if ((objectid_type = _get_object(state->ObjectId, "bson.objectid", "ObjectId"))) { + id = PyObject_CallFunction(objectid_type, "y#", + buffer + *position, (Py_ssize_t)12); + Py_DECREF(objectid_type); + } + if (!id) { + 
Py_DECREF(collection); + goto invalid; + } + *position += 12; + if ((dbref_type = _get_object(state->DBRef, "bson.dbref", "DBRef"))) { + value = PyObject_CallFunctionObjArgs(dbref_type, collection, id, NULL); + Py_DECREF(dbref_type); + } + Py_DECREF(collection); + Py_DECREF(id); + break; + } + case 13: + { + PyObject* code; + PyObject* code_type; + uint32_t value_length; + if (max < 4) { + goto invalid; + } + memcpy(&value_length, buffer + *position, 4); + value_length = BSON_UINT32_FROM_LE(value_length); + /* Encoded string length + string */ + if (!value_length || max < value_length || max < 4 + value_length) { + goto invalid; + } + *position += 4; + /* Strings must end in \0 */ + if (buffer[*position + value_length - 1]) { + goto invalid; + } + code = PyUnicode_DecodeUTF8( + buffer + *position, value_length - 1, + options->unicode_decode_error_handler); + if (!code) { + goto invalid; + } + *position += value_length; + if ((code_type = _get_object(state->Code, "bson.code", "Code"))) { + value = PyObject_CallFunctionObjArgs(code_type, code, NULL, NULL); + Py_DECREF(code_type); + } + Py_DECREF(code); + break; + } + case 15: + { + uint32_t c_w_s_size; + uint32_t code_size; + uint32_t scope_size; + PyObject* code; + PyObject* scope; + PyObject* code_type; + + if (max < 8) { + goto invalid; + } + + memcpy(&c_w_s_size, buffer + *position, 4); + c_w_s_size = BSON_UINT32_FROM_LE(c_w_s_size); + *position += 4; + + if (max < c_w_s_size) { + goto invalid; + } + + memcpy(&code_size, buffer + *position, 4); + code_size = BSON_UINT32_FROM_LE(code_size); + /* code_w_scope length + code length + code + scope length */ + if (!code_size || max < code_size || max < 4 + 4 + code_size + 4) { + goto invalid; + } + *position += 4; + /* Strings must end in \0 */ + if (buffer[*position + code_size - 1]) { + goto invalid; + } + code = PyUnicode_DecodeUTF8( + buffer + *position, code_size - 1, + options->unicode_decode_error_handler); + if (!code) { + goto invalid; + } + *position += code_size; + + memcpy(&scope_size, buffer + *position, 4); + scope_size = BSON_UINT32_FROM_LE(scope_size); + if (scope_size < BSON_MIN_SIZE) { + Py_DECREF(code); + goto invalid; + } + /* code length + code + scope length + scope */ + if ((4 + code_size + 4 + scope_size) != c_w_s_size) { + Py_DECREF(code); + goto invalid; + } + + /* Check for bad eoo */ + if (buffer[*position + scope_size - 1]) { + goto invalid; + } + scope = elements_to_dict(self, buffer + *position + 4, + scope_size - 5, options); + if (!scope) { + Py_DECREF(code); + goto invalid; + } + *position += scope_size; + + if ((code_type = _get_object(state->Code, "bson.code", "Code"))) { + value = PyObject_CallFunctionObjArgs(code_type, code, scope, NULL); + Py_DECREF(code_type); + } + Py_DECREF(code); + Py_DECREF(scope); + break; + } + case 16: + { + int32_t i; + if (max < 4) { + goto invalid; + } + memcpy(&i, buffer + *position, 4); + i = (int32_t)BSON_UINT32_FROM_LE(i); + value = PyLong_FromLong(i); + if (!value) { + goto invalid; + } + *position += 4; + break; + } + case 17: + { + uint32_t time, inc; + PyObject* timestamp_type; + if (max < 8) { + goto invalid; + } + memcpy(&inc, buffer + *position, 4); + memcpy(&time, buffer + *position + 4, 4); + inc = BSON_UINT32_FROM_LE(inc); + time = BSON_UINT32_FROM_LE(time); + if ((timestamp_type = _get_object(state->Timestamp, "bson.timestamp", "Timestamp"))) { + value = PyObject_CallFunction(timestamp_type, "II", time, inc); + Py_DECREF(timestamp_type); + } + *position += 8; + break; + } + case 18: + { + int64_t ll; + PyObject* 
bson_int64_type = _get_object(state->BSONInt64, + "bson.int64", "Int64"); + if (!bson_int64_type) + goto invalid; + if (max < 8) { + Py_DECREF(bson_int64_type); + goto invalid; + } + memcpy(&ll, buffer + *position, 8); + ll = (int64_t)BSON_UINT64_FROM_LE(ll); + value = PyObject_CallFunction(bson_int64_type, "L", ll); + *position += 8; + Py_DECREF(bson_int64_type); + break; + } + case 19: + { + PyObject* dec128; + if (max < 16) { + goto invalid; + } + if ((dec128 = _get_object(state->Decimal128, + "bson.decimal128", + "Decimal128"))) { + PyObject *_bytes_obj = PyBytes_FromStringAndSize(buffer + *position, (Py_ssize_t)16); + if (!_bytes_obj) { + Py_DECREF(dec128); + goto invalid; + } + value = PyObject_CallMethodObjArgs(dec128, state->_from_bid_str, _bytes_obj, NULL); + Py_DECREF(dec128); + Py_DECREF(_bytes_obj); + } + *position += 16; + break; + } + case 255: + { + PyObject* minkey_type = _get_object(state->MinKey, "bson.min_key", "MinKey"); + if (!minkey_type) + goto invalid; + value = PyObject_CallFunctionObjArgs(minkey_type, NULL); + Py_DECREF(minkey_type); + break; + } + case 127: + { + PyObject* maxkey_type = _get_object(state->MaxKey, "bson.max_key", "MaxKey"); + if (!maxkey_type) + goto invalid; + value = PyObject_CallFunctionObjArgs(maxkey_type, NULL); + Py_DECREF(maxkey_type); + break; + } + default: + { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyObject* bobj = PyBytes_FromFormat("%c", type); + if (bobj) { + PyObject* repr = PyObject_Repr(bobj); + Py_DECREF(bobj); + /* + * See http://bugs.python.org/issue22023 for why we can't + * just use PyUnicode_FromFormat with %S or %R to do this + * work. + */ + if (repr) { + PyObject* left = PyUnicode_FromString( + "Detected unknown BSON type "); + if (left) { + PyObject* lmsg = PyUnicode_Concat(left, repr); + Py_DECREF(left); + if (lmsg) { + PyObject* errmsg = PyUnicode_FromFormat( + "%U for fieldname '%U'. Are you using the " + "latest driver version?", lmsg, name); + if (errmsg) { + PyErr_SetObject(InvalidBSON, errmsg); + Py_DECREF(errmsg); + } + Py_DECREF(lmsg); + } + } + Py_DECREF(repr); + } + } + Py_DECREF(InvalidBSON); + } + goto invalid; + } + } + + if (value) { + if (!options->type_registry.is_decoder_empty) { + PyObject* value_type = NULL; + PyObject* converter = NULL; + value_type = PyObject_Type(value); + if (value_type == NULL) { + goto invalid; + } + converter = PyDict_GetItem(options->type_registry.decoder_map, value_type); + if (converter != NULL) { + PyObject* new_value = PyObject_CallFunctionObjArgs(converter, value, NULL); + Py_DECREF(value_type); + Py_DECREF(value); + return new_value; + } else { + Py_DECREF(value_type); + return value; + } + } + return value; + } + + invalid: + + /* + * Wrap any non-InvalidBSON errors in InvalidBSON. + */ + if (PyErr_Occurred()) { + PyObject *etype, *evalue, *etrace; + PyObject *InvalidBSON; + + /* + * Calling _error clears the error state, so fetch it first. + */ + PyErr_Fetch(&etype, &evalue, &etrace); + + /* Dont reraise anything but PyExc_Exceptions as InvalidBSON. */ + if (PyErr_GivenExceptionMatches(etype, PyExc_Exception)) { + InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + if (!PyErr_GivenExceptionMatches(etype, InvalidBSON)) { + /* + * Raise InvalidBSON(str(e)). 
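+ *
+ * The upshot for callers: malformed input of any kind surfaces from
+ * Python as bson.errors.InvalidBSON. Sketch:
+ *
+ *   import bson
+ *   from bson.errors import InvalidBSON
+ *   try:
+ *       bson.decode(b"\x05\x00\x00\x00\x01")  # declared size 5, bad terminator
+ *   except InvalidBSON:
+ *       pass  # truncated or corrupt bytes land here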
+ */ + Py_DECREF(etype); + etype = InvalidBSON; + + if (evalue) { + PyObject *msg = PyObject_Str(evalue); + Py_DECREF(evalue); + evalue = msg; + } + PyErr_NormalizeException(&etype, &evalue, &etrace); + } else { + /* + * The current exception matches InvalidBSON, so we don't + * need this reference after all. + */ + Py_DECREF(InvalidBSON); + } + } + } + /* Steals references to args. */ + PyErr_Restore(etype, evalue, etrace); + } else { + PyObject *InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, "invalid length or type code"); + Py_DECREF(InvalidBSON); + } + } + return NULL; +} + +/* + * Get the next 'name' and 'value' from a document in a string, whose position + * is provided. + * + * Returns the position of the next element in the document, or -1 on error. + */ +static int _element_to_dict(PyObject* self, const char* string, + unsigned position, unsigned max, + const codec_options_t* options, + int raw_array, + PyObject** name, PyObject** value) { + unsigned char type = (unsigned char)string[position++]; + size_t name_length = strlen(string + position); + if (name_length > BSON_MAX_SIZE || position + name_length >= max) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, "field name too large"); + Py_DECREF(InvalidBSON); + } + return -1; + } + *name = PyUnicode_DecodeUTF8( + string + position, name_length, + options->unicode_decode_error_handler); + if (!*name) { + /* If NULL is returned then wrap the UnicodeDecodeError + in an InvalidBSON error */ + PyObject *etype, *evalue, *etrace; + PyObject *InvalidBSON; + + PyErr_Fetch(&etype, &evalue, &etrace); + if (PyErr_GivenExceptionMatches(etype, PyExc_Exception)) { + InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + Py_DECREF(etype); + etype = InvalidBSON; + + if (evalue) { + PyObject *msg = PyObject_Str(evalue); + Py_DECREF(evalue); + evalue = msg; + } + PyErr_NormalizeException(&etype, &evalue, &etrace); + } + } + PyErr_Restore(etype, evalue, etrace); + return -1; + } + position += (unsigned)name_length + 1; + *value = get_value(self, *name, string, &position, type, + max - position, options, raw_array); + if (!*value) { + Py_DECREF(*name); + return -1; + } + return position; +} + +static PyObject* _cbson_element_to_dict(PyObject* self, PyObject* args) { + /* TODO: Support buffer protocol */ + char* string; + PyObject* bson; + PyObject* options_obj; + codec_options_t options; + unsigned position; + unsigned max; + int new_position; + int raw_array = 0; + PyObject* name; + PyObject* value; + PyObject* result_tuple; + + if (!(PyArg_ParseTuple(args, "OIIOp", &bson, &position, &max, + &options_obj, &raw_array) && + convert_codec_options(self, options_obj, &options))) { + return NULL; + } + + if (!PyBytes_Check(bson)) { + PyErr_SetString(PyExc_TypeError, "argument to _element_to_dict must be a bytes object"); + return NULL; + } + string = PyBytes_AS_STRING(bson); + + new_position = _element_to_dict(self, string, position, max, &options, raw_array, &name, &value); + if (new_position < 0) { + return NULL; + } + + result_tuple = Py_BuildValue("NNi", name, value, new_position); + if (!result_tuple) { + Py_DECREF(name); + Py_DECREF(value); + return NULL; + } + + destroy_codec_options(&options); + return result_tuple; +} + +static PyObject* _elements_to_dict(PyObject* self, const char* string, + unsigned max, + const codec_options_t* options) { + unsigned position = 0; + PyObject* dict = PyObject_CallObject(options->document_class, NULL); + if 
(!dict) { + return NULL; + } + int raw_array = 0; + while (position < max) { + PyObject* name = NULL; + PyObject* value = NULL; + int new_position; + + new_position = _element_to_dict( + self, string, position, max, options, raw_array, &name, &value); + if (new_position < 0) { + Py_DECREF(dict); + return NULL; + } else { + position = (unsigned)new_position; + } + + PyObject_SetItem(dict, name, value); + Py_DECREF(name); + Py_DECREF(value); + } + return dict; +} + +static PyObject* elements_to_dict(PyObject* self, const char* string, + unsigned max, + const codec_options_t* options) { + PyObject* result; + if (Py_EnterRecursiveCall(" while decoding a BSON document")) + return NULL; + result = _elements_to_dict(self, string, max, options); + Py_LeaveRecursiveCall(); + return result; +} + +static int _get_buffer(PyObject *exporter, Py_buffer *view) { + if (PyObject_GetBuffer(exporter, view, PyBUF_SIMPLE) == -1) { + return 0; + } + if (!PyBuffer_IsContiguous(view, 'C')) { + PyErr_SetString(PyExc_ValueError, + "must be a contiguous buffer"); + goto fail; + } + if (!view->buf || view->len < 0) { + PyErr_SetString(PyExc_ValueError, "invalid buffer"); + goto fail; + } + if (view->itemsize != 1) { + PyErr_SetString(PyExc_ValueError, + "buffer data must be ascii or utf8"); + goto fail; + } + return 1; +fail: + PyBuffer_Release(view); + return 0; +} + +static PyObject* _cbson_bson_to_dict(PyObject* self, PyObject* args) { + int32_t size; + Py_ssize_t total_size; + const char* string; + PyObject* bson; + codec_options_t options; + PyObject* result = NULL; + PyObject* options_obj; + Py_buffer view = {0}; + + if (! (PyArg_ParseTuple(args, "OO", &bson, &options_obj) && + convert_codec_options(self, options_obj, &options))) { + return result; + } + + if (!_get_buffer(bson, &view)) { + destroy_codec_options(&options); + return result; + } + + total_size = view.len; + + if (total_size < BSON_MIN_SIZE) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, + "not enough data for a BSON document"); + Py_DECREF(InvalidBSON); + } + goto done;; + } + + string = (char*)view.buf; + memcpy(&size, string, 4); + size = (int32_t)BSON_UINT32_FROM_LE(size); + if (size < BSON_MIN_SIZE) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, "invalid message size"); + Py_DECREF(InvalidBSON); + } + goto done; + } + + if (total_size < size || total_size > BSON_MAX_SIZE) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, "objsize too large"); + Py_DECREF(InvalidBSON); + } + goto done; + } + + if (size != total_size || string[size - 1]) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, "bad eoo"); + Py_DECREF(InvalidBSON); + } + goto done; + } + + /* No need to decode fields if using RawBSONDocument */ + if (options.is_raw_bson) { + result = PyObject_CallFunction( + options.document_class, "y#O", string, (Py_ssize_t)size, + options_obj); + } + else { + result = elements_to_dict(self, string + 4, (unsigned)size - 5, &options); + } +done: + PyBuffer_Release(&view); + destroy_codec_options(&options); + return result; +} + +static PyObject* _cbson_decode_all(PyObject* self, PyObject* args) { + int32_t size; + Py_ssize_t total_size; + const char* string; + PyObject* bson; + PyObject* dict; + PyObject* result = NULL; + codec_options_t options; + PyObject* options_obj = NULL; + Py_buffer view = {0}; + + if (!(PyArg_ParseTuple(args, 
"OO", &bson, &options_obj) && + convert_codec_options(self, options_obj, &options))) { + return NULL; + } + + if (!_get_buffer(bson, &view)) { + destroy_codec_options(&options); + return NULL; + } + total_size = view.len; + string = (char*)view.buf; + + if (!(result = PyList_New(0))) { + goto fail; + } + + while (total_size > 0) { + if (total_size < BSON_MIN_SIZE) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, + "not enough data for a BSON document"); + Py_DECREF(InvalidBSON); + } + Py_DECREF(result); + goto fail; + } + + memcpy(&size, string, 4); + size = (int32_t)BSON_UINT32_FROM_LE(size); + if (size < BSON_MIN_SIZE) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, "invalid message size"); + Py_DECREF(InvalidBSON); + } + Py_DECREF(result); + goto fail; + } + + if (total_size < size) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, "objsize too large"); + Py_DECREF(InvalidBSON); + } + Py_DECREF(result); + goto fail; + } + + if (string[size - 1]) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, "bad eoo"); + Py_DECREF(InvalidBSON); + } + Py_DECREF(result); + goto fail; + } + + /* No need to decode fields if using RawBSONDocument. */ + if (options.is_raw_bson) { + dict = PyObject_CallFunction( + options.document_class, "y#O", string, (Py_ssize_t)size, + options_obj); + } else { + dict = elements_to_dict(self, string + 4, (unsigned)size - 5, &options); + } + if (!dict) { + Py_DECREF(result); + goto fail; + } + if (PyList_Append(result, dict) < 0) { + Py_DECREF(dict); + Py_DECREF(result); + goto fail; + } + Py_DECREF(dict); + string += size; + total_size -= size; + } + goto done; +fail: + result = NULL; +done: + PyBuffer_Release(&view); + destroy_codec_options(&options); + return result; +} + + +static PyObject* _cbson_array_of_documents_to_buffer(PyObject* self, PyObject* args) { + uint32_t size; + uint32_t value_length; + uint32_t position = 0; + buffer_t buffer; + const char* string; + PyObject* arr; + PyObject* result = NULL; + Py_buffer view = {0}; + + if (!PyArg_ParseTuple(args, "O", &arr)) { + return NULL; + } + + if (!_get_buffer(arr, &view)) { + return NULL; + } + + buffer = pymongo_buffer_new(); + if (!buffer) { + PyBuffer_Release(&view); + return NULL; + } + + string = (char*)view.buf; + + if (view.len < BSON_MIN_SIZE) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, + "not enough data for a BSON document"); + Py_DECREF(InvalidBSON); + } + goto done; + } + + memcpy(&size, string, 4); + size = BSON_UINT32_FROM_LE(size); + /* save space for length */ + if (pymongo_buffer_save_space(buffer, size) == -1) { + goto fail; + } + pymongo_buffer_update_position(buffer, 0); + + position += 4; + while (position < size - 1) { + // Verify the value is an object. + unsigned char type = (unsigned char)string[position]; + if (type != 3) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, "array element was not an object"); + Py_DECREF(InvalidBSON); + } + goto fail; + } + + // Just skip the keys. 
+ position = position + strlen(string + position) + 1; + + if (position >= size || (size - position) < BSON_MIN_SIZE) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, "invalid array content"); + Py_DECREF(InvalidBSON); + } + goto fail; + } + + memcpy(&value_length, string + position, 4); + value_length = BSON_UINT32_FROM_LE(value_length); + if (value_length < BSON_MIN_SIZE) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, "invalid message size"); + Py_DECREF(InvalidBSON); + } + goto fail; + } + + if (view.len < size) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, "objsize too large"); + Py_DECREF(InvalidBSON); + } + goto fail; + } + + if (string[size - 1]) { + PyObject* InvalidBSON = _error("InvalidBSON"); + if (InvalidBSON) { + PyErr_SetString(InvalidBSON, "bad eoo"); + Py_DECREF(InvalidBSON); + } + goto fail; + } + + if (pymongo_buffer_write(buffer, string + position, value_length) == 1) { + goto fail; + } + position += value_length; + } + + /* objectify buffer */ + result = Py_BuildValue("y#", pymongo_buffer_get_buffer(buffer), + (Py_ssize_t)pymongo_buffer_get_position(buffer)); + goto done; +fail: + result = NULL; +done: + PyBuffer_Release(&view); + pymongo_buffer_free(buffer); + return result; +} + + +static PyMethodDef _CBSONMethods[] = { + {"_dict_to_bson", _cbson_dict_to_bson, METH_VARARGS, + "convert a dictionary to a string containing its BSON representation."}, + {"_bson_to_dict", _cbson_bson_to_dict, METH_VARARGS, + "convert a BSON string to a SON object."}, + {"_decode_all", _cbson_decode_all, METH_VARARGS, + "convert binary data to a sequence of documents."}, + {"_element_to_dict", _cbson_element_to_dict, METH_VARARGS, + "Decode a single key, value pair."}, + {"_array_of_documents_to_buffer", _cbson_array_of_documents_to_buffer, METH_VARARGS, "Convert raw array of documents to a stream of BSON documents"}, + {"_test_long_long_to_str", _test_long_long_to_str, METH_VARARGS, "Test conversion of extreme and common Py_ssize_t values to str."}, + {NULL, NULL, 0, NULL} +}; + +#define INITERROR return NULL +static int _cbson_traverse(PyObject *m, visitproc visit, void *arg) { + Py_VISIT(GETSTATE(m)->Binary); + Py_VISIT(GETSTATE(m)->Code); + Py_VISIT(GETSTATE(m)->ObjectId); + Py_VISIT(GETSTATE(m)->DBRef); + Py_VISIT(GETSTATE(m)->Regex); + Py_VISIT(GETSTATE(m)->UUID); + Py_VISIT(GETSTATE(m)->Timestamp); + Py_VISIT(GETSTATE(m)->MinKey); + Py_VISIT(GETSTATE(m)->MaxKey); + Py_VISIT(GETSTATE(m)->UTC); + Py_VISIT(GETSTATE(m)->REType); + Py_VISIT(GETSTATE(m)->_type_marker_str); + Py_VISIT(GETSTATE(m)->_flags_str); + Py_VISIT(GETSTATE(m)->_pattern_str); + Py_VISIT(GETSTATE(m)->_encoder_map_str); + Py_VISIT(GETSTATE(m)->_decoder_map_str); + Py_VISIT(GETSTATE(m)->_fallback_encoder_str); + Py_VISIT(GETSTATE(m)->_raw_str); + Py_VISIT(GETSTATE(m)->_subtype_str); + Py_VISIT(GETSTATE(m)->_binary_str); + Py_VISIT(GETSTATE(m)->_scope_str); + Py_VISIT(GETSTATE(m)->_inc_str); + Py_VISIT(GETSTATE(m)->_time_str); + Py_VISIT(GETSTATE(m)->_bid_str); + Py_VISIT(GETSTATE(m)->_replace_str); + Py_VISIT(GETSTATE(m)->_astimezone_str); + Py_VISIT(GETSTATE(m)->_id_str); + Py_VISIT(GETSTATE(m)->_dollar_ref_str); + Py_VISIT(GETSTATE(m)->_dollar_id_str); + Py_VISIT(GETSTATE(m)->_dollar_db_str); + Py_VISIT(GETSTATE(m)->_tzinfo_str); + Py_VISIT(GETSTATE(m)->_as_doc_str); + Py_VISIT(GETSTATE(m)->_utcoffset_str); + Py_VISIT(GETSTATE(m)->_from_uuid_str); + 
Py_VISIT(GETSTATE(m)->_as_uuid_str); + Py_VISIT(GETSTATE(m)->_from_bid_str); + return 0; +} + +static int _cbson_clear(PyObject *m) { + Py_CLEAR(GETSTATE(m)->Binary); + Py_CLEAR(GETSTATE(m)->Code); + Py_CLEAR(GETSTATE(m)->ObjectId); + Py_CLEAR(GETSTATE(m)->DBRef); + Py_CLEAR(GETSTATE(m)->Regex); + Py_CLEAR(GETSTATE(m)->UUID); + Py_CLEAR(GETSTATE(m)->Timestamp); + Py_CLEAR(GETSTATE(m)->MinKey); + Py_CLEAR(GETSTATE(m)->MaxKey); + Py_CLEAR(GETSTATE(m)->UTC); + Py_CLEAR(GETSTATE(m)->REType); + Py_CLEAR(GETSTATE(m)->_type_marker_str); + Py_CLEAR(GETSTATE(m)->_flags_str); + Py_CLEAR(GETSTATE(m)->_pattern_str); + Py_CLEAR(GETSTATE(m)->_encoder_map_str); + Py_CLEAR(GETSTATE(m)->_decoder_map_str); + Py_CLEAR(GETSTATE(m)->_fallback_encoder_str); + Py_CLEAR(GETSTATE(m)->_raw_str); + Py_CLEAR(GETSTATE(m)->_subtype_str); + Py_CLEAR(GETSTATE(m)->_binary_str); + Py_CLEAR(GETSTATE(m)->_scope_str); + Py_CLEAR(GETSTATE(m)->_inc_str); + Py_CLEAR(GETSTATE(m)->_time_str); + Py_CLEAR(GETSTATE(m)->_bid_str); + Py_CLEAR(GETSTATE(m)->_replace_str); + Py_CLEAR(GETSTATE(m)->_astimezone_str); + Py_CLEAR(GETSTATE(m)->_id_str); + Py_CLEAR(GETSTATE(m)->_dollar_ref_str); + Py_CLEAR(GETSTATE(m)->_dollar_id_str); + Py_CLEAR(GETSTATE(m)->_dollar_db_str); + Py_CLEAR(GETSTATE(m)->_tzinfo_str); + Py_CLEAR(GETSTATE(m)->_as_doc_str); + Py_CLEAR(GETSTATE(m)->_utcoffset_str); + Py_CLEAR(GETSTATE(m)->_from_uuid_str); + Py_CLEAR(GETSTATE(m)->_as_uuid_str); + Py_CLEAR(GETSTATE(m)->_from_bid_str); + return 0; +} + +static struct PyModuleDef moduledef = { + PyModuleDef_HEAD_INIT, + "_cbson", + NULL, + sizeof(struct module_state), + _CBSONMethods, + NULL, + _cbson_traverse, + _cbson_clear, + NULL +}; + +PyMODINIT_FUNC +PyInit__cbson(void) +{ + PyObject *m; + PyObject *c_api_object; + static void *_cbson_API[_cbson_API_POINTER_COUNT]; + + PyDateTime_IMPORT; + if (PyDateTimeAPI == NULL) { + INITERROR; + } + + /* Export C API */ + _cbson_API[_cbson_buffer_write_bytes_INDEX] = (void *) buffer_write_bytes; + _cbson_API[_cbson_write_dict_INDEX] = (void *) write_dict; + _cbson_API[_cbson_write_pair_INDEX] = (void *) write_pair; + _cbson_API[_cbson_decode_and_write_pair_INDEX] = (void *) decode_and_write_pair; + _cbson_API[_cbson_convert_codec_options_INDEX] = (void *) convert_codec_options; + _cbson_API[_cbson_destroy_codec_options_INDEX] = (void *) destroy_codec_options; + _cbson_API[_cbson_buffer_write_double_INDEX] = (void *) buffer_write_double; + _cbson_API[_cbson_buffer_write_int32_INDEX] = (void *) buffer_write_int32; + _cbson_API[_cbson_buffer_write_int64_INDEX] = (void *) buffer_write_int64; + _cbson_API[_cbson_buffer_write_int32_at_position_INDEX] = + (void *) buffer_write_int32_at_position; + _cbson_API[_cbson_downcast_and_check_INDEX] = (void *) _downcast_and_check; + + c_api_object = PyCapsule_New((void *) _cbson_API, "_cbson._C_API", NULL); + if (c_api_object == NULL) + INITERROR; + + m = PyModule_Create(&moduledef); + if (m == NULL) { + Py_DECREF(c_api_object); + INITERROR; + } + + /* Import several python objects */ + if (_load_python_objects(m)) { + Py_DECREF(c_api_object); + Py_DECREF(m); + INITERROR; + } + + if (PyModule_AddObject(m, "_C_API", c_api_object) < 0) { + Py_DECREF(c_api_object); + Py_DECREF(m); + INITERROR; + } + + return m; +} diff --git a/backend/test/lib/python3.8/site-packages/bson/_cbsonmodule.h b/backend/test/lib/python3.8/site-packages/bson/_cbsonmodule.h new file mode 100644 index 0000000000000000000000000000000000000000..3be2b744276f7fab40372388bfaef4e82f542bc3 --- /dev/null +++ 
b/backend/test/lib/python3.8/site-packages/bson/_cbsonmodule.h @@ -0,0 +1,181 @@ +/* + * Copyright 2009-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "bson-endian.h" + +#ifndef _CBSONMODULE_H +#define _CBSONMODULE_H + +#if defined(WIN32) || defined(_MSC_VER) +/* + * This macro is basically an implementation of asprintf for win32 + * We print to the provided buffer to get the string value as an int. + * USE LL2STR. This is kept only to test LL2STR. + */ +#if defined(_MSC_VER) && (_MSC_VER >= 1400) +#define INT2STRING(buffer, i) \ + _snprintf_s((buffer), \ + _scprintf("%lld", (i)) + 1, \ + _scprintf("%lld", (i)) + 1, \ + "%lld", \ + (i)) +#define STRCAT(dest, n, src) strcat_s((dest), (n), (src)) +#else +#define INT2STRING(buffer, i) \ + _snprintf((buffer), \ + _scprintf("%lld", (i)) + 1, \ + "%lld", \ + (i)) +#define STRCAT(dest, n, src) strcat((dest), (src)) +#endif +#else +#define INT2STRING(buffer, i) snprintf((buffer), sizeof((buffer)), "%lld", (i)) +#define STRCAT(dest, n, src) strcat((dest), (src)) +#endif + +/* Just enough space in char array to hold LLONG_MIN and null terminator */ +#define BUF_SIZE 21 +/* Converts integer to its string representation in decimal notation. */ +extern int cbson_long_long_to_str(long long int num, char* str, size_t size); +#define LL2STR(buffer, i) cbson_long_long_to_str((i), (buffer), sizeof(buffer)) + +typedef struct type_registry_t { + PyObject* encoder_map; + PyObject* decoder_map; + PyObject* fallback_encoder; + PyObject* registry_obj; + unsigned char is_encoder_empty; + unsigned char is_decoder_empty; + unsigned char has_fallback_encoder; +} type_registry_t; + +typedef struct codec_options_t { + PyObject* document_class; + unsigned char tz_aware; + unsigned char uuid_rep; + char* unicode_decode_error_handler; + PyObject* tzinfo; + type_registry_t type_registry; + unsigned char datetime_conversion; + PyObject* options_obj; + unsigned char is_raw_bson; +} codec_options_t; + +/* C API functions */ +#define _cbson_buffer_write_bytes_INDEX 0 +#define _cbson_buffer_write_bytes_RETURN int +#define _cbson_buffer_write_bytes_PROTO (buffer_t buffer, const char* data, int size) + +#define _cbson_write_dict_INDEX 1 +#define _cbson_write_dict_RETURN int +#define _cbson_write_dict_PROTO (PyObject* self, buffer_t buffer, PyObject* dict, unsigned char check_keys, const codec_options_t* options, unsigned char top_level) + +#define _cbson_write_pair_INDEX 2 +#define _cbson_write_pair_RETURN int +#define _cbson_write_pair_PROTO (PyObject* self, buffer_t buffer, const char* name, int name_length, PyObject* value, unsigned char check_keys, const codec_options_t* options, unsigned char allow_id) + +#define _cbson_decode_and_write_pair_INDEX 3 +#define _cbson_decode_and_write_pair_RETURN int +#define _cbson_decode_and_write_pair_PROTO (PyObject* self, buffer_t buffer, PyObject* key, PyObject* value, unsigned char check_keys, const codec_options_t* options, unsigned char top_level) + +#define 
_cbson_convert_codec_options_INDEX 4 +#define _cbson_convert_codec_options_RETURN int +#define _cbson_convert_codec_options_PROTO (PyObject* self, PyObject* options_obj, codec_options_t* options) + +#define _cbson_destroy_codec_options_INDEX 5 +#define _cbson_destroy_codec_options_RETURN void +#define _cbson_destroy_codec_options_PROTO (codec_options_t* options) + +#define _cbson_buffer_write_double_INDEX 6 +#define _cbson_buffer_write_double_RETURN int +#define _cbson_buffer_write_double_PROTO (buffer_t buffer, double data) + +#define _cbson_buffer_write_int32_INDEX 7 +#define _cbson_buffer_write_int32_RETURN int +#define _cbson_buffer_write_int32_PROTO (buffer_t buffer, int32_t data) + +#define _cbson_buffer_write_int64_INDEX 8 +#define _cbson_buffer_write_int64_RETURN int +#define _cbson_buffer_write_int64_PROTO (buffer_t buffer, int64_t data) + +#define _cbson_buffer_write_int32_at_position_INDEX 9 +#define _cbson_buffer_write_int32_at_position_RETURN void +#define _cbson_buffer_write_int32_at_position_PROTO (buffer_t buffer, int position, int32_t data) + +#define _cbson_downcast_and_check_INDEX 10 +#define _cbson_downcast_and_check_RETURN int +#define _cbson_downcast_and_check_PROTO (Py_ssize_t size, uint8_t extra) + +/* Total number of C API pointers */ +#define _cbson_API_POINTER_COUNT 11 + +#ifdef _CBSON_MODULE +/* This section is used when compiling _cbsonmodule */ + +static _cbson_buffer_write_bytes_RETURN buffer_write_bytes _cbson_buffer_write_bytes_PROTO; + +static _cbson_write_dict_RETURN write_dict _cbson_write_dict_PROTO; + +static _cbson_write_pair_RETURN write_pair _cbson_write_pair_PROTO; + +static _cbson_decode_and_write_pair_RETURN decode_and_write_pair _cbson_decode_and_write_pair_PROTO; + +static _cbson_convert_codec_options_RETURN convert_codec_options _cbson_convert_codec_options_PROTO; + +static _cbson_destroy_codec_options_RETURN destroy_codec_options _cbson_destroy_codec_options_PROTO; + +static _cbson_buffer_write_double_RETURN buffer_write_double _cbson_buffer_write_double_PROTO; + +static _cbson_buffer_write_int32_RETURN buffer_write_int32 _cbson_buffer_write_int32_PROTO; + +static _cbson_buffer_write_int64_RETURN buffer_write_int64 _cbson_buffer_write_int64_PROTO; + +static _cbson_buffer_write_int32_at_position_RETURN buffer_write_int32_at_position _cbson_buffer_write_int32_at_position_PROTO; + +static _cbson_downcast_and_check_RETURN _downcast_and_check _cbson_downcast_and_check_PROTO; + +#else +/* This section is used in modules that use _cbsonmodule's API */ + +static void **_cbson_API; + +#define buffer_write_bytes (*(_cbson_buffer_write_bytes_RETURN (*)_cbson_buffer_write_bytes_PROTO) _cbson_API[_cbson_buffer_write_bytes_INDEX]) + +#define write_dict (*(_cbson_write_dict_RETURN (*)_cbson_write_dict_PROTO) _cbson_API[_cbson_write_dict_INDEX]) + +#define write_pair (*(_cbson_write_pair_RETURN (*)_cbson_write_pair_PROTO) _cbson_API[_cbson_write_pair_INDEX]) + +#define decode_and_write_pair (*(_cbson_decode_and_write_pair_RETURN (*)_cbson_decode_and_write_pair_PROTO) _cbson_API[_cbson_decode_and_write_pair_INDEX]) + +#define convert_codec_options (*(_cbson_convert_codec_options_RETURN (*)_cbson_convert_codec_options_PROTO) _cbson_API[_cbson_convert_codec_options_INDEX]) + +#define destroy_codec_options (*(_cbson_destroy_codec_options_RETURN (*)_cbson_destroy_codec_options_PROTO) _cbson_API[_cbson_destroy_codec_options_INDEX]) + +#define buffer_write_double (*(_cbson_buffer_write_double_RETURN (*)_cbson_buffer_write_double_PROTO) 
_cbson_API[_cbson_buffer_write_double_INDEX]) + +#define buffer_write_int32 (*(_cbson_buffer_write_int32_RETURN (*)_cbson_buffer_write_int32_PROTO) _cbson_API[_cbson_buffer_write_int32_INDEX]) + +#define buffer_write_int64 (*(_cbson_buffer_write_int64_RETURN (*)_cbson_buffer_write_int64_PROTO) _cbson_API[_cbson_buffer_write_int64_INDEX]) + +#define buffer_write_int32_at_position (*(_cbson_buffer_write_int32_at_position_RETURN (*)_cbson_buffer_write_int32_at_position_PROTO) _cbson_API[_cbson_buffer_write_int32_at_position_INDEX]) + +#define _downcast_and_check (*(_cbson_downcast_and_check_RETURN (*)_cbson_downcast_and_check_PROTO) _cbson_API[_cbson_downcast_and_check_INDEX]) + +#define _cbson_IMPORT _cbson_API = (void **)PyCapsule_Import("_cbson._C_API", 0) + +#endif + +#endif // _CBSONMODULE_H diff --git a/backend/test/lib/python3.8/site-packages/bson/_helpers.py b/backend/test/lib/python3.8/site-packages/bson/_helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..5643d77c242878b261d15d69d07a4d7cf1baded6 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/_helpers.py @@ -0,0 +1,41 @@ +# Copyright 2021-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Setstate and getstate functions for objects with __slots__, allowing +compatibility with default pickling protocol +""" +from typing import Any, Mapping + + +def _setstate_slots(self: Any, state: Any) -> None: + for slot, value in state.items(): + setattr(self, slot, value) + + +def _mangle_name(name: str, prefix: str) -> str: + if name.startswith("__"): + prefix = "_" + prefix + else: + prefix = "" + return prefix + name + + +def _getstate_slots(self: Any) -> Mapping[Any, Any]: + prefix = self.__class__.__name__ + ret = {} + for name in self.__slots__: + mangled_name = _mangle_name(name, prefix) + if hasattr(self, mangled_name): + ret[mangled_name] = getattr(self, mangled_name) + return ret diff --git a/backend/test/lib/python3.8/site-packages/bson/binary.py b/backend/test/lib/python3.8/site-packages/bson/binary.py new file mode 100644 index 0000000000000000000000000000000000000000..f8a475f8a3914f603774ac544034c89659a8e16d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/binary.py @@ -0,0 +1,369 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import TYPE_CHECKING, Any, Tuple, Type, Union +from uuid import UUID + +"""Tools for representing BSON binary data. 
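+
+A short usage sketch (illustrative only):
+
+    >>> from bson.binary import Binary
+    >>> b = Binary(b"\x00\x01")
+    >>> (bytes(b), b.subtype)
+    (b'\x00\x01', 0)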
+""" + +BINARY_SUBTYPE = 0 +"""BSON binary subtype for binary data. + +This is the default subtype for binary data. +""" + +FUNCTION_SUBTYPE = 1 +"""BSON binary subtype for functions. +""" + +OLD_BINARY_SUBTYPE = 2 +"""Old BSON binary subtype for binary data. + +This is the old default subtype, the current +default is :data:`BINARY_SUBTYPE`. +""" + +OLD_UUID_SUBTYPE = 3 +"""Old BSON binary subtype for a UUID. + +:class:`uuid.UUID` instances will automatically be encoded +by :mod:`bson` using this subtype when using +:data:`UuidRepresentation.PYTHON_LEGACY`, +:data:`UuidRepresentation.JAVA_LEGACY`, or +:data:`UuidRepresentation.CSHARP_LEGACY`. + +.. versionadded:: 2.1 +""" + +UUID_SUBTYPE = 4 +"""BSON binary subtype for a UUID. + +This is the standard BSON binary subtype for UUIDs. +:class:`uuid.UUID` instances will automatically be encoded +by :mod:`bson` using this subtype when using +:data:`UuidRepresentation.STANDARD`. +""" + + +if TYPE_CHECKING: + from array import array as _array + from mmap import mmap as _mmap + + +class UuidRepresentation: + UNSPECIFIED = 0 + """An unspecified UUID representation. + + When configured, :class:`uuid.UUID` instances will **not** be + automatically encoded to or decoded from :class:`~bson.binary.Binary`. + When encoding a :class:`uuid.UUID` instance, an error will be raised. + To encode a :class:`uuid.UUID` instance with this configuration, it must + be wrapped in the :class:`~bson.binary.Binary` class by the application + code. When decoding a BSON binary field with a UUID subtype, a + :class:`~bson.binary.Binary` instance will be returned instead of a + :class:`uuid.UUID` instance. + + See :ref:`unspecified-representation-details` for details. + + .. versionadded:: 3.11 + """ + + STANDARD = UUID_SUBTYPE + """The standard UUID representation. + + :class:`uuid.UUID` instances will automatically be encoded to + and decoded from BSON binary, using RFC-4122 byte order with + binary subtype :data:`UUID_SUBTYPE`. + + See :ref:`standard-representation-details` for details. + + .. versionadded:: 3.11 + """ + + PYTHON_LEGACY = OLD_UUID_SUBTYPE + """The Python legacy UUID representation. + + :class:`uuid.UUID` instances will automatically be encoded to + and decoded from BSON binary, using RFC-4122 byte order with + binary subtype :data:`OLD_UUID_SUBTYPE`. + + See :ref:`python-legacy-representation-details` for details. + + .. versionadded:: 3.11 + """ + + JAVA_LEGACY = 5 + """The Java legacy UUID representation. + + :class:`uuid.UUID` instances will automatically be encoded to + and decoded from BSON binary subtype :data:`OLD_UUID_SUBTYPE`, + using the Java driver's legacy byte order. + + See :ref:`java-legacy-representation-details` for details. + + .. versionadded:: 3.11 + """ + + CSHARP_LEGACY = 6 + """The C#/.net legacy UUID representation. + + :class:`uuid.UUID` instances will automatically be encoded to + and decoded from BSON binary subtype :data:`OLD_UUID_SUBTYPE`, + using the C# driver's legacy byte order. + + See :ref:`csharp-legacy-representation-details` for details. + + .. versionadded:: 3.11 + """ + + +STANDARD = UuidRepresentation.STANDARD +"""An alias for :data:`UuidRepresentation.STANDARD`. + +.. versionadded:: 3.0 +""" + +PYTHON_LEGACY = UuidRepresentation.PYTHON_LEGACY +"""An alias for :data:`UuidRepresentation.PYTHON_LEGACY`. + +.. versionadded:: 3.0 +""" + +JAVA_LEGACY = UuidRepresentation.JAVA_LEGACY +"""An alias for :data:`UuidRepresentation.JAVA_LEGACY`. + +.. 
versionchanged:: 3.6 + BSON binary subtype 4 is decoded using RFC-4122 byte order. +.. versionadded:: 2.3 +""" + +CSHARP_LEGACY = UuidRepresentation.CSHARP_LEGACY +"""An alias for :data:`UuidRepresentation.CSHARP_LEGACY`. + +.. versionchanged:: 3.6 + BSON binary subtype 4 is decoded using RFC-4122 byte order. +.. versionadded:: 2.3 +""" + +ALL_UUID_SUBTYPES = (OLD_UUID_SUBTYPE, UUID_SUBTYPE) +ALL_UUID_REPRESENTATIONS = ( + UuidRepresentation.UNSPECIFIED, + UuidRepresentation.STANDARD, + UuidRepresentation.PYTHON_LEGACY, + UuidRepresentation.JAVA_LEGACY, + UuidRepresentation.CSHARP_LEGACY, +) +UUID_REPRESENTATION_NAMES = { + UuidRepresentation.UNSPECIFIED: "UuidRepresentation.UNSPECIFIED", + UuidRepresentation.STANDARD: "UuidRepresentation.STANDARD", + UuidRepresentation.PYTHON_LEGACY: "UuidRepresentation.PYTHON_LEGACY", + UuidRepresentation.JAVA_LEGACY: "UuidRepresentation.JAVA_LEGACY", + UuidRepresentation.CSHARP_LEGACY: "UuidRepresentation.CSHARP_LEGACY", +} + +MD5_SUBTYPE = 5 +"""BSON binary subtype for an MD5 hash. +""" + +COLUMN_SUBTYPE = 7 +"""BSON binary subtype for columns. + +.. versionadded:: 4.0 +""" + +SENSITIVE_SUBTYPE = 8 +"""BSON binary subtype for sensitive data. + +.. versionadded:: 4.5 +""" + + +USER_DEFINED_SUBTYPE = 128 +"""BSON binary subtype for any user defined structure. +""" + + +class Binary(bytes): + """Representation of BSON binary data. + + This is necessary because we want to represent Python strings as + the BSON string type. We need to wrap binary data so we can tell + the difference between what should be considered binary data and + what should be considered a string when we encode to BSON. + + Raises TypeError if `data` is not an instance of :class:`bytes` + or `subtype` is not an instance of :class:`int`. + Raises ValueError if `subtype` is not in [0, 256). + + .. note:: + Instances of Binary with subtype 0 will be decoded directly to :class:`bytes`. + + :Parameters: + - `data`: the binary data to represent. Can be any bytes-like type + that implements the buffer protocol. + - `subtype` (optional): the `binary subtype + <https://bsonspec.org/spec.html>`_ + to use + + .. versionchanged:: 3.9 + Support any bytes-like type that implements the buffer protocol. + """ + + _type_marker = 5 + __subtype: int + + def __new__( + cls: Type["Binary"], + data: Union[memoryview, bytes, "_mmap", "_array"], + subtype: int = BINARY_SUBTYPE, + ) -> "Binary": + if not isinstance(subtype, int): + raise TypeError("subtype must be an instance of int") + if subtype >= 256 or subtype < 0: + raise ValueError("subtype must be contained in [0, 256)") + # Support any type that implements the buffer protocol. + self = bytes.__new__(cls, memoryview(data).tobytes()) + self.__subtype = subtype + return self + + @classmethod + def from_uuid( + cls: Type["Binary"], uuid: UUID, uuid_representation: int = UuidRepresentation.STANDARD + ) -> "Binary": + """Create a BSON Binary object from a Python UUID. + + Creates a :class:`~bson.binary.Binary` object from a + :class:`uuid.UUID` instance. Assumes that the native + :class:`uuid.UUID` instance uses the byte-order implied by the + provided ``uuid_representation``. + + Raises :exc:`TypeError` if `uuid` is not an instance of + :class:`~uuid.UUID`. + + :Parameters: + - `uuid`: A :class:`uuid.UUID` instance. + - `uuid_representation`: A member of + :class:`~bson.binary.UuidRepresentation`. Default: + :const:`~bson.binary.UuidRepresentation.STANDARD`. + See :ref:`handling-uuid-data-example` for details. + + .. 
versionadded:: 3.11 + """ + if not isinstance(uuid, UUID): + raise TypeError("uuid must be an instance of uuid.UUID") + + if uuid_representation not in ALL_UUID_REPRESENTATIONS: + raise ValueError( + "uuid_representation must be a value from bson.binary.UuidRepresentation" + ) + + if uuid_representation == UuidRepresentation.UNSPECIFIED: + raise ValueError( + "cannot encode native uuid.UUID with " + "UuidRepresentation.UNSPECIFIED. UUIDs can be manually " + "converted to bson.Binary instances using " + "bson.Binary.from_uuid() or a different UuidRepresentation " + "can be configured. See the documentation for " + "UuidRepresentation for more information." + ) + + subtype = OLD_UUID_SUBTYPE + if uuid_representation == UuidRepresentation.PYTHON_LEGACY: + payload = uuid.bytes + elif uuid_representation == UuidRepresentation.JAVA_LEGACY: + from_uuid = uuid.bytes + payload = from_uuid[0:8][::-1] + from_uuid[8:16][::-1] + elif uuid_representation == UuidRepresentation.CSHARP_LEGACY: + payload = uuid.bytes_le + else: + # uuid_representation == UuidRepresentation.STANDARD + subtype = UUID_SUBTYPE + payload = uuid.bytes + + return cls(payload, subtype) + + def as_uuid(self, uuid_representation: int = UuidRepresentation.STANDARD) -> UUID: + """Create a Python UUID from this BSON Binary object. + + Decodes this binary object as a native :class:`uuid.UUID` instance + with the provided ``uuid_representation``. + + Raises :exc:`ValueError` if this :class:`~bson.binary.Binary` instance + does not contain a UUID. + + :Parameters: + - `uuid_representation`: A member of + :class:`~bson.binary.UuidRepresentation`. Default: + :const:`~bson.binary.UuidRepresentation.STANDARD`. + See :ref:`handling-uuid-data-example` for details. + + .. versionadded:: 3.11 + """ + if self.subtype not in ALL_UUID_SUBTYPES: + raise ValueError(f"cannot decode subtype {self.subtype} as a uuid") + + if uuid_representation not in ALL_UUID_REPRESENTATIONS: + raise ValueError( + "uuid_representation must be a value from bson.binary.UuidRepresentation" + ) + + if uuid_representation == UuidRepresentation.UNSPECIFIED: + raise ValueError("uuid_representation cannot be UNSPECIFIED") + elif uuid_representation == UuidRepresentation.PYTHON_LEGACY: + if self.subtype == OLD_UUID_SUBTYPE: + return UUID(bytes=self) + elif uuid_representation == UuidRepresentation.JAVA_LEGACY: + if self.subtype == OLD_UUID_SUBTYPE: + return UUID(bytes=self[0:8][::-1] + self[8:16][::-1]) + elif uuid_representation == UuidRepresentation.CSHARP_LEGACY: + if self.subtype == OLD_UUID_SUBTYPE: + return UUID(bytes_le=self) + else: + # uuid_representation == UuidRepresentation.STANDARD + if self.subtype == UUID_SUBTYPE: + return UUID(bytes=self) + + raise ValueError( + f"cannot decode subtype {self.subtype} to {UUID_REPRESENTATION_NAMES[uuid_representation]}" + ) + + @property + def subtype(self) -> int: + """Subtype of this binary data.""" + return self.__subtype + + def __getnewargs__(self) -> Tuple[bytes, int]: # type: ignore[override] + # Work around http://bugs.python.org/issue7382 + data = super().__getnewargs__()[0] + if not isinstance(data, bytes): + data = data.encode("latin-1") + return data, self.__subtype + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Binary): + return (self.__subtype, bytes(self)) == (other.subtype, bytes(other)) + # We don't return NotImplemented here because if we did then + # Binary("foo") == "foo" would return True, since Binary is a + # subclass of str... 
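        # (With Python 3's bytes the same hazard applies: returning
        # NotImplemented would let bytes.__eq__ report
        # Binary(b"foo") == b"foo" as True while ignoring the subtype.
        # As written, for example, Binary(b"foo") == Binary(b"foo") is True
        # but Binary(b"foo") == Binary(b"foo", USER_DEFINED_SUBTYPE) is
        # False because the subtypes differ.)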
+ return False + + def __hash__(self) -> int: + return super().__hash__() ^ hash(self.__subtype) + + def __ne__(self, other: Any) -> bool: + return not self == other + + def __repr__(self) -> str: + return f"Binary({bytes.__repr__(self)}, {self.__subtype})" diff --git a/backend/test/lib/python3.8/site-packages/bson/bson-endian.h b/backend/test/lib/python3.8/site-packages/bson/bson-endian.h new file mode 100644 index 0000000000000000000000000000000000000000..e906b0776f8f4ca8118cc6893f8b689eebc6109f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/bson-endian.h @@ -0,0 +1,233 @@ +/* + * Copyright 2013-2016 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#ifndef BSON_ENDIAN_H +#define BSON_ENDIAN_H + + +#if defined(__sun) +# include <sys/byteorder.h> +#endif + + +#ifdef _MSC_VER +# define BSON_INLINE __inline +#else +# include <stdint.h> +# define BSON_INLINE __inline__ +#endif + + +#define BSON_BIG_ENDIAN 4321 +#define BSON_LITTLE_ENDIAN 1234 + + +/* WORDS_BIGENDIAN from pyconfig.h / Python.h */ +#ifdef WORDS_BIGENDIAN +# define BSON_BYTE_ORDER BSON_BIG_ENDIAN +#else +# define BSON_BYTE_ORDER BSON_LITTLE_ENDIAN +#endif + + +#if defined(__sun) +# define BSON_UINT16_SWAP_LE_BE(v) BSWAP_16((uint16_t)v) +# define BSON_UINT32_SWAP_LE_BE(v) BSWAP_32((uint32_t)v) +# define BSON_UINT64_SWAP_LE_BE(v) BSWAP_64((uint64_t)v) +#elif defined(__clang__) && defined(__clang_major__) && defined(__clang_minor__) && \ + (__clang_major__ >= 3) && (__clang_minor__ >= 1) +# if __has_builtin(__builtin_bswap16) +# define BSON_UINT16_SWAP_LE_BE(v) __builtin_bswap16(v) +# endif +# if __has_builtin(__builtin_bswap32) +# define BSON_UINT32_SWAP_LE_BE(v) __builtin_bswap32(v) +# endif +# if __has_builtin(__builtin_bswap64) +# define BSON_UINT64_SWAP_LE_BE(v) __builtin_bswap64(v) +# endif +#elif defined(__GNUC__) && (__GNUC__ >= 4) +# if __GNUC__ >= 4 && defined (__GNUC_MINOR__) && __GNUC_MINOR__ >= 3 +# define BSON_UINT32_SWAP_LE_BE(v) __builtin_bswap32 ((uint32_t)v) +# define BSON_UINT64_SWAP_LE_BE(v) __builtin_bswap64 ((uint64_t)v) +# endif +# if __GNUC__ >= 4 && defined (__GNUC_MINOR__) && __GNUC_MINOR__ >= 8 +# define BSON_UINT16_SWAP_LE_BE(v) __builtin_bswap16 ((uint32_t)v) +# endif +#endif + + +#ifndef BSON_UINT16_SWAP_LE_BE +# define BSON_UINT16_SWAP_LE_BE(v) __bson_uint16_swap_slow ((uint16_t)v) +#endif + + +#ifndef BSON_UINT32_SWAP_LE_BE +# define BSON_UINT32_SWAP_LE_BE(v) __bson_uint32_swap_slow ((uint32_t)v) +#endif + + +#ifndef BSON_UINT64_SWAP_LE_BE +# define BSON_UINT64_SWAP_LE_BE(v) __bson_uint64_swap_slow ((uint64_t)v) +#endif + + +#if BSON_BYTE_ORDER == BSON_LITTLE_ENDIAN +# define BSON_UINT16_FROM_LE(v) ((uint16_t)v) +# define BSON_UINT16_TO_LE(v) ((uint16_t)v) +# define BSON_UINT16_FROM_BE(v) BSON_UINT16_SWAP_LE_BE (v) +# define BSON_UINT16_TO_BE(v) BSON_UINT16_SWAP_LE_BE (v) +# define BSON_UINT32_FROM_LE(v) ((uint32_t)v) +# define BSON_UINT32_TO_LE(v) ((uint32_t)v) +# define BSON_UINT32_FROM_BE(v) BSON_UINT32_SWAP_LE_BE (v) +# define BSON_UINT32_TO_BE(v) 
BSON_UINT32_SWAP_LE_BE (v) +# define BSON_UINT64_FROM_LE(v) ((uint64_t)v) +# define BSON_UINT64_TO_LE(v) ((uint64_t)v) +# define BSON_UINT64_FROM_BE(v) BSON_UINT64_SWAP_LE_BE (v) +# define BSON_UINT64_TO_BE(v) BSON_UINT64_SWAP_LE_BE (v) +# define BSON_DOUBLE_FROM_LE(v) ((double)v) +# define BSON_DOUBLE_TO_LE(v) ((double)v) +#elif BSON_BYTE_ORDER == BSON_BIG_ENDIAN +# define BSON_UINT16_FROM_LE(v) BSON_UINT16_SWAP_LE_BE (v) +# define BSON_UINT16_TO_LE(v) BSON_UINT16_SWAP_LE_BE (v) +# define BSON_UINT16_FROM_BE(v) ((uint16_t)v) +# define BSON_UINT16_TO_BE(v) ((uint16_t)v) +# define BSON_UINT32_FROM_LE(v) BSON_UINT32_SWAP_LE_BE (v) +# define BSON_UINT32_TO_LE(v) BSON_UINT32_SWAP_LE_BE (v) +# define BSON_UINT32_FROM_BE(v) ((uint32_t)v) +# define BSON_UINT32_TO_BE(v) ((uint32_t)v) +# define BSON_UINT64_FROM_LE(v) BSON_UINT64_SWAP_LE_BE (v) +# define BSON_UINT64_TO_LE(v) BSON_UINT64_SWAP_LE_BE (v) +# define BSON_UINT64_FROM_BE(v) ((uint64_t)v) +# define BSON_UINT64_TO_BE(v) ((uint64_t)v) +# define BSON_DOUBLE_FROM_LE(v) (__bson_double_swap_slow (v)) +# define BSON_DOUBLE_TO_LE(v) (__bson_double_swap_slow (v)) +#else +# error "The endianness of target architecture is unknown." +#endif + + +/* + *-------------------------------------------------------------------------- + * + * __bson_uint16_swap_slow -- + * + * Fallback endianness conversion for 16-bit integers. + * + * Returns: + * The endian swapped version. + * + * Side effects: + * None. + * + *-------------------------------------------------------------------------- + */ + +static BSON_INLINE uint16_t +__bson_uint16_swap_slow (uint16_t v) /* IN */ +{ + return ((v & 0x00FF) << 8) | + ((v & 0xFF00) >> 8); +} + + +/* + *-------------------------------------------------------------------------- + * + * __bson_uint32_swap_slow -- + * + * Fallback endianness conversion for 32-bit integers. + * + * Returns: + * The endian swapped version. + * + * Side effects: + * None. + * + *-------------------------------------------------------------------------- + */ + +static BSON_INLINE uint32_t +__bson_uint32_swap_slow (uint32_t v) /* IN */ +{ + return ((v & 0x000000FFU) << 24) | + ((v & 0x0000FF00U) << 8) | + ((v & 0x00FF0000U) >> 8) | + ((v & 0xFF000000U) >> 24); +} + + +/* + *-------------------------------------------------------------------------- + * + * __bson_uint64_swap_slow -- + * + * Fallback endianness conversion for 64-bit integers. + * + * Returns: + * The endian swapped version. + * + * Side effects: + * None. + * + *-------------------------------------------------------------------------- + */ + +static BSON_INLINE uint64_t +__bson_uint64_swap_slow (uint64_t v) /* IN */ +{ + return ((v & 0x00000000000000FFULL) << 56) | + ((v & 0x000000000000FF00ULL) << 40) | + ((v & 0x0000000000FF0000ULL) << 24) | + ((v & 0x00000000FF000000ULL) << 8) | + ((v & 0x000000FF00000000ULL) >> 8) | + ((v & 0x0000FF0000000000ULL) >> 24) | + ((v & 0x00FF000000000000ULL) >> 40) | + ((v & 0xFF00000000000000ULL) >> 56); +} + + +/* + *-------------------------------------------------------------------------- + * + * __bson_double_swap_slow -- + * + * Fallback endianness conversion for double floating point. + * + * Returns: + * The endian swapped version. + * + * Side effects: + * None. 
+ * + *-------------------------------------------------------------------------- + */ + + +static BSON_INLINE double +__bson_double_swap_slow (double v) /* IN */ +{ + uint64_t uv; + + memcpy(&uv, &v, sizeof(v)); + uv = BSON_UINT64_SWAP_LE_BE(uv); + memcpy(&v, &uv, sizeof(v)); + + return v; +} + + +#endif /* BSON_ENDIAN_H */ diff --git a/backend/test/lib/python3.8/site-packages/bson/buffer.c b/backend/test/lib/python3.8/site-packages/bson/buffer.c new file mode 100644 index 0000000000000000000000000000000000000000..cc75202746a4273dc1333436009fa5620544b023 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/buffer.c @@ -0,0 +1,157 @@ +/* + * Copyright 2009-2015 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* Include Python.h so we can set Python's error indicator. */ +#define PY_SSIZE_T_CLEAN +#include "Python.h" + +#include <stdlib.h> +#include <string.h> + +#include "buffer.h" + +#define INITIAL_BUFFER_SIZE 256 + +struct buffer { + char* buffer; + int size; + int position; +}; + +/* Set Python's error indicator to MemoryError. + * Called after allocation failures. */ +static void set_memory_error(void) { + PyErr_NoMemory(); +} + +/* Allocate and return a new buffer. + * Return NULL and sets MemoryError on allocation failure. */ +buffer_t pymongo_buffer_new(void) { + buffer_t buffer; + buffer = (buffer_t)malloc(sizeof(struct buffer)); + if (buffer == NULL) { + set_memory_error(); + return NULL; + } + + buffer->size = INITIAL_BUFFER_SIZE; + buffer->position = 0; + buffer->buffer = (char*)malloc(sizeof(char) * INITIAL_BUFFER_SIZE); + if (buffer->buffer == NULL) { + free(buffer); + set_memory_error(); + return NULL; + } + + return buffer; +} + +/* Free the memory allocated for `buffer`. + * Return non-zero on failure. */ +int pymongo_buffer_free(buffer_t buffer) { + if (buffer == NULL) { + return 1; + } + /* Buffer will be NULL when buffer_grow fails. */ + if (buffer->buffer != NULL) { + free(buffer->buffer); + } + free(buffer); + return 0; +} + +/* Grow `buffer` to at least `min_length`. + * Return non-zero and sets MemoryError on allocation failure. */ +static int buffer_grow(buffer_t buffer, int min_length) { + int old_size = 0; + int size = buffer->size; + char* old_buffer = buffer->buffer; + if (size >= min_length) { + return 0; + } + while (size < min_length) { + old_size = size; + size *= 2; + if (size <= old_size) { + /* Size did not increase. Could be an overflow + * or size < 1. Just go with min_length. */ + size = min_length; + } + } + buffer->buffer = (char*)realloc(buffer->buffer, sizeof(char) * size); + if (buffer->buffer == NULL) { + free(old_buffer); + set_memory_error(); + return 1; + } + buffer->size = size; + return 0; +} + +/* Assure that `buffer` has at least `size` free bytes (and grow if needed). + * Return non-zero and sets MemoryError on allocation failure. + * Return non-zero and sets ValueError if `size` would exceed 2GiB. 
*/ +static int buffer_assure_space(buffer_t buffer, int size) { + int new_size = buffer->position + size; + /* Check for overflow. */ + if (new_size < buffer->position) { + PyErr_SetString(PyExc_ValueError, + "Document would overflow BSON size limit"); + return 1; + } + + if (new_size <= buffer->size) { + return 0; + } + return buffer_grow(buffer, new_size); +} + +/* Save `size` bytes from the current position in `buffer` (and grow if needed). + * Return offset for writing, or -1 on failure. + * Sets MemoryError or ValueError on failure. */ +buffer_position pymongo_buffer_save_space(buffer_t buffer, int size) { + int position = buffer->position; + if (buffer_assure_space(buffer, size) != 0) { + return -1; + } + buffer->position += size; + return position; +} + +/* Write `size` bytes from `data` to `buffer` (and grow if needed). + * Return non-zero on failure. + * Sets MemoryError or ValueError on failure. */ +int pymongo_buffer_write(buffer_t buffer, const char* data, int size) { + if (buffer_assure_space(buffer, size) != 0) { + return 1; + } + + memcpy(buffer->buffer + buffer->position, data, size); + buffer->position += size; + return 0; +} + +int pymongo_buffer_get_position(buffer_t buffer) { + return buffer->position; +} + +char* pymongo_buffer_get_buffer(buffer_t buffer) { + return buffer->buffer; +} + +void pymongo_buffer_update_position(buffer_t buffer, buffer_position new_position) { + buffer->position = new_position; +} diff --git a/backend/test/lib/python3.8/site-packages/bson/buffer.h b/backend/test/lib/python3.8/site-packages/bson/buffer.h new file mode 100644 index 0000000000000000000000000000000000000000..a78e34e4deb1f68911302594e36d95edce883fca --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/buffer.h @@ -0,0 +1,51 @@ +/* + * Copyright 2009-2015 MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef BUFFER_H +#define BUFFER_H + +/* Note: if any of these functions return a failure condition then the buffer + * has already been freed. */ + +/* A buffer */ +typedef struct buffer* buffer_t; +/* A position in the buffer */ +typedef int buffer_position; + +/* Allocate and return a new buffer. + * Return NULL on allocation failure. */ +buffer_t pymongo_buffer_new(void); + +/* Free the memory allocated for `buffer`. + * Return non-zero on failure. */ +int pymongo_buffer_free(buffer_t buffer); + +/* Save `size` bytes from the current position in `buffer` (and grow if needed). + * Return offset for writing, or -1 on allocation failure. */ +buffer_position pymongo_buffer_save_space(buffer_t buffer, int size); + +/* Write `size` bytes from `data` to `buffer` (and grow if needed). + * Return non-zero on allocation failure. */ +int pymongo_buffer_write(buffer_t buffer, const char* data, int size); + +/* Getters for the internals of a buffer_t. + * Should try to avoid using these as much as possible + * since they break the abstraction. 
*/ +buffer_position pymongo_buffer_get_position(buffer_t buffer); +char* pymongo_buffer_get_buffer(buffer_t buffer); +void pymongo_buffer_update_position(buffer_t buffer, buffer_position new_position); + +#endif diff --git a/backend/test/lib/python3.8/site-packages/bson/code.py b/backend/test/lib/python3.8/site-packages/bson/code.py new file mode 100644 index 0000000000000000000000000000000000000000..26bed0103d44213b1316cc30ce05ade8fade4534 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/code.py @@ -0,0 +1,100 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for representing JavaScript code in BSON.""" + +from collections.abc import Mapping as _Mapping +from typing import Any, Mapping, Optional, Type, Union + + +class Code(str): + """BSON's JavaScript code type. + + Raises :class:`TypeError` if `code` is not an instance of + :class:`str` or `scope` is not ``None`` or an instance + of :class:`dict`. + + Scope variables can be set by passing a dictionary as the `scope` + argument or by using keyword arguments. If a variable is set as a + keyword argument it will override any setting for that variable in + the `scope` dictionary. + + :Parameters: + - `code`: A string containing JavaScript code to be evaluated or another + instance of Code. In the latter case, the scope of `code` becomes this + Code's :attr:`scope`. + - `scope` (optional): dictionary representing the scope in which + `code` should be evaluated - a mapping from identifiers (as + strings) to values. Defaults to ``None``. This is applied after any + scope associated with a given `code` above. + - `**kwargs` (optional): scope variables can also be passed as + keyword arguments. These are applied after `scope` and `code`. + + .. versionchanged:: 3.4 + The default value for :attr:`scope` is ``None`` instead of ``{}``. 
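    For example (an illustrative sketch; note that keyword arguments take
    precedence over the `scope` dictionary)::

        >>> c = Code("function () { return x; }", scope={"x": 1}, x=2)
        >>> str(c)
        'function () { return x; }'
        >>> c.scope
        {'x': 2}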
+ + """ + + _type_marker = 13 + __scope: Union[Mapping[str, Any], None] + + def __new__( + cls: Type["Code"], + code: Union[str, "Code"], + scope: Optional[Mapping[str, Any]] = None, + **kwargs: Any, + ) -> "Code": + if not isinstance(code, str): + raise TypeError("code must be an instance of str") + + self = str.__new__(cls, code) + + try: + self.__scope = code.scope # type: ignore + except AttributeError: + self.__scope = None + + if scope is not None: + if not isinstance(scope, _Mapping): + raise TypeError("scope must be an instance of dict") + if self.__scope is not None: + self.__scope.update(scope) # type: ignore + else: + self.__scope = scope + + if kwargs: + if self.__scope is not None: + self.__scope.update(kwargs) # type: ignore + else: + self.__scope = kwargs + + return self + + @property + def scope(self) -> Optional[Mapping[str, Any]]: + """Scope dictionary for this instance or ``None``.""" + return self.__scope + + def __repr__(self) -> str: + return f"Code({str.__repr__(self)}, {self.__scope!r})" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Code): + return (self.__scope, str(self)) == (other.__scope, str(other)) + return False + + __hash__: Any = None + + def __ne__(self, other: Any) -> bool: + return not self == other diff --git a/backend/test/lib/python3.8/site-packages/bson/codec_options.py b/backend/test/lib/python3.8/site-packages/bson/codec_options.py new file mode 100644 index 0000000000000000000000000000000000000000..9c511b5d6fcd80941ff18c207ec5e400e48febd9 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/codec_options.py @@ -0,0 +1,507 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for specifying BSON codec options.""" + +import abc +import datetime +import enum +from collections.abc import MutableMapping as _MutableMapping +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Generic, + Iterable, + Mapping, + NamedTuple, + Optional, + Tuple, + Type, + Union, + cast, +) + +from bson.binary import ( + ALL_UUID_REPRESENTATIONS, + UUID_REPRESENTATION_NAMES, + UuidRepresentation, +) +from bson.typings import _DocumentType + +_RAW_BSON_DOCUMENT_MARKER = 101 + + +def _raw_document_class(document_class: Any) -> bool: + """Determine if a document_class is a RawBSONDocument class.""" + marker = getattr(document_class, "_type_marker", None) + return marker == _RAW_BSON_DOCUMENT_MARKER + + +class TypeEncoder(abc.ABC): + """Base class for defining type codec classes which describe how a + custom type can be transformed to one of the types BSON understands. + + Codec classes must implement the ``python_type`` attribute, and the + ``transform_python`` method to support encoding. + + See :ref:`custom-type-type-codec` documentation for an example. 
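    A minimal sketch (the ``DecimalEncoder`` name and the
    :class:`decimal.Decimal` to :class:`~bson.decimal128.Decimal128`
    mapping are illustrative, not part of this module)::

        from decimal import Decimal

        from bson.codec_options import TypeEncoder
        from bson.decimal128 import Decimal128

        class DecimalEncoder(TypeEncoder):
            python_type = Decimal  # the custom Python type to act on

            def transform_python(self, value):
                # Return something bson already knows how to encode.
                return Decimal128(value)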
+ """ + + @abc.abstractproperty + def python_type(self) -> Any: + """The Python type to be converted into something serializable.""" + + @abc.abstractmethod + def transform_python(self, value: Any) -> Any: + """Convert the given Python object into something serializable.""" + + +class TypeDecoder(abc.ABC): + """Base class for defining type codec classes which describe how a + BSON type can be transformed to a custom type. + + Codec classes must implement the ``bson_type`` attribute, and the + ``transform_bson`` method to support decoding. + + See :ref:`custom-type-type-codec` documentation for an example. + """ + + @abc.abstractproperty + def bson_type(self) -> Any: + """The BSON type to be converted into our own type.""" + + @abc.abstractmethod + def transform_bson(self, value: Any) -> Any: + """Convert the given BSON value into our own type.""" + + +class TypeCodec(TypeEncoder, TypeDecoder): + """Base class for defining type codec classes which describe how a + custom type can be transformed to/from one of the types :mod:`bson` + can already encode/decode. + + Codec classes must implement the ``python_type`` attribute, and the + ``transform_python`` method to support encoding, as well as the + ``bson_type`` attribute, and the ``transform_bson`` method to support + decoding. + + See :ref:`custom-type-type-codec` documentation for an example. + """ + + +_Codec = Union[TypeEncoder, TypeDecoder, TypeCodec] +_Fallback = Callable[[Any], Any] + + +class TypeRegistry: + """Encapsulates type codecs used in encoding and / or decoding BSON, as + well as the fallback encoder. Type registries cannot be modified after + instantiation. + + ``TypeRegistry`` can be initialized with an iterable of type codecs, and + a callable for the fallback encoder:: + + >>> from bson.codec_options import TypeRegistry + >>> type_registry = TypeRegistry([Codec1, Codec2, Codec3, ...], + ... fallback_encoder) + + See :ref:`custom-type-type-registry` documentation for an example. + + :Parameters: + - `type_codecs` (optional): iterable of type codec instances. If + ``type_codecs`` contains multiple codecs that transform a single + python or BSON type, the transformation specified by the type codec + occurring last prevails. A TypeError will be raised if one or more + type codecs modify the encoding behavior of a built-in :mod:`bson` + type. + - `fallback_encoder` (optional): callable that accepts a single, + unencodable python value and transforms it into a type that + :mod:`bson` can encode. See :ref:`fallback-encoder-callable` + documentation for an example. 
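    A fallback encoder is an ordinary callable; a minimal sketch::

        >>> from decimal import Decimal
        >>> from bson.decimal128 import Decimal128
        >>> def fallback_encoder(value):
        ...     if isinstance(value, Decimal):
        ...         return Decimal128(value)
        ...     return value
        ...
        >>> type_registry = TypeRegistry(fallback_encoder=fallback_encoder)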
+ """ + + def __init__( + self, + type_codecs: Optional[Iterable[_Codec]] = None, + fallback_encoder: Optional[_Fallback] = None, + ) -> None: + self.__type_codecs = list(type_codecs or []) + self._fallback_encoder = fallback_encoder + self._encoder_map: Dict[Any, Any] = {} + self._decoder_map: Dict[Any, Any] = {} + + if self._fallback_encoder is not None: + if not callable(fallback_encoder): + raise TypeError("fallback_encoder %r is not a callable" % (fallback_encoder)) + + for codec in self.__type_codecs: + is_valid_codec = False + if isinstance(codec, TypeEncoder): + self._validate_type_encoder(codec) + is_valid_codec = True + self._encoder_map[codec.python_type] = codec.transform_python + if isinstance(codec, TypeDecoder): + is_valid_codec = True + self._decoder_map[codec.bson_type] = codec.transform_bson + if not is_valid_codec: + raise TypeError( + f"Expected an instance of {TypeEncoder.__name__}, {TypeDecoder.__name__}, or {TypeCodec.__name__}, got {codec!r} instead" + ) + + def _validate_type_encoder(self, codec: _Codec) -> None: + from bson import _BUILT_IN_TYPES + + for pytype in _BUILT_IN_TYPES: + if issubclass(cast(TypeCodec, codec).python_type, pytype): + err_msg = ( + "TypeEncoders cannot change how built-in types are " + "encoded (encoder {} transforms type {})".format(codec, pytype) + ) + raise TypeError(err_msg) + + def __repr__(self) -> str: + return "{}(type_codecs={!r}, fallback_encoder={!r})".format( + self.__class__.__name__, + self.__type_codecs, + self._fallback_encoder, + ) + + def __eq__(self, other: Any) -> Any: + if not isinstance(other, type(self)): + return NotImplemented + return ( + (self._decoder_map == other._decoder_map) + and (self._encoder_map == other._encoder_map) + and (self._fallback_encoder == other._fallback_encoder) + ) + + +class DatetimeConversion(int, enum.Enum): + """Options for decoding BSON datetimes.""" + + DATETIME = 1 + """Decode a BSON UTC datetime as a :class:`datetime.datetime`. + + BSON UTC datetimes that cannot be represented as a + :class:`~datetime.datetime` will raise an :class:`OverflowError` + or a :class:`ValueError`. + + .. versionadded 4.3 + """ + + DATETIME_CLAMP = 2 + """Decode a BSON UTC datetime as a :class:`datetime.datetime`, clamping + to :attr:`~datetime.datetime.min` and :attr:`~datetime.datetime.max`. + + .. versionadded 4.3 + """ + + DATETIME_MS = 3 + """Decode a BSON UTC datetime as a :class:`~bson.datetime_ms.DatetimeMS` + object. + + .. versionadded 4.3 + """ + + DATETIME_AUTO = 4 + """Decode a BSON UTC datetime as a :class:`datetime.datetime` if possible, + and a :class:`~bson.datetime_ms.DatetimeMS` if not. + + .. 
versionadded 4.3 + """ + + +class _BaseCodecOptions(NamedTuple): + document_class: Type[Mapping[str, Any]] + tz_aware: bool + uuid_representation: int + unicode_decode_error_handler: str + tzinfo: Optional[datetime.tzinfo] + type_registry: TypeRegistry + datetime_conversion: Optional[DatetimeConversion] + + +if TYPE_CHECKING: + + class CodecOptions(Tuple, Generic[_DocumentType]): + document_class: Type[_DocumentType] + tz_aware: bool + uuid_representation: int + unicode_decode_error_handler: Optional[str] + tzinfo: Optional[datetime.tzinfo] + type_registry: TypeRegistry + datetime_conversion: Optional[int] + + def __new__( + cls: Type["CodecOptions"], + document_class: Optional[Type[_DocumentType]] = ..., + tz_aware: bool = ..., + uuid_representation: Optional[int] = ..., + unicode_decode_error_handler: Optional[str] = ..., + tzinfo: Optional[datetime.tzinfo] = ..., + type_registry: Optional[TypeRegistry] = ..., + datetime_conversion: Optional[int] = ..., + ) -> "CodecOptions[_DocumentType]": + ... + + # CodecOptions API + def with_options(self, **kwargs: Any) -> "CodecOptions[_DocumentType]": + ... + + def _arguments_repr(self) -> str: + ... + + def _options_dict(self) -> Dict[Any, Any]: + ... + + # NamedTuple API + @classmethod + def _make(cls, obj: Iterable) -> "CodecOptions[_DocumentType]": + ... + + def _asdict(self) -> Dict[str, Any]: + ... + + def _replace(self, **kwargs: Any) -> "CodecOptions[_DocumentType]": + ... + + _source: str + _fields: Tuple[str] + +else: + + class CodecOptions(_BaseCodecOptions): + """Encapsulates options used encoding and / or decoding BSON.""" + + def __init__(self, *args, **kwargs): + """Encapsulates options used encoding and / or decoding BSON. + + The `document_class` option is used to define a custom type for use + decoding BSON documents. Access to the underlying raw BSON bytes for + a document is available using the :class:`~bson.raw_bson.RawBSONDocument` + type:: + + >>> from bson.raw_bson import RawBSONDocument + >>> from bson.codec_options import CodecOptions + >>> codec_options = CodecOptions(document_class=RawBSONDocument) + >>> coll = db.get_collection('test', codec_options=codec_options) + >>> doc = coll.find_one() + >>> doc.raw + '\\x16\\x00\\x00\\x00\\x07_id\\x00[0\\x165\\x91\\x10\\xea\\x14\\xe8\\xc5\\x8b\\x93\\x00' + + The document class can be any type that inherits from + :class:`~collections.abc.MutableMapping`:: + + >>> class AttributeDict(dict): + ... # A dict that supports attribute access. + ... def __getattr__(self, key): + ... return self[key] + ... def __setattr__(self, key, value): + ... self[key] = value + ... + >>> codec_options = CodecOptions(document_class=AttributeDict) + >>> coll = db.get_collection('test', codec_options=codec_options) + >>> doc = coll.find_one() + >>> doc._id + ObjectId('5b3016359110ea14e8c58b93') + + See :doc:`/examples/datetimes` for examples using the `tz_aware` and + `tzinfo` options. + + See :doc:`/examples/uuid` for examples using the `uuid_representation` + option. + + :Parameters: + - `document_class`: BSON documents returned in queries will be decoded + to an instance of this class. Must be a subclass of + :class:`~collections.abc.MutableMapping`. Defaults to :class:`dict`. + - `tz_aware`: If ``True``, BSON datetimes will be decoded to timezone + aware instances of :class:`~datetime.datetime`. Otherwise they will be + naive. Defaults to ``False``. + - `uuid_representation`: The BSON representation to use when encoding + and decoding instances of :class:`~uuid.UUID`. 
Defaults to + :data:`~bson.binary.UuidRepresentation.UNSPECIFIED`. New + applications should consider setting this to + :data:`~bson.binary.UuidRepresentation.STANDARD` for cross language + compatibility. See :ref:`handling-uuid-data-example` for details. + - `unicode_decode_error_handler`: The error handler to apply when + a Unicode-related error occurs during BSON decoding that would + otherwise raise :exc:`UnicodeDecodeError`. Valid options include + 'strict', 'replace', 'backslashreplace', 'surrogateescape', and + 'ignore'. Defaults to 'strict'. + - `tzinfo`: A :class:`~datetime.tzinfo` subclass that specifies the + timezone to/from which :class:`~datetime.datetime` objects should be + encoded/decoded. + - `type_registry`: Instance of :class:`TypeRegistry` used to customize + encoding and decoding behavior. + - `datetime_conversion`: Specifies how UTC datetimes should be decoded + within BSON. Valid options include 'datetime_ms' to return as a + DatetimeMS, 'datetime' to return as a datetime.datetime and + raising a ValueError for out-of-range values, 'datetime_auto' to + return DatetimeMS objects when the underlying datetime is + out-of-range and 'datetime_clamp' to clamp to the minimum and + maximum possible datetimes. Defaults to 'datetime'. + + .. versionchanged:: 4.0 + The default for `uuid_representation` was changed from + :const:`~bson.binary.UuidRepresentation.PYTHON_LEGACY` to + :const:`~bson.binary.UuidRepresentation.UNSPECIFIED`. + + .. versionadded:: 3.8 + `type_registry` attribute. + + .. warning:: Care must be taken when changing + `unicode_decode_error_handler` from its default value ('strict'). + The 'replace' and 'ignore' modes should not be used when documents + retrieved from the server will be modified in the client application + and stored back to the server. + """ + super().__init__() + + def __new__( + cls: Type["CodecOptions"], + document_class: Optional[Type[Mapping[str, Any]]] = None, + tz_aware: bool = False, + uuid_representation: Optional[int] = UuidRepresentation.UNSPECIFIED, + unicode_decode_error_handler: str = "strict", + tzinfo: Optional[datetime.tzinfo] = None, + type_registry: Optional[TypeRegistry] = None, + datetime_conversion: Optional[DatetimeConversion] = DatetimeConversion.DATETIME, + ) -> "CodecOptions": + doc_class = document_class or dict + # issubclass can raise TypeError for generic aliases like SON[str, Any]. + # In that case we can use the base class for the comparison. 
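            # (Illustrative: issubclass(SON[str, Any], MutableMapping) raises
            # TypeError because the parameterized alias is not a class, while
            # SON[str, Any].__origin__ is the plain SON class, which
            # issubclass() accepts.)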
+ is_mapping = False + try: + is_mapping = issubclass(doc_class, _MutableMapping) + except TypeError: + if hasattr(doc_class, "__origin__"): + is_mapping = issubclass(doc_class.__origin__, _MutableMapping) + if not (is_mapping or _raw_document_class(doc_class)): + raise TypeError( + "document_class must be dict, bson.son.SON, " + "bson.raw_bson.RawBSONDocument, or a " + "subclass of collections.abc.MutableMapping" + ) + if not isinstance(tz_aware, bool): + raise TypeError(f"tz_aware must be True or False, was: tz_aware={tz_aware}") + if uuid_representation not in ALL_UUID_REPRESENTATIONS: + raise ValueError( + "uuid_representation must be a value from bson.binary.UuidRepresentation" + ) + if not isinstance(unicode_decode_error_handler, str): + raise ValueError("unicode_decode_error_handler must be a string") + if tzinfo is not None: + if not isinstance(tzinfo, datetime.tzinfo): + raise TypeError("tzinfo must be an instance of datetime.tzinfo") + if not tz_aware: + raise ValueError("cannot specify tzinfo without also setting tz_aware=True") + + type_registry = type_registry or TypeRegistry() + + if not isinstance(type_registry, TypeRegistry): + raise TypeError("type_registry must be an instance of TypeRegistry") + + return tuple.__new__( + cls, + ( + doc_class, + tz_aware, + uuid_representation, + unicode_decode_error_handler, + tzinfo, + type_registry, + datetime_conversion, + ), + ) + + def _arguments_repr(self) -> str: + """Representation of the arguments used to create this object.""" + document_class_repr = ( + "dict" if self.document_class is dict else repr(self.document_class) + ) + + uuid_rep_repr = UUID_REPRESENTATION_NAMES.get( + self.uuid_representation, self.uuid_representation + ) + + return ( + "document_class={}, tz_aware={!r}, uuid_representation={}, " + "unicode_decode_error_handler={!r}, tzinfo={!r}, " + "type_registry={!r}, datetime_conversion={!s}".format( + document_class_repr, + self.tz_aware, + uuid_rep_repr, + self.unicode_decode_error_handler, + self.tzinfo, + self.type_registry, + self.datetime_conversion, + ) + ) + + def _options_dict(self) -> Dict[str, Any]: + """Dictionary of the arguments used to create this object.""" + # TODO: PYTHON-2442 use _asdict() instead + return { + "document_class": self.document_class, + "tz_aware": self.tz_aware, + "uuid_representation": self.uuid_representation, + "unicode_decode_error_handler": self.unicode_decode_error_handler, + "tzinfo": self.tzinfo, + "type_registry": self.type_registry, + "datetime_conversion": self.datetime_conversion, + } + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self._arguments_repr()})" + + def with_options(self, **kwargs: Any) -> "CodecOptions": + """Make a copy of this CodecOptions, overriding some options:: + + >>> from bson.codec_options import DEFAULT_CODEC_OPTIONS + >>> DEFAULT_CODEC_OPTIONS.tz_aware + False + >>> options = DEFAULT_CODEC_OPTIONS.with_options(tz_aware=True) + >>> options.tz_aware + True + + .. 
versionadded:: 3.5 + """ + opts = self._options_dict() + opts.update(kwargs) + return CodecOptions(**opts) + + +DEFAULT_CODEC_OPTIONS: "CodecOptions[Dict[str, Any]]" = CodecOptions() + + +def _parse_codec_options(options: Any) -> CodecOptions: + """Parse BSON codec options.""" + kwargs = {} + for k in set(options) & { + "document_class", + "tz_aware", + "uuidrepresentation", + "unicode_decode_error_handler", + "tzinfo", + "type_registry", + "datetime_conversion", + }: + if k == "uuidrepresentation": + kwargs["uuid_representation"] = options[k] + else: + kwargs[k] = options[k] + return CodecOptions(**kwargs) diff --git a/backend/test/lib/python3.8/site-packages/bson/datetime_ms.py b/backend/test/lib/python3.8/site-packages/bson/datetime_ms.py new file mode 100644 index 0000000000000000000000000000000000000000..6b9472b5b6b53238ef4c91109a45de0b726fdaf5 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/datetime_ms.py @@ -0,0 +1,158 @@ +# Copyright 2022-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""Tools for representing the BSON datetime type. + +.. versionadded:: 4.3 +""" + +import calendar +import datetime +import functools +from typing import Any, Union, cast + +from bson.codec_options import DEFAULT_CODEC_OPTIONS, CodecOptions, DatetimeConversion +from bson.tz_util import utc + +EPOCH_AWARE = datetime.datetime.fromtimestamp(0, utc) +EPOCH_NAIVE = EPOCH_AWARE.replace(tzinfo=None) + + +class DatetimeMS: + """Represents a BSON UTC datetime.""" + + __slots__ = ("_value",) + + def __init__(self, value: Union[int, datetime.datetime]): + """Represents a BSON UTC datetime. + + BSON UTC datetimes are defined as an int64 of milliseconds since the + Unix epoch. The principal use of DatetimeMS is to represent + datetimes outside the range of the Python builtin + :class:`~datetime.datetime` class when + encoding/decoding BSON. + + To decode UTC datetimes as a ``DatetimeMS``, `datetime_conversion` in + :class:`~bson.CodecOptions` must be set to 'datetime_ms' or + 'datetime_auto'. See :ref:`handling-out-of-range-datetimes` for + details. + + :Parameters: + - `value`: An instance of :class:`datetime.datetime` to be + represented as milliseconds since the Unix epoch, or int of + milliseconds since the Unix epoch. 
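        For example (an illustrative sketch)::

            >>> import datetime
            >>> DatetimeMS(0) == DatetimeMS(datetime.datetime(1970, 1, 1))
            True
            >>> int(DatetimeMS(-2**62))  # far outside datetime's range
            -4611686018427387904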
+ """ + if isinstance(value, int): + if not (-(2**63) <= value <= 2**63 - 1): + raise OverflowError("Must be a 64-bit integer of milliseconds") + self._value = value + elif isinstance(value, datetime.datetime): + self._value = _datetime_to_millis(value) + else: + raise TypeError(f"{type(value)} is not a valid type for DatetimeMS") + + def __hash__(self) -> int: + return hash(self._value) + + def __repr__(self) -> str: + return type(self).__name__ + "(" + str(self._value) + ")" + + def __lt__(self, other: Union["DatetimeMS", int]) -> bool: + return self._value < other + + def __le__(self, other: Union["DatetimeMS", int]) -> bool: + return self._value <= other + + def __eq__(self, other: Any) -> bool: + if isinstance(other, DatetimeMS): + return self._value == other._value + return False + + def __ne__(self, other: Any) -> bool: + if isinstance(other, DatetimeMS): + return self._value != other._value + return True + + def __gt__(self, other: Union["DatetimeMS", int]) -> bool: + return self._value > other + + def __ge__(self, other: Union["DatetimeMS", int]) -> bool: + return self._value >= other + + _type_marker = 9 + + def as_datetime(self, codec_options: CodecOptions = DEFAULT_CODEC_OPTIONS) -> datetime.datetime: + """Create a Python :class:`~datetime.datetime` from this DatetimeMS object. + + :Parameters: + - `codec_options`: A CodecOptions instance for specifying how the + resulting DatetimeMS object will be formatted using ``tz_aware`` + and ``tz_info``. Defaults to + :const:`~bson.codec_options.DEFAULT_CODEC_OPTIONS`. + """ + return cast(datetime.datetime, _millis_to_datetime(self._value, codec_options)) + + def __int__(self) -> int: + return self._value + + +# Inclusive and exclusive min and max for timezones. +# Timezones are hashed by their offset, which is a timedelta +# and therefore there are more than 24 possible timezones. 
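# (Illustrative: timezones at offsets +00:30 and +00:45 hash to different
# cache keys, so the lru_cache below memoizes the min/max bounds once per
# distinct offset instead of recomputing them on every decode.)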
+@functools.lru_cache(maxsize=None) +def _min_datetime_ms(tz: datetime.timezone = datetime.timezone.utc) -> int: + return _datetime_to_millis(datetime.datetime.min.replace(tzinfo=tz)) + + +@functools.lru_cache(maxsize=None) +def _max_datetime_ms(tz: datetime.timezone = datetime.timezone.utc) -> int: + return _datetime_to_millis(datetime.datetime.max.replace(tzinfo=tz)) + + +def _millis_to_datetime(millis: int, opts: CodecOptions) -> Union[datetime.datetime, DatetimeMS]: + """Convert milliseconds since epoch UTC to datetime.""" + if ( + opts.datetime_conversion == DatetimeConversion.DATETIME + or opts.datetime_conversion == DatetimeConversion.DATETIME_CLAMP + or opts.datetime_conversion == DatetimeConversion.DATETIME_AUTO + ): + tz = opts.tzinfo or datetime.timezone.utc + if opts.datetime_conversion == DatetimeConversion.DATETIME_CLAMP: + millis = max(_min_datetime_ms(tz), min(millis, _max_datetime_ms(tz))) + elif opts.datetime_conversion == DatetimeConversion.DATETIME_AUTO: + if not (_min_datetime_ms(tz) <= millis <= _max_datetime_ms(tz)): + return DatetimeMS(millis) + + diff = ((millis % 1000) + 1000) % 1000 + seconds = (millis - diff) // 1000 + micros = diff * 1000 + + if opts.tz_aware: + dt = EPOCH_AWARE + datetime.timedelta(seconds=seconds, microseconds=micros) + if opts.tzinfo: + dt = dt.astimezone(tz) + return dt + else: + return EPOCH_NAIVE + datetime.timedelta(seconds=seconds, microseconds=micros) + elif opts.datetime_conversion == DatetimeConversion.DATETIME_MS: + return DatetimeMS(millis) + else: + raise ValueError("datetime_conversion must be an element of DatetimeConversion") + + +def _datetime_to_millis(dtm: datetime.datetime) -> int: + """Convert datetime to milliseconds since epoch UTC.""" + if dtm.utcoffset() is not None: + dtm = dtm - dtm.utcoffset() # type: ignore + return int(calendar.timegm(dtm.timetuple()) * 1000 + dtm.microsecond // 1000) diff --git a/backend/test/lib/python3.8/site-packages/bson/dbref.py b/backend/test/lib/python3.8/site-packages/bson/dbref.py new file mode 100644 index 0000000000000000000000000000000000000000..1bd4cadcc06c7153a22aac7388292593c93219c3 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/dbref.py @@ -0,0 +1,133 @@ +# Copyright 2009-2015 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for manipulating DBRefs (references to MongoDB documents).""" + +from copy import deepcopy +from typing import Any, Mapping, Optional + +from bson._helpers import _getstate_slots, _setstate_slots +from bson.son import SON + + +class DBRef: + """A reference to a document stored in MongoDB.""" + + __slots__ = "__collection", "__id", "__database", "__kwargs" + __getstate__ = _getstate_slots + __setstate__ = _setstate_slots + # DBRef isn't actually a BSON "type" so this number was arbitrarily chosen. + _type_marker = 100 + + def __init__( + self, + collection: str, + id: Any, + database: Optional[str] = None, + _extra: Optional[Mapping[str, Any]] = None, + **kwargs: Any, + ) -> None: + """Initialize a new :class:`DBRef`. 
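
        For example (an illustrative sketch)::

            >>> ref = DBRef("users", 5, database="app")
            >>> ref.as_doc()
            SON([('$ref', 'users'), ('$id', 5), ('$db', 'app')])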
+ + Raises :class:`TypeError` if `collection` or `database` is not + an instance of :class:`str`. `database` is optional and allows + references to documents to work across databases. Any additional + keyword arguments will create additional fields in the resultant + embedded document. + + :Parameters: + - `collection`: name of the collection the document is stored in + - `id`: the value of the document's ``"_id"`` field + - `database` (optional): name of the database to reference + - `**kwargs` (optional): additional keyword arguments will + create additional, custom fields + + .. seealso:: The MongoDB documentation on `dbrefs <https://dochub.mongodb.org/core/dbrefs>`_. + """ + if not isinstance(collection, str): + raise TypeError("collection must be an instance of str") + if database is not None and not isinstance(database, str): + raise TypeError("database must be an instance of str") + + self.__collection = collection + self.__id = id + self.__database = database + kwargs.update(_extra or {}) + self.__kwargs = kwargs + + @property + def collection(self) -> str: + """Get the name of this DBRef's collection.""" + return self.__collection + + @property + def id(self) -> Any: + """Get this DBRef's _id.""" + return self.__id + + @property + def database(self) -> Optional[str]: + """Get the name of this DBRef's database. + + Returns None if this DBRef doesn't specify a database. + """ + return self.__database + + def __getattr__(self, key: Any) -> Any: + try: + return self.__kwargs[key] + except KeyError: + raise AttributeError(key) + + def as_doc(self) -> SON[str, Any]: + """Get the SON document representation of this DBRef. + + Generally not needed by application developers + """ + doc = SON([("$ref", self.collection), ("$id", self.id)]) + if self.database is not None: + doc["$db"] = self.database + doc.update(self.__kwargs) + return doc + + def __repr__(self) -> str: + extra = "".join([f", {k}={v!r}" for k, v in self.__kwargs.items()]) + if self.database is None: + return f"DBRef({self.collection!r}, {self.id!r}{extra})" + return f"DBRef({self.collection!r}, {self.id!r}, {self.database!r}{extra})" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, DBRef): + us = (self.__database, self.__collection, self.__id, self.__kwargs) + them = (other.__database, other.__collection, other.__id, other.__kwargs) + return us == them + return NotImplemented + + def __ne__(self, other: Any) -> bool: + return not self == other + + def __hash__(self) -> int: + """Get a hash value for this :class:`DBRef`.""" + return hash( + (self.__collection, self.__id, self.__database, tuple(sorted(self.__kwargs.items()))) + ) + + def __deepcopy__(self, memo: Any) -> "DBRef": + """Support function for `copy.deepcopy()`.""" + return DBRef( + deepcopy(self.__collection, memo), + deepcopy(self.__id, memo), + deepcopy(self.__database, memo), + deepcopy(self.__kwargs, memo), + ) diff --git a/backend/test/lib/python3.8/site-packages/bson/decimal128.py b/backend/test/lib/python3.8/site-packages/bson/decimal128.py new file mode 100644 index 0000000000000000000000000000000000000000..fd39e947050b340cc0e3444c70d2c4bdd2690114 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/decimal128.py @@ -0,0 +1,314 @@ +# Copyright 2016-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for working with the BSON decimal128 type. + +.. versionadded:: 3.4 +""" + +import decimal +import struct +from typing import Any, Sequence, Tuple, Type, Union + +_PACK_64 = struct.Struct("<Q").pack +_UNPACK_64 = struct.Struct("<Q").unpack + +_EXPONENT_MASK = 3 << 61 +_EXPONENT_BIAS = 6176 +_EXPONENT_MAX = 6144 +_EXPONENT_MIN = -6143 +_MAX_DIGITS = 34 + +_INF = 0x7800000000000000 +_NAN = 0x7C00000000000000 +_SNAN = 0x7E00000000000000 +_SIGN = 0x8000000000000000 + +_NINF = (_INF + _SIGN, 0) +_PINF = (_INF, 0) +_NNAN = (_NAN + _SIGN, 0) +_PNAN = (_NAN, 0) +_NSNAN = (_SNAN + _SIGN, 0) +_PSNAN = (_SNAN, 0) + +_CTX_OPTIONS = { + "prec": _MAX_DIGITS, + "rounding": decimal.ROUND_HALF_EVEN, + "Emin": _EXPONENT_MIN, + "Emax": _EXPONENT_MAX, + "capitals": 1, + "flags": [], + "traps": [decimal.InvalidOperation, decimal.Overflow, decimal.Inexact], + "clamp": 1, +} + +_DEC128_CTX = decimal.Context(**_CTX_OPTIONS.copy()) # type: ignore +_VALUE_OPTIONS = Union[decimal.Decimal, float, str, Tuple[int, Sequence[int], int]] + + +def create_decimal128_context() -> decimal.Context: + """Returns an instance of :class:`decimal.Context` appropriate + for working with IEEE-754 128-bit decimal floating point values. + """ + opts = _CTX_OPTIONS.copy() + opts["traps"] = [] + return decimal.Context(**opts) # type: ignore + + +def _decimal_to_128(value: _VALUE_OPTIONS) -> Tuple[int, int]: + """Converts a decimal.Decimal to BID (high bits, low bits). + + :Parameters: + - `value`: An instance of decimal.Decimal + """ + with decimal.localcontext(_DEC128_CTX) as ctx: + value = ctx.create_decimal(value) + + if value.is_infinite(): + return _NINF if value.is_signed() else _PINF + + sign, digits, exponent = value.as_tuple() + + if value.is_nan(): + if digits: + raise ValueError("NaN with debug payload is not supported") + if value.is_snan(): + return _NSNAN if value.is_signed() else _PSNAN + return _NNAN if value.is_signed() else _PNAN + + significand = int("".join([str(digit) for digit in digits])) + bit_length = significand.bit_length() + + high = 0 + low = 0 + for i in range(min(64, bit_length)): + if significand & (1 << i): + low |= 1 << i + + for i in range(64, bit_length): + if significand & (1 << i): + high |= 1 << (i - 64) + + biased_exponent = exponent + _EXPONENT_BIAS # type: ignore[operator] + + if high >> 49 == 1: + high = high & 0x7FFFFFFFFFFF + high |= _EXPONENT_MASK + high |= (biased_exponent & 0x3FFF) << 47 + else: + high |= biased_exponent << 49 + + if sign: + high |= _SIGN + + return high, low + + +class Decimal128: + """BSON Decimal128 type:: + + >>> Decimal128(Decimal("0.0005")) + Decimal128('0.0005') + >>> Decimal128("0.0005") + Decimal128('0.0005') + >>> Decimal128((3474527112516337664, 5)) + Decimal128('0.0005') + + :Parameters: + - `value`: An instance of :class:`decimal.Decimal`, string, or tuple of + (high bits, low bits) from Binary Integer Decimal (BID) format. + + .. note:: :class:`~Decimal128` uses an instance of :class:`decimal.Context` + configured for IEEE-754 Decimal128 when validating parameters. 
+ Signals like :class:`decimal.InvalidOperation`, :class:`decimal.Inexact`, + and :class:`decimal.Overflow` are trapped and raised as exceptions:: + + >>> Decimal128(".13.1") + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + ... + decimal.InvalidOperation: [<class 'decimal.ConversionSyntax'>] + >>> + >>> Decimal128("1E-6177") + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + ... + decimal.Inexact: [<class 'decimal.Inexact'>] + >>> + >>> Decimal128("1E6145") + Traceback (most recent call last): + File "<stdin>", line 1, in <module> + ... + decimal.Overflow: [<class 'decimal.Overflow'>, <class 'decimal.Rounded'>] + + To ensure the result of a calculation can always be stored as BSON + Decimal128 use the context returned by + :func:`create_decimal128_context`:: + + >>> import decimal + >>> decimal128_ctx = create_decimal128_context() + >>> with decimal.localcontext(decimal128_ctx) as ctx: + ... Decimal128(ctx.create_decimal(".13.3")) + ... + Decimal128('NaN') + >>> + >>> with decimal.localcontext(decimal128_ctx) as ctx: + ... Decimal128(ctx.create_decimal("1E-6177")) + ... + Decimal128('0E-6176') + >>> + >>> with decimal.localcontext(DECIMAL128_CTX) as ctx: + ... Decimal128(ctx.create_decimal("1E6145")) + ... + Decimal128('Infinity') + + To match the behavior of MongoDB's Decimal128 implementation + str(Decimal(value)) may not match str(Decimal128(value)) for NaN values:: + + >>> Decimal128(Decimal('NaN')) + Decimal128('NaN') + >>> Decimal128(Decimal('-NaN')) + Decimal128('NaN') + >>> Decimal128(Decimal('sNaN')) + Decimal128('NaN') + >>> Decimal128(Decimal('-sNaN')) + Decimal128('NaN') + + However, :meth:`~Decimal128.to_decimal` will return the exact value:: + + >>> Decimal128(Decimal('NaN')).to_decimal() + Decimal('NaN') + >>> Decimal128(Decimal('-NaN')).to_decimal() + Decimal('-NaN') + >>> Decimal128(Decimal('sNaN')).to_decimal() + Decimal('sNaN') + >>> Decimal128(Decimal('-sNaN')).to_decimal() + Decimal('-sNaN') + + Two instances of :class:`Decimal128` compare equal if their Binary + Integer Decimal encodings are equal:: + + >>> Decimal128('NaN') == Decimal128('NaN') + True + >>> Decimal128('NaN').bid == Decimal128('NaN').bid + True + + This differs from :class:`decimal.Decimal` comparisons for NaN:: + + >>> Decimal('NaN') == Decimal('NaN') + False + """ + + __slots__ = ("__high", "__low") + + _type_marker = 19 + + def __init__(self, value: _VALUE_OPTIONS) -> None: + if isinstance(value, (str, decimal.Decimal)): + self.__high, self.__low = _decimal_to_128(value) + elif isinstance(value, (list, tuple)): + if len(value) != 2: + raise ValueError( + "Invalid size for creation of Decimal128 " + "from list or tuple. Must have exactly 2 " + "elements." + ) + self.__high, self.__low = value # type: ignore + else: + raise TypeError(f"Cannot convert {value!r} to Decimal128") + + def to_decimal(self) -> decimal.Decimal: + """Returns an instance of :class:`decimal.Decimal` for this + :class:`Decimal128`. 
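        For example (illustrative)::

            >>> Decimal128("1.5").to_decimal()
            Decimal('1.5')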
+ """ + high = self.__high + low = self.__low + sign = 1 if (high & _SIGN) else 0 + + if (high & _SNAN) == _SNAN: + return decimal.Decimal((sign, (), "N")) # type: ignore + elif (high & _NAN) == _NAN: + return decimal.Decimal((sign, (), "n")) # type: ignore + elif (high & _INF) == _INF: + return decimal.Decimal((sign, (), "F")) # type: ignore + + if (high & _EXPONENT_MASK) == _EXPONENT_MASK: + exponent = ((high & 0x1FFFE00000000000) >> 47) - _EXPONENT_BIAS + return decimal.Decimal((sign, (0,), exponent)) + else: + exponent = ((high & 0x7FFF800000000000) >> 49) - _EXPONENT_BIAS + + arr = bytearray(15) + mask = 0x00000000000000FF + for i in range(14, 6, -1): + arr[i] = (low & mask) >> ((14 - i) << 3) + mask = mask << 8 + + mask = 0x00000000000000FF + for i in range(6, 0, -1): + arr[i] = (high & mask) >> ((6 - i) << 3) + mask = mask << 8 + + mask = 0x0001000000000000 + arr[0] = (high & mask) >> 48 + + # cdecimal only accepts a tuple for digits. + digits = tuple(int(digit) for digit in str(int.from_bytes(arr, "big"))) + + with decimal.localcontext(_DEC128_CTX) as ctx: + return ctx.create_decimal((sign, digits, exponent)) + + @classmethod + def from_bid(cls: Type["Decimal128"], value: bytes) -> "Decimal128": + """Create an instance of :class:`Decimal128` from Binary Integer + Decimal string. + + :Parameters: + - `value`: 16 byte string (128-bit IEEE 754-2008 decimal floating + point in Binary Integer Decimal (BID) format). + """ + if not isinstance(value, bytes): + raise TypeError("value must be an instance of bytes") + if len(value) != 16: + raise ValueError("value must be exactly 16 bytes") + return cls((_UNPACK_64(value[8:])[0], _UNPACK_64(value[:8])[0])) # type: ignore + + @property + def bid(self) -> bytes: + """The Binary Integer Decimal (BID) encoding of this instance.""" + return _PACK_64(self.__low) + _PACK_64(self.__high) + + def __str__(self) -> str: + dec = self.to_decimal() + if dec.is_nan(): + # Required by the drivers spec to match MongoDB behavior. + return "NaN" + return str(dec) + + def __repr__(self) -> str: + return f"Decimal128('{str(self)}')" + + def __setstate__(self, value: Tuple[int, int]) -> None: + self.__high, self.__low = value + + def __getstate__(self) -> Tuple[int, int]: + return self.__high, self.__low + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Decimal128): + return self.bid == other.bid + return NotImplemented + + def __ne__(self, other: Any) -> bool: + return not self == other diff --git a/backend/test/lib/python3.8/site-packages/bson/errors.py b/backend/test/lib/python3.8/site-packages/bson/errors.py new file mode 100644 index 0000000000000000000000000000000000000000..7333b27b587dfc48df0b8248c914a97b54fe2cad --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/errors.py @@ -0,0 +1,35 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Exceptions raised by the BSON package.""" + + +class BSONError(Exception): + """Base class for all BSON exceptions.""" + + +class InvalidBSON(BSONError): + """Raised when trying to create a BSON object from invalid data.""" + + +class InvalidStringData(BSONError): + """Raised when trying to encode a string containing non-UTF8 data.""" + + +class InvalidDocument(BSONError): + """Raised when trying to create a BSON object from an invalid document.""" + + +class InvalidId(BSONError): + """Raised when trying to create an ObjectId from invalid data.""" diff --git a/backend/test/lib/python3.8/site-packages/bson/int64.py b/backend/test/lib/python3.8/site-packages/bson/int64.py new file mode 100644 index 0000000000000000000000000000000000000000..ed4dfa566169f85f455d3cba675103e3cbc1f8f3 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/int64.py @@ -0,0 +1,39 @@ +# Copyright 2014-2015 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""A BSON wrapper for long (int in python3)""" + +from typing import Any + + +class Int64(int): + """Representation of the BSON int64 type. + + This is necessary because every integral number is an :class:`int` in + Python 3. Small integral numbers are encoded to BSON int32 by default, + but Int64 numbers will always be encoded to BSON int64. + + :Parameters: + - `value`: the numeric value to represent + """ + + __slots__ = () + + _type_marker = 18 + + def __getstate__(self) -> Any: + return {} + + def __setstate__(self, state: Any) -> None: + pass diff --git a/backend/test/lib/python3.8/site-packages/bson/json_util.py b/backend/test/lib/python3.8/site-packages/bson/json_util.py new file mode 100644 index 0000000000000000000000000000000000000000..82604f382f9fbc5ba1df4e05a43dc3918bfc7671 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/json_util.py @@ -0,0 +1,918 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for using Python's :mod:`json` module with BSON documents. + +This module provides two helper methods `dumps` and `loads` that wrap the +native :mod:`json` methods and provide explicit BSON conversion to and from +JSON. :class:`~bson.json_util.JSONOptions` provides a way to control how JSON +is emitted and parsed, with the default being the Relaxed Extended JSON format. +:mod:`~bson.json_util` can also generate Canonical or legacy `Extended JSON`_ +when :const:`CANONICAL_JSON_OPTIONS` or :const:`LEGACY_JSON_OPTIONS` is +provided, respectively. + +.. 
_Extended JSON: https://github.com/mongodb/specifications/blob/master/source/extended-json.rst + +Example usage (deserialization): + +.. doctest:: + + >>> from bson.json_util import loads + >>> loads( + ... '[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$scope": {}, "$code": "function x() { return 1; }"}}, {"bin": {"$type": "80", "$binary": "AQIDBA=="}}]' + ... ) + [{'foo': [1, 2]}, {'bar': {'hello': 'world'}}, {'code': Code('function x() { return 1; }', {})}, {'bin': Binary(b'...', 128)}] + +Example usage with :const:`RELAXED_JSON_OPTIONS` (the default): + +.. doctest:: + + >>> from bson import Binary, Code + >>> from bson.json_util import dumps + >>> dumps( + ... [ + ... {"foo": [1, 2]}, + ... {"bar": {"hello": "world"}}, + ... {"code": Code("function x() { return 1; }")}, + ... {"bin": Binary(b"\x01\x02\x03\x04")}, + ... ] + ... ) + '[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }"}}, {"bin": {"$binary": {"base64": "AQIDBA==", "subType": "00"}}}]' + +Example usage (with :const:`CANONICAL_JSON_OPTIONS`): + +.. doctest:: + + >>> from bson import Binary, Code + >>> from bson.json_util import dumps, CANONICAL_JSON_OPTIONS + >>> dumps( + ... [ + ... {"foo": [1, 2]}, + ... {"bar": {"hello": "world"}}, + ... {"code": Code("function x() { return 1; }")}, + ... {"bin": Binary(b"\x01\x02\x03\x04")}, + ... ], + ... json_options=CANONICAL_JSON_OPTIONS, + ... ) + '[{"foo": [{"$numberInt": "1"}, {"$numberInt": "2"}]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }"}}, {"bin": {"$binary": {"base64": "AQIDBA==", "subType": "00"}}}]' + +Example usage (with :const:`LEGACY_JSON_OPTIONS`): + +.. doctest:: + + >>> from bson import Binary, Code + >>> from bson.json_util import dumps, LEGACY_JSON_OPTIONS + >>> dumps( + ... [ + ... {"foo": [1, 2]}, + ... {"bar": {"hello": "world"}}, + ... {"code": Code("function x() { return 1; }", {})}, + ... {"bin": Binary(b"\x01\x02\x03\x04")}, + ... ], + ... json_options=LEGACY_JSON_OPTIONS, + ... ) + '[{"foo": [1, 2]}, {"bar": {"hello": "world"}}, {"code": {"$code": "function x() { return 1; }", "$scope": {}}}, {"bin": {"$binary": "AQIDBA==", "$type": "00"}}]' + +Alternatively, you can manually pass the `default` to :func:`json.dumps`. +It won't handle :class:`~bson.binary.Binary` and :class:`~bson.code.Code` +instances (as they are extended strings you can't provide custom defaults), +but it will be faster as there is less recursion. + +.. note:: + If your application does not need the flexibility offered by + :class:`JSONOptions` and spends a large amount of time in the `json_util` + module, look to + `python-bsonjs <https://pypi.python.org/pypi/python-bsonjs>`_ for a nice + performance improvement. `python-bsonjs` is a fast BSON to MongoDB + Extended JSON converter for Python built on top of + `libbson <https://github.com/mongodb/libbson>`_. `python-bsonjs` works best + with PyMongo when using :class:`~bson.raw_bson.RawBSONDocument`. 
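+A round-trip sketch (an editorial illustration, not one of the library's own
+examples): Canonical Extended JSON preserves integer width across a
+dumps/loads cycle:
+
+.. doctest::
+
+    >>> from bson.int64 import Int64
+    >>> from bson.json_util import dumps, loads, CANONICAL_JSON_OPTIONS
+    >>> doc = {"n": Int64(1)}
+    >>> dumps(doc, json_options=CANONICAL_JSON_OPTIONS)
+    '{"n": {"$numberLong": "1"}}'
+    >>> loads(dumps(doc, json_options=CANONICAL_JSON_OPTIONS)) == doc
+    True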
+""" + +import base64 +import datetime +import json +import math +import re +import uuid +from typing import Any, Dict, Mapping, Optional, Sequence, Tuple, Type, Union, cast + +from bson.binary import ALL_UUID_SUBTYPES, UUID_SUBTYPE, Binary, UuidRepresentation +from bson.code import Code +from bson.codec_options import CodecOptions, DatetimeConversion +from bson.datetime_ms import ( + EPOCH_AWARE, + DatetimeMS, + _datetime_to_millis, + _max_datetime_ms, + _millis_to_datetime, +) +from bson.dbref import DBRef +from bson.decimal128 import Decimal128 +from bson.int64 import Int64 +from bson.max_key import MaxKey +from bson.min_key import MinKey +from bson.objectid import ObjectId +from bson.regex import Regex +from bson.son import RE_TYPE, SON +from bson.timestamp import Timestamp +from bson.tz_util import utc + +_RE_OPT_TABLE = { + "i": re.I, + "l": re.L, + "m": re.M, + "s": re.S, + "u": re.U, + "x": re.X, +} + + +class DatetimeRepresentation: + LEGACY = 0 + """Legacy MongoDB Extended JSON datetime representation. + + :class:`datetime.datetime` instances will be encoded to JSON in the + format `{"$date": <dateAsMilliseconds>}`, where `dateAsMilliseconds` is + a 64-bit signed integer giving the number of milliseconds since the Unix + epoch UTC. This was the default encoding before PyMongo version 3.4. + + .. versionadded:: 3.4 + """ + + NUMBERLONG = 1 + """NumberLong datetime representation. + + :class:`datetime.datetime` instances will be encoded to JSON in the + format `{"$date": {"$numberLong": "<dateAsMilliseconds>"}}`, + where `dateAsMilliseconds` is the string representation of a 64-bit signed + integer giving the number of milliseconds since the Unix epoch UTC. + + .. versionadded:: 3.4 + """ + + ISO8601 = 2 + """ISO-8601 datetime representation. + + :class:`datetime.datetime` instances greater than or equal to the Unix + epoch UTC will be encoded to JSON in the format `{"$date": "<ISO-8601>"}`. + :class:`datetime.datetime` instances before the Unix epoch UTC will be + encoded as if the datetime representation is + :const:`~DatetimeRepresentation.NUMBERLONG`. + + .. versionadded:: 3.4 + """ + + +class JSONMode: + LEGACY = 0 + """Legacy Extended JSON representation. + + In this mode, :func:`~bson.json_util.dumps` produces PyMongo's legacy + non-standard JSON output. Consider using + :const:`~bson.json_util.JSONMode.RELAXED` or + :const:`~bson.json_util.JSONMode.CANONICAL` instead. + + .. versionadded:: 3.5 + """ + + RELAXED = 1 + """Relaxed Extended JSON representation. + + In this mode, :func:`~bson.json_util.dumps` produces Relaxed Extended JSON, + a mostly JSON-like format. Consider using this for things like a web API, + where one is sending a document (or a projection of a document) that only + uses ordinary JSON type primitives. In particular, the ``int``, + :class:`~bson.int64.Int64`, and ``float`` numeric types are represented in + the native JSON number format. This output is also the most human readable + and is useful for debugging and documentation. + + .. seealso:: The specification for Relaxed `Extended JSON`_. + + .. versionadded:: 3.5 + """ + + CANONICAL = 2 + """Canonical Extended JSON representation. + + In this mode, :func:`~bson.json_util.dumps` produces Canonical Extended + JSON, a type preserving format. Consider using this for things like + testing, where one has to precisely specify expected types in JSON. In + particular, the ``int``, :class:`~bson.int64.Int64`, and ``float`` numeric + types are encoded with type wrappers. + + .. 
seealso:: The specification for Canonical `Extended JSON`_. + + .. versionadded:: 3.5 + """ + + +class JSONOptions(CodecOptions): + json_mode: int + strict_number_long: bool + datetime_representation: int + strict_uuid: bool + + def __init__(self, *args: Any, **kwargs: Any): + """Encapsulates JSON options for :func:`dumps` and :func:`loads`. + + :Parameters: + - `strict_number_long`: If ``True``, :class:`~bson.int64.Int64` objects + are encoded to MongoDB Extended JSON's *Strict mode* type + `NumberLong`, ie ``'{"$numberLong": "<number>" }'``. Otherwise they + will be encoded as an `int`. Defaults to ``False``. + - `datetime_representation`: The representation to use when encoding + instances of :class:`datetime.datetime`. Defaults to + :const:`~DatetimeRepresentation.LEGACY`. + - `strict_uuid`: If ``True``, :class:`uuid.UUID` object are encoded to + MongoDB Extended JSON's *Strict mode* type `Binary`. Otherwise it + will be encoded as ``'{"$uuid": "<hex>" }'``. Defaults to ``False``. + - `json_mode`: The :class:`JSONMode` to use when encoding BSON types to + Extended JSON. Defaults to :const:`~JSONMode.LEGACY`. + - `document_class`: BSON documents returned by :func:`loads` will be + decoded to an instance of this class. Must be a subclass of + :class:`collections.MutableMapping`. Defaults to :class:`dict`. + - `uuid_representation`: The :class:`~bson.binary.UuidRepresentation` + to use when encoding and decoding instances of :class:`uuid.UUID`. + Defaults to :const:`~bson.binary.UuidRepresentation.UNSPECIFIED`. + - `tz_aware`: If ``True``, MongoDB Extended JSON's *Strict mode* type + `Date` will be decoded to timezone aware instances of + :class:`datetime.datetime`. Otherwise they will be naive. Defaults + to ``False``. + - `tzinfo`: A :class:`datetime.tzinfo` subclass that specifies the + timezone from which :class:`~datetime.datetime` objects should be + decoded. Defaults to :const:`~bson.tz_util.utc`. + - `datetime_conversion`: Specifies how UTC datetimes should be decoded + within BSON. Valid options include 'datetime_ms' to return as a + DatetimeMS, 'datetime' to return as a datetime.datetime and + raising a ValueError for out-of-range values, 'datetime_auto' to + return DatetimeMS objects when the underlying datetime is + out-of-range and 'datetime_clamp' to clamp to the minimum and + maximum possible datetimes. Defaults to 'datetime'. See + :ref:`handling-out-of-range-datetimes` for details. + - `args`: arguments to :class:`~bson.codec_options.CodecOptions` + - `kwargs`: arguments to :class:`~bson.codec_options.CodecOptions` + + .. seealso:: The specification for Relaxed and Canonical `Extended JSON`_. + + .. versionchanged:: 4.0 + The default for `json_mode` was changed from :const:`JSONMode.LEGACY` + to :const:`JSONMode.RELAXED`. + The default for `uuid_representation` was changed from + :const:`~bson.binary.UuidRepresentation.PYTHON_LEGACY` to + :const:`~bson.binary.UuidRepresentation.UNSPECIFIED`. + + .. versionchanged:: 3.5 + Accepts the optional parameter `json_mode`. + + .. versionchanged:: 4.0 + Changed default value of `tz_aware` to False. 
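+
+        A small illustration (an editorial sketch, not part of the original
+        documentation; it only exercises the documented mode defaults)::
+
+            >>> from bson.json_util import JSONOptions, JSONMode
+            >>> opts = JSONOptions(json_mode=JSONMode.CANONICAL)
+            >>> opts.strict_number_long, opts.strict_uuid
+            (True, True)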
+ """ + super().__init__() + + def __new__( + cls: Type["JSONOptions"], + strict_number_long: Optional[bool] = None, + datetime_representation: Optional[int] = None, + strict_uuid: Optional[bool] = None, + json_mode: int = JSONMode.RELAXED, + *args: Any, + **kwargs: Any, + ) -> "JSONOptions": + kwargs["tz_aware"] = kwargs.get("tz_aware", False) + if kwargs["tz_aware"]: + kwargs["tzinfo"] = kwargs.get("tzinfo", utc) + if datetime_representation not in ( + DatetimeRepresentation.LEGACY, + DatetimeRepresentation.NUMBERLONG, + DatetimeRepresentation.ISO8601, + None, + ): + raise ValueError( + "JSONOptions.datetime_representation must be one of LEGACY, " + "NUMBERLONG, or ISO8601 from DatetimeRepresentation." + ) + self = cast(JSONOptions, super().__new__(cls, *args, **kwargs)) + if json_mode not in (JSONMode.LEGACY, JSONMode.RELAXED, JSONMode.CANONICAL): + raise ValueError( + "JSONOptions.json_mode must be one of LEGACY, RELAXED, " + "or CANONICAL from JSONMode." + ) + self.json_mode = json_mode + if self.json_mode == JSONMode.RELAXED: + if strict_number_long: + raise ValueError("Cannot specify strict_number_long=True with JSONMode.RELAXED") + if datetime_representation not in (None, DatetimeRepresentation.ISO8601): + raise ValueError( + "datetime_representation must be DatetimeRepresentation." + "ISO8601 or omitted with JSONMode.RELAXED" + ) + if strict_uuid not in (None, True): + raise ValueError("Cannot specify strict_uuid=False with JSONMode.RELAXED") + self.strict_number_long = False + self.datetime_representation = DatetimeRepresentation.ISO8601 + self.strict_uuid = True + elif self.json_mode == JSONMode.CANONICAL: + if strict_number_long not in (None, True): + raise ValueError("Cannot specify strict_number_long=False with JSONMode.RELAXED") + if datetime_representation not in (None, DatetimeRepresentation.NUMBERLONG): + raise ValueError( + "datetime_representation must be DatetimeRepresentation." 
+ "NUMBERLONG or omitted with JSONMode.RELAXED" + ) + if strict_uuid not in (None, True): + raise ValueError("Cannot specify strict_uuid=False with JSONMode.RELAXED") + self.strict_number_long = True + self.datetime_representation = DatetimeRepresentation.NUMBERLONG + self.strict_uuid = True + else: # JSONMode.LEGACY + self.strict_number_long = False + self.datetime_representation = DatetimeRepresentation.LEGACY + self.strict_uuid = False + if strict_number_long is not None: + self.strict_number_long = strict_number_long + if datetime_representation is not None: + self.datetime_representation = datetime_representation + if strict_uuid is not None: + self.strict_uuid = strict_uuid + return self + + def _arguments_repr(self) -> str: + return ( + "strict_number_long={!r}, " + "datetime_representation={!r}, " + "strict_uuid={!r}, json_mode={!r}, {}".format( + self.strict_number_long, + self.datetime_representation, + self.strict_uuid, + self.json_mode, + super()._arguments_repr(), + ) + ) + + def _options_dict(self) -> Dict[Any, Any]: + # TODO: PYTHON-2442 use _asdict() instead + options_dict = super()._options_dict() + options_dict.update( + { + "strict_number_long": self.strict_number_long, + "datetime_representation": self.datetime_representation, + "strict_uuid": self.strict_uuid, + "json_mode": self.json_mode, + } + ) + return options_dict + + def with_options(self, **kwargs: Any) -> "JSONOptions": + """ + Make a copy of this JSONOptions, overriding some options:: + + >>> from bson.json_util import CANONICAL_JSON_OPTIONS + >>> CANONICAL_JSON_OPTIONS.tz_aware + True + >>> json_options = CANONICAL_JSON_OPTIONS.with_options(tz_aware=False, tzinfo=None) + >>> json_options.tz_aware + False + + .. versionadded:: 3.12 + """ + opts = self._options_dict() + for opt in ("strict_number_long", "datetime_representation", "strict_uuid", "json_mode"): + opts[opt] = kwargs.get(opt, getattr(self, opt)) + opts.update(kwargs) + return JSONOptions(**opts) + + +LEGACY_JSON_OPTIONS: JSONOptions = JSONOptions(json_mode=JSONMode.LEGACY) +""":class:`JSONOptions` for encoding to PyMongo's legacy JSON format. + +.. seealso:: The documentation for :const:`bson.json_util.JSONMode.LEGACY`. + +.. versionadded:: 3.5 +""" + +CANONICAL_JSON_OPTIONS: JSONOptions = JSONOptions(json_mode=JSONMode.CANONICAL) +""":class:`JSONOptions` for Canonical Extended JSON. + +.. seealso:: The documentation for :const:`bson.json_util.JSONMode.CANONICAL`. + +.. versionadded:: 3.5 +""" + +RELAXED_JSON_OPTIONS: JSONOptions = JSONOptions(json_mode=JSONMode.RELAXED) +""":class:`JSONOptions` for Relaxed Extended JSON. + +.. seealso:: The documentation for :const:`bson.json_util.JSONMode.RELAXED`. + +.. versionadded:: 3.5 +""" + +DEFAULT_JSON_OPTIONS: JSONOptions = RELAXED_JSON_OPTIONS +"""The default :class:`JSONOptions` for JSON encoding/decoding. + +The same as :const:`RELAXED_JSON_OPTIONS`. + +.. versionchanged:: 4.0 + Changed from :const:`LEGACY_JSON_OPTIONS` to + :const:`RELAXED_JSON_OPTIONS`. + +.. versionadded:: 3.4 +""" + + +def dumps(obj: Any, *args: Any, **kwargs: Any) -> str: + """Helper function that wraps :func:`json.dumps`. + + Recursive function that handles all BSON types including + :class:`~bson.binary.Binary` and :class:`~bson.code.Code`. + + :Parameters: + - `json_options`: A :class:`JSONOptions` instance used to modify the + encoding of MongoDB Extended JSON types. Defaults to + :const:`DEFAULT_JSON_OPTIONS`. + + .. 
versionchanged:: 4.0 + Now outputs MongoDB Relaxed Extended JSON by default (using + :const:`DEFAULT_JSON_OPTIONS`). + + .. versionchanged:: 3.4 + Accepts optional parameter `json_options`. See :class:`JSONOptions`. + """ + json_options = kwargs.pop("json_options", DEFAULT_JSON_OPTIONS) + return json.dumps(_json_convert(obj, json_options), *args, **kwargs) + + +def loads(s: Union[str, bytes, bytearray], *args: Any, **kwargs: Any) -> Any: + """Helper function that wraps :func:`json.loads`. + + Automatically passes the object_hook for BSON type conversion. + + Raises ``TypeError``, ``ValueError``, ``KeyError``, or + :exc:`~bson.errors.InvalidId` on invalid MongoDB Extended JSON. + + :Parameters: + - `json_options`: A :class:`JSONOptions` instance used to modify the + decoding of MongoDB Extended JSON types. Defaults to + :const:`DEFAULT_JSON_OPTIONS`. + + .. versionchanged:: 4.0 + Now loads :class:`datetime.datetime` instances as naive by default. To + load timezone aware instances utilize the `json_options` parameter. + See :ref:`tz_aware_default_change` for an example. + + .. versionchanged:: 3.5 + Parses Relaxed and Canonical Extended JSON as well as PyMongo's legacy + format. Now raises ``TypeError`` or ``ValueError`` when parsing JSON + type wrappers with values of the wrong type or any extra keys. + + .. versionchanged:: 3.4 + Accepts optional parameter `json_options`. See :class:`JSONOptions`. + """ + json_options = kwargs.pop("json_options", DEFAULT_JSON_OPTIONS) + kwargs["object_pairs_hook"] = lambda pairs: object_pairs_hook(pairs, json_options) + return json.loads(s, *args, **kwargs) + + +def _json_convert(obj: Any, json_options: JSONOptions = DEFAULT_JSON_OPTIONS) -> Any: + """Recursive helper method that converts BSON types so they can be + converted into json. 
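+    For example (an illustrative sketch; this helper is private, so its
+    exact return type is an implementation detail), a document containing
+    an ObjectId inflates to plain JSON-serializable types::
+
+        >>> from bson.objectid import ObjectId
+        >>> _json_convert({"_id": ObjectId("0123456789ab0123456789ab")})
+        SON([('_id', {'$oid': '0123456789ab0123456789ab'})])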
+ """ + if hasattr(obj, "items"): + return SON(((k, _json_convert(v, json_options)) for k, v in obj.items())) + elif hasattr(obj, "__iter__") and not isinstance(obj, (str, bytes)): + return [_json_convert(v, json_options) for v in obj] + try: + return default(obj, json_options) + except TypeError: + return obj + + +def object_pairs_hook( + pairs: Sequence[Tuple[str, Any]], json_options: JSONOptions = DEFAULT_JSON_OPTIONS +) -> Any: + return object_hook(json_options.document_class(pairs), json_options) + + +def object_hook(dct: Mapping[str, Any], json_options: JSONOptions = DEFAULT_JSON_OPTIONS) -> Any: + if "$oid" in dct: + return _parse_canonical_oid(dct) + if ( + isinstance(dct.get("$ref"), str) + and "$id" in dct + and isinstance(dct.get("$db"), (str, type(None))) + ): + return _parse_canonical_dbref(dct) + if "$date" in dct: + return _parse_canonical_datetime(dct, json_options) + if "$regex" in dct: + return _parse_legacy_regex(dct) + if "$minKey" in dct: + return _parse_canonical_minkey(dct) + if "$maxKey" in dct: + return _parse_canonical_maxkey(dct) + if "$binary" in dct: + if "$type" in dct: + return _parse_legacy_binary(dct, json_options) + else: + return _parse_canonical_binary(dct, json_options) + if "$code" in dct: + return _parse_canonical_code(dct) + if "$uuid" in dct: + return _parse_legacy_uuid(dct, json_options) + if "$undefined" in dct: + return None + if "$numberLong" in dct: + return _parse_canonical_int64(dct) + if "$timestamp" in dct: + tsp = dct["$timestamp"] + return Timestamp(tsp["t"], tsp["i"]) + if "$numberDecimal" in dct: + return _parse_canonical_decimal128(dct) + if "$dbPointer" in dct: + return _parse_canonical_dbpointer(dct) + if "$regularExpression" in dct: + return _parse_canonical_regex(dct) + if "$symbol" in dct: + return _parse_canonical_symbol(dct) + if "$numberInt" in dct: + return _parse_canonical_int32(dct) + if "$numberDouble" in dct: + return _parse_canonical_double(dct) + return dct + + +def _parse_legacy_regex(doc: Any) -> Any: + pattern = doc["$regex"] + # Check if this is the $regex query operator. + if not isinstance(pattern, (str, bytes)): + return doc + flags = 0 + # PyMongo always adds $options but some other tools may not. + for opt in doc.get("$options", ""): + flags |= _RE_OPT_TABLE.get(opt, 0) + return Regex(pattern, flags) + + +def _parse_legacy_uuid(doc: Any, json_options: JSONOptions) -> Union[Binary, uuid.UUID]: + """Decode a JSON legacy $uuid to Python UUID.""" + if len(doc) != 1: + raise TypeError(f"Bad $uuid, extra field(s): {doc}") + if not isinstance(doc["$uuid"], str): + raise TypeError(f"$uuid must be a string: {doc}") + if json_options.uuid_representation == UuidRepresentation.UNSPECIFIED: + return Binary.from_uuid(uuid.UUID(doc["$uuid"])) + else: + return uuid.UUID(doc["$uuid"]) + + +def _binary_or_uuid(data: Any, subtype: int, json_options: JSONOptions) -> Union[Binary, uuid.UUID]: + # special handling for UUID + if subtype in ALL_UUID_SUBTYPES: + uuid_representation = json_options.uuid_representation + binary_value = Binary(data, subtype) + if uuid_representation == UuidRepresentation.UNSPECIFIED: + return binary_value + if subtype == UUID_SUBTYPE: + # Legacy behavior: use STANDARD with binary subtype 4. + uuid_representation = UuidRepresentation.STANDARD + elif uuid_representation == UuidRepresentation.STANDARD: + # subtype == OLD_UUID_SUBTYPE + # Legacy behavior: STANDARD is the same as PYTHON_LEGACY. 
+            uuid_representation = UuidRepresentation.PYTHON_LEGACY
+        return binary_value.as_uuid(uuid_representation)
+
+    if subtype == 0:
+        return cast(uuid.UUID, data)
+    return Binary(data, subtype)
+
+
+def _parse_legacy_binary(doc: Any, json_options: JSONOptions) -> Union[Binary, uuid.UUID]:
+    if isinstance(doc["$type"], int):
+        doc["$type"] = "%02x" % doc["$type"]
+    subtype = int(doc["$type"], 16)
+    if subtype >= 0xFFFFFF80:  # Handle mongoexport values
+        subtype = int(doc["$type"][6:], 16)
+    data = base64.b64decode(doc["$binary"].encode())
+    return _binary_or_uuid(data, subtype, json_options)
+
+
+def _parse_canonical_binary(doc: Any, json_options: JSONOptions) -> Union[Binary, uuid.UUID]:
+    binary = doc["$binary"]
+    b64 = binary["base64"]
+    subtype = binary["subType"]
+    if not isinstance(b64, str):
+        raise TypeError(f"$binary base64 must be a string: {doc}")
+    if not isinstance(subtype, str) or len(subtype) > 2:
+        raise TypeError(f"$binary subType must be a string at most 2 characters: {doc}")
+    if len(binary) != 2:
+        raise TypeError(f'$binary must include only "base64" and "subType" components: {doc}')
+
+    data = base64.b64decode(b64.encode())
+    return _binary_or_uuid(data, int(subtype, 16), json_options)
+
+
+def _parse_canonical_datetime(
+    doc: Any, json_options: JSONOptions
+) -> Union[datetime.datetime, DatetimeMS]:
+    """Decode a JSON datetime to Python datetime.datetime."""
+    dtm = doc["$date"]
+    if len(doc) != 1:
+        raise TypeError(f"Bad $date, extra field(s): {doc}")
+    # mongoexport 2.6 and newer
+    if isinstance(dtm, str):
+        # Parse offset
+        if dtm[-1] == "Z":
+            dt = dtm[:-1]
+            offset = "Z"
+        elif dtm[-6] in ("+", "-") and dtm[-3] == ":":
+            # (+|-)HH:MM
+            dt = dtm[:-6]
+            offset = dtm[-6:]
+        elif dtm[-5] in ("+", "-"):
+            # (+|-)HHMM
+            dt = dtm[:-5]
+            offset = dtm[-5:]
+        elif dtm[-3] in ("+", "-"):
+            # (+|-)HH
+            dt = dtm[:-3]
+            offset = dtm[-3:]
+        else:
+            dt = dtm
+            offset = ""
+
+        # Parse the optional fractional seconds portion.
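+        # (Editorial note: e.g. "2011-10-20T14:25:13.111Z" carries ".111",
+        # which becomes microsecond=111000; digits beyond six are truncated
+        # by int().)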
+        dot_index = dt.rfind(".")
+        microsecond = 0
+        if dot_index != -1:
+            microsecond = int(float(dt[dot_index:]) * 1000000)
+            dt = dt[:dot_index]
+
+        aware = datetime.datetime.strptime(dt, "%Y-%m-%dT%H:%M:%S").replace(
+            microsecond=microsecond, tzinfo=utc
+        )
+
+        if offset and offset != "Z":
+            if len(offset) == 6:
+                hours, minutes = offset[1:].split(":")
+                secs = int(hours) * 3600 + int(minutes) * 60
+            elif len(offset) == 5:
+                secs = int(offset[1:3]) * 3600 + int(offset[3:]) * 60
+            elif len(offset) == 3:
+                secs = int(offset[1:3]) * 3600
+            if offset[0] == "-":
+                secs *= -1
+            aware = aware - datetime.timedelta(seconds=secs)
+
+        if json_options.tz_aware:
+            if json_options.tzinfo:
+                aware = aware.astimezone(json_options.tzinfo)
+            if json_options.datetime_conversion == DatetimeConversion.DATETIME_MS:
+                return DatetimeMS(aware)
+            return aware
+        else:
+            aware_tzinfo_none = aware.replace(tzinfo=None)
+            if json_options.datetime_conversion == DatetimeConversion.DATETIME_MS:
+                return DatetimeMS(aware_tzinfo_none)
+            return aware_tzinfo_none
+    return _millis_to_datetime(int(dtm), json_options)
+
+
+def _parse_canonical_oid(doc: Any) -> ObjectId:
+    """Decode a JSON ObjectId to bson.objectid.ObjectId."""
+    if len(doc) != 1:
+        raise TypeError(f"Bad $oid, extra field(s): {doc}")
+    return ObjectId(doc["$oid"])
+
+
+def _parse_canonical_symbol(doc: Any) -> str:
+    """Decode a JSON symbol to Python string."""
+    symbol = doc["$symbol"]
+    if len(doc) != 1:
+        raise TypeError(f"Bad $symbol, extra field(s): {doc}")
+    return str(symbol)
+
+
+def _parse_canonical_code(doc: Any) -> Code:
+    """Decode a JSON code to bson.code.Code."""
+    for key in doc:
+        if key not in ("$code", "$scope"):
+            raise TypeError(f"Bad $code, extra field(s): {doc}")
+    return Code(doc["$code"], scope=doc.get("$scope"))
+
+
+def _parse_canonical_regex(doc: Any) -> Regex:
+    """Decode a JSON regex to bson.regex.Regex."""
+    regex = doc["$regularExpression"]
+    if len(doc) != 1:
+        raise TypeError(f"Bad $regularExpression, extra field(s): {doc}")
+    if len(regex) != 2:
+        raise TypeError(
+            'Bad $regularExpression must include only "pattern" '
+            'and "options" components: {}'.format(doc)
+        )
+    opts = regex["options"]
+    if not isinstance(opts, str):
+        raise TypeError(
+            "Bad $regularExpression options, options must be string, was type %s" % (type(opts))
+        )
+    return Regex(regex["pattern"], opts)
+
+
+def _parse_canonical_dbref(doc: Any) -> DBRef:
+    """Decode a JSON DBRef to bson.dbref.DBRef."""
+    return DBRef(doc.pop("$ref"), doc.pop("$id"), database=doc.pop("$db", None), **doc)
+
+
+def _parse_canonical_dbpointer(doc: Any) -> Any:
+    """Decode a JSON (deprecated) DBPointer to bson.dbref.DBRef."""
+    dbref = doc["$dbPointer"]
+    if len(doc) != 1:
+        raise TypeError(f"Bad $dbPointer, extra field(s): {doc}")
+    if isinstance(dbref, DBRef):
+        dbref_doc = dbref.as_doc()
+        # DBPointer must not contain $db in its value.
+        if dbref.database is not None:
+            raise TypeError(f"Bad $dbPointer, extra field $db: {dbref_doc}")
+        if not isinstance(dbref.id, ObjectId):
+            raise TypeError(f"Bad $dbPointer, $id must be an ObjectId: {dbref_doc}")
+        if len(dbref_doc) != 2:
+            raise TypeError(f"Bad $dbPointer, extra field(s) in DBRef: {dbref_doc}")
+        return dbref
+    else:
+        raise TypeError(f"Bad $dbPointer, expected a DBRef: {doc}")
+
+
+def _parse_canonical_int32(doc: Any) -> int:
+    """Decode a JSON int32 to python int."""
+    i_str = doc["$numberInt"]
+    if len(doc) != 1:
+        raise TypeError(f"Bad $numberInt, extra field(s): {doc}")
+    if not isinstance(i_str, str):
+        raise TypeError(f"$numberInt must be string: {doc}")
+    return int(i_str)
+
+
+def _parse_canonical_int64(doc: Any) -> Int64:
+    """Decode a JSON int64 to bson.int64.Int64."""
+    l_str = doc["$numberLong"]
+    if len(doc) != 1:
+        raise TypeError(f"Bad $numberLong, extra field(s): {doc}")
+    return Int64(l_str)
+
+
+def _parse_canonical_double(doc: Any) -> float:
+    """Decode a JSON double to python float."""
+    d_str = doc["$numberDouble"]
+    if len(doc) != 1:
+        raise TypeError(f"Bad $numberDouble, extra field(s): {doc}")
+    if not isinstance(d_str, str):
+        raise TypeError(f"$numberDouble must be string: {doc}")
+    return float(d_str)
+
+
+def _parse_canonical_decimal128(doc: Any) -> Decimal128:
+    """Decode a JSON decimal128 to bson.decimal128.Decimal128."""
+    d_str = doc["$numberDecimal"]
+    if len(doc) != 1:
+        raise TypeError(f"Bad $numberDecimal, extra field(s): {doc}")
+    if not isinstance(d_str, str):
+        raise TypeError(f"$numberDecimal must be string: {doc}")
+    return Decimal128(d_str)
+
+
+def _parse_canonical_minkey(doc: Any) -> MinKey:
+    """Decode a JSON MinKey to bson.min_key.MinKey."""
+    if type(doc["$minKey"]) is not int or doc["$minKey"] != 1:
+        raise TypeError(f"$minKey value must be 1: {doc}")
+    if len(doc) != 1:
+        raise TypeError(f"Bad $minKey, extra field(s): {doc}")
+    return MinKey()
+
+
+def _parse_canonical_maxkey(doc: Any) -> MaxKey:
+    """Decode a JSON MaxKey to bson.max_key.MaxKey."""
+    if type(doc["$maxKey"]) is not int or doc["$maxKey"] != 1:
+        raise TypeError(f"$maxKey value must be 1: {doc}")
+    if len(doc) != 1:
+        raise TypeError(f"Bad $maxKey, extra field(s): {doc}")
+    return MaxKey()
+
+
+def _encode_binary(data: bytes, subtype: int, json_options: JSONOptions) -> Any:
+    if json_options.json_mode == JSONMode.LEGACY:
+        return SON([("$binary", base64.b64encode(data).decode()), ("$type", "%02x" % subtype)])
+    return {
+        "$binary": SON([("base64", base64.b64encode(data).decode()), ("subType", "%02x" % subtype)])
+    }
+
+
+def default(obj: Any, json_options: JSONOptions = DEFAULT_JSON_OPTIONS) -> Any:
+    # We preserve key order when rendering SON, DBRef, etc. as JSON by
+    # returning a SON for those types instead of a dict.
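+    # (Editorial note: the isinstance chain below is order-sensitive; e.g.
+    # bool is checked before the Canonical int wrapper because bool is a
+    # subclass of int.)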
+ if isinstance(obj, ObjectId): + return {"$oid": str(obj)} + if isinstance(obj, DBRef): + return _json_convert(obj.as_doc(), json_options=json_options) + if isinstance(obj, datetime.datetime): + if json_options.datetime_representation == DatetimeRepresentation.ISO8601: + if not obj.tzinfo: + obj = obj.replace(tzinfo=utc) + assert obj.tzinfo is not None + if obj >= EPOCH_AWARE: + off = obj.tzinfo.utcoffset(obj) + if (off.days, off.seconds, off.microseconds) == (0, 0, 0): # type: ignore + tz_string = "Z" + else: + tz_string = obj.strftime("%z") + millis = int(obj.microsecond / 1000) + fracsecs = ".%03d" % (millis,) if millis else "" + return { + "$date": "{}{}{}".format(obj.strftime("%Y-%m-%dT%H:%M:%S"), fracsecs, tz_string) + } + + millis = _datetime_to_millis(obj) + if json_options.datetime_representation == DatetimeRepresentation.LEGACY: + return {"$date": millis} + return {"$date": {"$numberLong": str(millis)}} + if isinstance(obj, DatetimeMS): + if ( + json_options.datetime_representation == DatetimeRepresentation.ISO8601 + and 0 <= int(obj) <= _max_datetime_ms() + ): + return default(obj.as_datetime(), json_options) + elif json_options.datetime_representation == DatetimeRepresentation.LEGACY: + return {"$date": str(int(obj))} + return {"$date": {"$numberLong": str(int(obj))}} + if json_options.strict_number_long and isinstance(obj, Int64): + return {"$numberLong": str(obj)} + if isinstance(obj, (RE_TYPE, Regex)): + flags = "" + if obj.flags & re.IGNORECASE: + flags += "i" + if obj.flags & re.LOCALE: + flags += "l" + if obj.flags & re.MULTILINE: + flags += "m" + if obj.flags & re.DOTALL: + flags += "s" + if obj.flags & re.UNICODE: + flags += "u" + if obj.flags & re.VERBOSE: + flags += "x" + if isinstance(obj.pattern, str): + pattern = obj.pattern + else: + pattern = obj.pattern.decode("utf-8") + if json_options.json_mode == JSONMode.LEGACY: + return SON([("$regex", pattern), ("$options", flags)]) + return {"$regularExpression": SON([("pattern", pattern), ("options", flags)])} + if isinstance(obj, MinKey): + return {"$minKey": 1} + if isinstance(obj, MaxKey): + return {"$maxKey": 1} + if isinstance(obj, Timestamp): + return {"$timestamp": SON([("t", obj.time), ("i", obj.inc)])} + if isinstance(obj, Code): + if obj.scope is None: + return {"$code": str(obj)} + return SON([("$code", str(obj)), ("$scope", _json_convert(obj.scope, json_options))]) + if isinstance(obj, Binary): + return _encode_binary(obj, obj.subtype, json_options) + if isinstance(obj, bytes): + return _encode_binary(obj, 0, json_options) + if isinstance(obj, uuid.UUID): + if json_options.strict_uuid: + binval = Binary.from_uuid(obj, uuid_representation=json_options.uuid_representation) + return _encode_binary(binval, binval.subtype, json_options) + else: + return {"$uuid": obj.hex} + if isinstance(obj, Decimal128): + return {"$numberDecimal": str(obj)} + if isinstance(obj, bool): + return obj + if json_options.json_mode == JSONMode.CANONICAL and isinstance(obj, int): + if -(2**31) <= obj < 2**31: + return {"$numberInt": str(obj)} + return {"$numberLong": str(obj)} + if json_options.json_mode != JSONMode.LEGACY and isinstance(obj, float): + if math.isnan(obj): + return {"$numberDouble": "NaN"} + elif math.isinf(obj): + representation = "Infinity" if obj > 0 else "-Infinity" + return {"$numberDouble": representation} + elif json_options.json_mode == JSONMode.CANONICAL: + # repr() will return the shortest string guaranteed to produce the + # original value, when float() is called on it. 
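+            # (Editorial example: repr(0.1) == '0.1' and float('0.1') == 0.1.)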
+ return {"$numberDouble": str(repr(obj))} + raise TypeError("%r is not JSON serializable" % obj) diff --git a/backend/test/lib/python3.8/site-packages/bson/max_key.py b/backend/test/lib/python3.8/site-packages/bson/max_key.py new file mode 100644 index 0000000000000000000000000000000000000000..83278087b0bb52f4a824676d3467331456f7f06d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/max_key.py @@ -0,0 +1,54 @@ +# Copyright 2010-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Representation for the MongoDB internal MaxKey type.""" +from typing import Any + + +class MaxKey: + """MongoDB internal MaxKey type.""" + + __slots__ = () + + _type_marker = 127 + + def __getstate__(self) -> Any: + return {} + + def __setstate__(self, state: Any) -> None: + pass + + def __eq__(self, other: Any) -> bool: + return isinstance(other, MaxKey) + + def __hash__(self) -> int: + return hash(self._type_marker) + + def __ne__(self, other: Any) -> bool: + return not self == other + + def __le__(self, other: Any) -> bool: + return isinstance(other, MaxKey) + + def __lt__(self, dummy: Any) -> bool: + return False + + def __ge__(self, dummy: Any) -> bool: + return True + + def __gt__(self, other: Any) -> bool: + return not isinstance(other, MaxKey) + + def __repr__(self) -> str: + return "MaxKey()" diff --git a/backend/test/lib/python3.8/site-packages/bson/min_key.py b/backend/test/lib/python3.8/site-packages/bson/min_key.py new file mode 100644 index 0000000000000000000000000000000000000000..50011df6e52c0888eb4d16d55485d4f2c7f8b446 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/min_key.py @@ -0,0 +1,54 @@ +# Copyright 2010-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Representation for the MongoDB internal MinKey type.""" +from typing import Any + + +class MinKey: + """MongoDB internal MinKey type.""" + + __slots__ = () + + _type_marker = 255 + + def __getstate__(self) -> Any: + return {} + + def __setstate__(self, state: Any) -> None: + pass + + def __eq__(self, other: Any) -> bool: + return isinstance(other, MinKey) + + def __hash__(self) -> int: + return hash(self._type_marker) + + def __ne__(self, other: Any) -> bool: + return not self == other + + def __le__(self, dummy: Any) -> bool: + return True + + def __lt__(self, other: Any) -> bool: + return not isinstance(other, MinKey) + + def __ge__(self, other: Any) -> bool: + return isinstance(other, MinKey) + + def __gt__(self, dummy: Any) -> bool: + return False + + def __repr__(self) -> str: + return "MinKey()" diff --git a/backend/test/lib/python3.8/site-packages/bson/objectid.py b/backend/test/lib/python3.8/site-packages/bson/objectid.py new file mode 100644 index 0000000000000000000000000000000000000000..d3afe3cd3cd95243bd3bb5b72f1e80cf7e45921d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/objectid.py @@ -0,0 +1,281 @@ +# Copyright 2009-2015 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for working with MongoDB ObjectIds.""" + +import binascii +import calendar +import datetime +import os +import struct +import threading +import time +from random import SystemRandom +from typing import Any, NoReturn, Optional, Type, Union + +from bson.errors import InvalidId +from bson.tz_util import utc + +_MAX_COUNTER_VALUE = 0xFFFFFF + + +def _raise_invalid_id(oid: str) -> NoReturn: + raise InvalidId( + "%r is not a valid ObjectId, it must be a 12-byte input" + " or a 24-character hex string" % oid + ) + + +def _random_bytes() -> bytes: + """Get the 5-byte random field of an ObjectId.""" + return os.urandom(5) + + +class ObjectId: + """A MongoDB ObjectId.""" + + _pid = os.getpid() + + _inc = SystemRandom().randint(0, _MAX_COUNTER_VALUE) + _inc_lock = threading.Lock() + + __random = _random_bytes() + + __slots__ = ("__id",) + + _type_marker = 7 + + def __init__(self, oid: Optional[Union[str, "ObjectId", bytes]] = None) -> None: + """Initialize a new ObjectId. + + An ObjectId is a 12-byte unique identifier consisting of: + + - a 4-byte value representing the seconds since the Unix epoch, + - a 5-byte random value, + - a 3-byte counter, starting with a random value. + + By default, ``ObjectId()`` creates a new unique identifier. The + optional parameter `oid` can be an :class:`ObjectId`, or any 12 + :class:`bytes`. 
+ + For example, the 12 bytes b'foo-bar-quux' do not follow the ObjectId + specification but they are acceptable input:: + + >>> ObjectId(b'foo-bar-quux') + ObjectId('666f6f2d6261722d71757578') + + `oid` can also be a :class:`str` of 24 hex digits:: + + >>> ObjectId('0123456789ab0123456789ab') + ObjectId('0123456789ab0123456789ab') + + Raises :class:`~bson.errors.InvalidId` if `oid` is not 12 bytes nor + 24 hex digits, or :class:`TypeError` if `oid` is not an accepted type. + + :Parameters: + - `oid` (optional): a valid ObjectId. + + .. seealso:: The MongoDB documentation on `ObjectIds <http://dochub.mongodb.org/core/objectids>`_. + + .. versionchanged:: 3.8 + :class:`~bson.objectid.ObjectId` now implements the `ObjectID + specification version 0.2 + <https://github.com/mongodb/specifications/blob/master/source/ + objectid.rst>`_. + """ + if oid is None: + self.__generate() + elif isinstance(oid, bytes) and len(oid) == 12: + self.__id = oid + else: + self.__validate(oid) + + @classmethod + def from_datetime(cls: Type["ObjectId"], generation_time: datetime.datetime) -> "ObjectId": + """Create a dummy ObjectId instance with a specific generation time. + + This method is useful for doing range queries on a field + containing :class:`ObjectId` instances. + + .. warning:: + It is not safe to insert a document containing an ObjectId + generated using this method. This method deliberately + eliminates the uniqueness guarantee that ObjectIds + generally provide. ObjectIds generated with this method + should be used exclusively in queries. + + `generation_time` will be converted to UTC. Naive datetime + instances will be treated as though they already contain UTC. + + An example using this helper to get documents where ``"_id"`` + was generated before January 1, 2010 would be: + + >>> gen_time = datetime.datetime(2010, 1, 1) + >>> dummy_id = ObjectId.from_datetime(gen_time) + >>> result = collection.find({"_id": {"$lt": dummy_id}}) + + :Parameters: + - `generation_time`: :class:`~datetime.datetime` to be used + as the generation time for the resulting ObjectId. + """ + offset = generation_time.utcoffset() + if offset is not None: + generation_time = generation_time - offset + timestamp = calendar.timegm(generation_time.timetuple()) + oid = struct.pack(">I", int(timestamp)) + b"\x00\x00\x00\x00\x00\x00\x00\x00" + return cls(oid) + + @classmethod + def is_valid(cls: Type["ObjectId"], oid: Any) -> bool: + """Checks if a `oid` string is valid or not. + + :Parameters: + - `oid`: the object id to validate + + .. versionadded:: 2.3 + """ + if not oid: + return False + + try: + ObjectId(oid) + return True + except (InvalidId, TypeError): + return False + + @classmethod + def _random(cls) -> bytes: + """Generate a 5-byte random number once per process.""" + pid = os.getpid() + if pid != cls._pid: + cls._pid = pid + cls.__random = _random_bytes() + return cls.__random + + def __generate(self) -> None: + """Generate a new value for this ObjectId.""" + # 4 bytes current time + oid = struct.pack(">I", int(time.time())) + + # 5 bytes random + oid += ObjectId._random() + + # 3 bytes inc + with ObjectId._inc_lock: + oid += struct.pack(">I", ObjectId._inc)[1:4] + ObjectId._inc = (ObjectId._inc + 1) % (_MAX_COUNTER_VALUE + 1) + + self.__id = oid + + def __validate(self, oid: Any) -> None: + """Validate and use the given id for this ObjectId. + + Raises TypeError if id is not an instance of :class:`str`, + :class:`bytes`, or ObjectId. Raises InvalidId if it is not a + valid ObjectId. 
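+
+        For example (illustrative), a hex string of the wrong length is
+        rejected::
+
+            >>> ObjectId("abc")
+            Traceback (most recent call last):
+              ...
+            bson.errors.InvalidId: 'abc' is not a valid ObjectId, it must be a 12-byte input or a 24-character hex string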
+ + :Parameters: + - `oid`: a valid ObjectId + """ + if isinstance(oid, ObjectId): + self.__id = oid.binary + elif isinstance(oid, str): + if len(oid) == 24: + try: + self.__id = bytes.fromhex(oid) + except (TypeError, ValueError): + _raise_invalid_id(oid) + else: + _raise_invalid_id(oid) + else: + raise TypeError(f"id must be an instance of (bytes, str, ObjectId), not {type(oid)}") + + @property + def binary(self) -> bytes: + """12-byte binary representation of this ObjectId.""" + return self.__id + + @property + def generation_time(self) -> datetime.datetime: + """A :class:`datetime.datetime` instance representing the time of + generation for this :class:`ObjectId`. + + The :class:`datetime.datetime` is timezone aware, and + represents the generation time in UTC. It is precise to the + second. + """ + timestamp = struct.unpack(">I", self.__id[0:4])[0] + return datetime.datetime.fromtimestamp(timestamp, utc) + + def __getstate__(self) -> bytes: + """Return value of object for pickling. + needed explicitly because __slots__() defined. + """ + return self.__id + + def __setstate__(self, value: Any) -> None: + """Explicit state set from pickling""" + # Provide backwards compatibility with OIDs + # pickled with pymongo-1.9 or older. + if isinstance(value, dict): + oid = value["_ObjectId__id"] + else: + oid = value + # ObjectIds pickled in python 2.x used `str` for __id. + # In python 3.x this has to be converted to `bytes` + # by encoding latin-1. + if isinstance(oid, str): + self.__id = oid.encode("latin-1") + else: + self.__id = oid + + def __str__(self) -> str: + return binascii.hexlify(self.__id).decode() + + def __repr__(self) -> str: + return f"ObjectId('{str(self)}')" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ObjectId): + return self.__id == other.binary + return NotImplemented + + def __ne__(self, other: Any) -> bool: + if isinstance(other, ObjectId): + return self.__id != other.binary + return NotImplemented + + def __lt__(self, other: Any) -> bool: + if isinstance(other, ObjectId): + return self.__id < other.binary + return NotImplemented + + def __le__(self, other: Any) -> bool: + if isinstance(other, ObjectId): + return self.__id <= other.binary + return NotImplemented + + def __gt__(self, other: Any) -> bool: + if isinstance(other, ObjectId): + return self.__id > other.binary + return NotImplemented + + def __ge__(self, other: Any) -> bool: + if isinstance(other, ObjectId): + return self.__id >= other.binary + return NotImplemented + + def __hash__(self) -> int: + """Get a hash value for this :class:`ObjectId`.""" + return hash(self.__id) diff --git a/backend/test/lib/python3.8/site-packages/bson/py.typed b/backend/test/lib/python3.8/site-packages/bson/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..0f4057061a763b445a4300825a450069a96f5719 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/py.typed @@ -0,0 +1,2 @@ +# PEP-561 Support File. +# "Package maintainers who wish to support type checking of their code MUST add a marker file named py.typed to their package supporting typing". diff --git a/backend/test/lib/python3.8/site-packages/bson/raw_bson.py b/backend/test/lib/python3.8/site-packages/bson/raw_bson.py new file mode 100644 index 0000000000000000000000000000000000000000..d5dbe8fbf9eaf8c6c8a982b0ff6f1dd7feb40eaa --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/raw_bson.py @@ -0,0 +1,196 @@ +# Copyright 2015-present MongoDB, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for representing raw BSON documents. + +Inserting and Retrieving RawBSONDocuments +========================================= + +Example: Moving a document between different databases/collections + +.. doctest:: + + >>> import bson + >>> from pymongo import MongoClient + >>> from bson.raw_bson import RawBSONDocument + >>> client = MongoClient(document_class=RawBSONDocument) + >>> client.drop_database("db") + >>> client.drop_database("replica_db") + >>> db = client.db + >>> result = db.test.insert_many( + ... [{"_id": 1, "a": 1}, {"_id": 2, "b": 1}, {"_id": 3, "c": 1}, {"_id": 4, "d": 1}] + ... ) + >>> replica_db = client.replica_db + >>> for doc in db.test.find(): + ... print(f"raw document: {doc.raw}") + ... print(f"decoded document: {bson.decode(doc.raw)}") + ... result = replica_db.test.insert_one(doc) + ... + raw document: b'...' + decoded document: {'_id': 1, 'a': 1} + raw document: b'...' + decoded document: {'_id': 2, 'b': 1} + raw document: b'...' + decoded document: {'_id': 3, 'c': 1} + raw document: b'...' + decoded document: {'_id': 4, 'd': 1} + +For use cases like moving documents across different databases or writing binary +blobs to disk, using raw BSON documents provides better speed and avoids the +overhead of decoding or encoding BSON. +""" + +from typing import Any, Dict, ItemsView, Iterator, Mapping, Optional + +from bson import _get_object_size, _raw_to_dict +from bson.codec_options import _RAW_BSON_DOCUMENT_MARKER +from bson.codec_options import DEFAULT_CODEC_OPTIONS as DEFAULT +from bson.codec_options import CodecOptions +from bson.son import SON + + +def _inflate_bson( + bson_bytes: bytes, codec_options: CodecOptions, raw_array: bool = False +) -> Dict[Any, Any]: + """Inflates the top level fields of a BSON document. + + :Parameters: + - `bson_bytes`: the BSON bytes that compose this document + - `codec_options`: An instance of + :class:`~bson.codec_options.CodecOptions` whose ``document_class`` + must be :class:`RawBSONDocument`. + """ + # Use SON to preserve ordering of elements. + return _raw_to_dict( + bson_bytes, 4, len(bson_bytes) - 1, codec_options, SON(), raw_array=raw_array + ) + + +class RawBSONDocument(Mapping[str, Any]): + """Representation for a MongoDB document that provides access to the raw + BSON bytes that compose it. + + Only when a field is accessed or modified within the document does + RawBSONDocument decode its bytes. + """ + + __slots__ = ("__raw", "__inflated_doc", "__codec_options") + _type_marker = _RAW_BSON_DOCUMENT_MARKER + + def __init__(self, bson_bytes: bytes, codec_options: Optional[CodecOptions] = None) -> None: + """Create a new :class:`RawBSONDocument` + + :class:`RawBSONDocument` is a representation of a BSON document that + provides access to the underlying raw BSON bytes. Only when a field is + accessed or modified within the document does RawBSONDocument decode + its bytes. 
+ + :class:`RawBSONDocument` implements the ``Mapping`` abstract base + class from the standard library so it can be used like a read-only + ``dict``:: + + >>> from bson import encode + >>> raw_doc = RawBSONDocument(encode({'_id': 'my_doc'})) + >>> raw_doc.raw + b'...' + >>> raw_doc['_id'] + 'my_doc' + + :Parameters: + - `bson_bytes`: the BSON bytes that compose this document + - `codec_options` (optional): An instance of + :class:`~bson.codec_options.CodecOptions` whose ``document_class`` + must be :class:`RawBSONDocument`. The default is + :attr:`DEFAULT_RAW_BSON_OPTIONS`. + + .. versionchanged:: 3.8 + :class:`RawBSONDocument` now validates that the ``bson_bytes`` + passed in represent a single bson document. + + .. versionchanged:: 3.5 + If a :class:`~bson.codec_options.CodecOptions` is passed in, its + `document_class` must be :class:`RawBSONDocument`. + """ + self.__raw = bson_bytes + self.__inflated_doc: Optional[Mapping[str, Any]] = None + # Can't default codec_options to DEFAULT_RAW_BSON_OPTIONS in signature, + # it refers to this class RawBSONDocument. + if codec_options is None: + codec_options = DEFAULT_RAW_BSON_OPTIONS + elif not issubclass(codec_options.document_class, RawBSONDocument): + raise TypeError( + "RawBSONDocument cannot use CodecOptions with document " + "class {}".format(codec_options.document_class) + ) + self.__codec_options = codec_options + # Validate the bson object size. + _get_object_size(bson_bytes, 0, len(bson_bytes)) + + @property + def raw(self) -> bytes: + """The raw BSON bytes composing this document.""" + return self.__raw + + def items(self) -> ItemsView[str, Any]: + """Lazily decode and iterate elements in this document.""" + return self.__inflated.items() + + @property + def __inflated(self) -> Mapping[str, Any]: + if self.__inflated_doc is None: + # We already validated the object's size when this document was + # created, so no need to do that again. + # Use SON to preserve ordering of elements. + self.__inflated_doc = self._inflate_bson(self.__raw, self.__codec_options) + return self.__inflated_doc + + @staticmethod + def _inflate_bson(bson_bytes: bytes, codec_options: CodecOptions) -> Mapping[Any, Any]: + return _inflate_bson(bson_bytes, codec_options) + + def __getitem__(self, item: str) -> Any: + return self.__inflated[item] + + def __iter__(self) -> Iterator[str]: + return iter(self.__inflated) + + def __len__(self) -> int: + return len(self.__inflated) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, RawBSONDocument): + return self.__raw == other.raw + return NotImplemented + + def __repr__(self) -> str: + return "{}({!r}, codec_options={!r})".format( + self.__class__.__name__, + self.raw, + self.__codec_options, + ) + + +class _RawArrayBSONDocument(RawBSONDocument): + """A RawBSONDocument that only expands sub-documents and arrays when accessed.""" + + @staticmethod + def _inflate_bson(bson_bytes: bytes, codec_options: CodecOptions) -> Mapping[Any, Any]: + return _inflate_bson(bson_bytes, codec_options, raw_array=True) + + +DEFAULT_RAW_BSON_OPTIONS: CodecOptions = DEFAULT.with_options(document_class=RawBSONDocument) +_RAW_ARRAY_BSON_OPTIONS: CodecOptions = DEFAULT.with_options(document_class=_RawArrayBSONDocument) +"""The default :class:`~bson.codec_options.CodecOptions` for +:class:`RawBSONDocument`. 
+""" diff --git a/backend/test/lib/python3.8/site-packages/bson/regex.py b/backend/test/lib/python3.8/site-packages/bson/regex.py new file mode 100644 index 0000000000000000000000000000000000000000..fe852fdfce07211446ba480d4acdaf7f6f2a4542 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/regex.py @@ -0,0 +1,134 @@ +# Copyright 2013-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for representing MongoDB regular expressions.""" + +import re +from typing import Any, Generic, Pattern, Type, TypeVar, Union + +from bson._helpers import _getstate_slots, _setstate_slots +from bson.son import RE_TYPE + + +def str_flags_to_int(str_flags: str) -> int: + flags = 0 + if "i" in str_flags: + flags |= re.IGNORECASE + if "l" in str_flags: + flags |= re.LOCALE + if "m" in str_flags: + flags |= re.MULTILINE + if "s" in str_flags: + flags |= re.DOTALL + if "u" in str_flags: + flags |= re.UNICODE + if "x" in str_flags: + flags |= re.VERBOSE + + return flags + + +_T = TypeVar("_T", str, bytes) + + +class Regex(Generic[_T]): + """BSON regular expression data.""" + + __slots__ = ("pattern", "flags") + + __getstate__ = _getstate_slots + __setstate__ = _setstate_slots + + _type_marker = 11 + + @classmethod + def from_native(cls: Type["Regex"], regex: "Pattern[_T]") -> "Regex[_T]": + """Convert a Python regular expression into a ``Regex`` instance. + + Note that in Python 3, a regular expression compiled from a + :class:`str` has the ``re.UNICODE`` flag set. If it is undesirable + to store this flag in a BSON regular expression, unset it first:: + + >>> pattern = re.compile('.*') + >>> regex = Regex.from_native(pattern) + >>> regex.flags ^= re.UNICODE + >>> db.collection.insert_one({'pattern': regex}) + + :Parameters: + - `regex`: A regular expression object from ``re.compile()``. + + .. warning:: + Python regular expressions use a different syntax and different + set of flags than MongoDB, which uses `PCRE`_. A regular + expression retrieved from the server may not compile in + Python, or may match a different set of strings in Python than + when used in a MongoDB query. + + .. _PCRE: http://www.pcre.org/ + """ + if not isinstance(regex, RE_TYPE): + raise TypeError("regex must be a compiled regular expression, not %s" % type(regex)) + + return Regex(regex.pattern, regex.flags) + + def __init__(self, pattern: _T, flags: Union[str, int] = 0) -> None: + """BSON regular expression data. + + This class is useful to store and retrieve regular expressions that are + incompatible with Python's regular expression dialect. 
+ + :Parameters: + - `pattern`: string + - `flags`: (optional) an integer bitmask, or a string of flag + characters like "im" for IGNORECASE and MULTILINE + """ + if not isinstance(pattern, (str, bytes)): + raise TypeError("pattern must be a string, not %s" % type(pattern)) + self.pattern: _T = pattern + + if isinstance(flags, str): + self.flags = str_flags_to_int(flags) + elif isinstance(flags, int): + self.flags = flags + else: + raise TypeError("flags must be a string or int, not %s" % type(flags)) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Regex): + return self.pattern == other.pattern and self.flags == other.flags + else: + return NotImplemented + + __hash__ = None # type: ignore + + def __ne__(self, other: Any) -> bool: + return not self == other + + def __repr__(self) -> str: + return f"Regex({self.pattern!r}, {self.flags!r})" + + def try_compile(self) -> "Pattern[_T]": + """Compile this :class:`Regex` as a Python regular expression. + + .. warning:: + Python regular expressions use a different syntax and different + set of flags than MongoDB, which uses `PCRE`_. A regular + expression retrieved from the server may not compile in + Python, or may match a different set of strings in Python than + when used in a MongoDB query. :meth:`try_compile()` may raise + :exc:`re.error`. + + .. _PCRE: http://www.pcre.org/ + """ + return re.compile(self.pattern, self.flags) diff --git a/backend/test/lib/python3.8/site-packages/bson/son.py b/backend/test/lib/python3.8/site-packages/bson/son.py new file mode 100644 index 0000000000000000000000000000000000000000..7be749ceca75ecd5e9d07b3a97b03e2620c23b11 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/son.py @@ -0,0 +1,208 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for creating and manipulating SON, the Serialized Ocument Notation. + +Regular dictionaries can be used instead of SON objects, but not when the order +of keys is important. A SON object can be used just like a normal Python +dictionary. +""" + +import copy +import re +from collections.abc import Mapping as _Mapping +from typing import ( + Any, + Dict, + Iterable, + Iterator, + List, + Mapping, + Optional, + Pattern, + Tuple, + Type, + TypeVar, + Union, +) + +# This sort of sucks, but seems to be as good as it gets... +# This is essentially the same as re._pattern_type +RE_TYPE: Type[Pattern[Any]] = type(re.compile("")) + +_Key = TypeVar("_Key") +_Value = TypeVar("_Value") +_T = TypeVar("_T") + + +class SON(Dict[_Key, _Value]): + """SON data. + + A subclass of dict that maintains ordering of keys and provides a + few extra niceties for dealing with SON. SON provides an API + similar to collections.OrderedDict. 
+ """ + + __keys: List[Any] + + def __init__( + self, + data: Optional[Union[Mapping[_Key, _Value], Iterable[Tuple[_Key, _Value]]]] = None, + **kwargs: Any, + ) -> None: + self.__keys = [] + dict.__init__(self) + self.update(data) + self.update(kwargs) + + def __new__(cls: Type["SON[_Key, _Value]"], *args: Any, **kwargs: Any) -> "SON[_Key, _Value]": + instance = super().__new__(cls, *args, **kwargs) # type: ignore[type-var] + instance.__keys = [] + return instance + + def __repr__(self) -> str: + result = [] + for key in self.__keys: + result.append(f"({key!r}, {self[key]!r})") + return "SON([%s])" % ", ".join(result) + + def __setitem__(self, key: _Key, value: _Value) -> None: + if key not in self.__keys: + self.__keys.append(key) + dict.__setitem__(self, key, value) + + def __delitem__(self, key: _Key) -> None: + self.__keys.remove(key) + dict.__delitem__(self, key) + + def copy(self) -> "SON[_Key, _Value]": + other: SON[_Key, _Value] = SON() + other.update(self) + return other + + # TODO this is all from UserDict.DictMixin. it could probably be made more + # efficient. + # second level definitions support higher levels + def __iter__(self) -> Iterator[_Key]: + yield from self.__keys + + def has_key(self, key: _Key) -> bool: + return key in self.__keys + + def iterkeys(self) -> Iterator[_Key]: + return self.__iter__() + + # fourth level uses definitions from lower levels + def itervalues(self) -> Iterator[_Value]: + for _, v in self.items(): + yield v + + def values(self) -> List[_Value]: # type: ignore[override] + return [v for _, v in self.items()] + + def clear(self) -> None: + self.__keys = [] + super().clear() + + def setdefault(self, key: _Key, default: _Value) -> _Value: + try: + return self[key] + except KeyError: + self[key] = default + return default + + def pop(self, key: _Key, *args: Union[_Value, _T]) -> Union[_Value, _T]: + if len(args) > 1: + raise TypeError("pop expected at most 2 arguments, got " + repr(1 + len(args))) + try: + value = self[key] + except KeyError: + if args: + return args[0] + raise + del self[key] + return value + + def popitem(self) -> Tuple[_Key, _Value]: + try: + k, v = next(iter(self.items())) + except StopIteration: + raise KeyError("container is empty") + del self[k] + return (k, v) + + def update(self, other: Optional[Any] = None, **kwargs: _Value) -> None: # type: ignore[override] + # Make progressively weaker assumptions about "other" + if other is None: + pass + elif hasattr(other, "items"): + for k, v in other.items(): + self[k] = v + elif hasattr(other, "keys"): + for k in other.keys(): + self[k] = other[k] + else: + for k, v in other: + self[k] = v + if kwargs: + self.update(kwargs) + + def get(self, key: _Key, default: Optional[Union[_Value, _T]] = None) -> Union[_Value, _T, None]: # type: ignore[override] + try: + return self[key] + except KeyError: + return default + + def __eq__(self, other: Any) -> bool: + """Comparison to another SON is order-sensitive while comparison to a + regular dictionary is order-insensitive. + """ + if isinstance(other, SON): + return len(self) == len(other) and list(self.items()) == list(other.items()) + return self.to_dict() == other + + def __ne__(self, other: Any) -> bool: + return not self == other + + def __len__(self) -> int: + return len(self.__keys) + + def to_dict(self) -> Dict[_Key, _Value]: + """Convert a SON document to a normal Python dictionary instance. + + This is trickier than just *dict(...)* because it needs to be + recursive. 
+ """ + + def transform_value(value: Any) -> Any: + if isinstance(value, list): + return [transform_value(v) for v in value] + elif isinstance(value, _Mapping): + return {k: transform_value(v) for k, v in value.items()} + else: + return value + + return transform_value(dict(self)) + + def __deepcopy__(self, memo: Dict[int, "SON[_Key, _Value]"]) -> "SON[_Key, _Value]": + out: SON[_Key, _Value] = SON() + val_id = id(self) + if val_id in memo: + return memo[val_id] + memo[val_id] = out + for k, v in self.items(): + if not isinstance(v, RE_TYPE): + v = copy.deepcopy(v, memo) + out[k] = v + return out diff --git a/backend/test/lib/python3.8/site-packages/bson/time64.c b/backend/test/lib/python3.8/site-packages/bson/time64.c new file mode 100644 index 0000000000000000000000000000000000000000..a21fbb90bd6f86b7363603c403357d3e87192d14 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/time64.c @@ -0,0 +1,781 @@ +/* + +Copyright (c) 2007-2010 Michael G Schwern + +This software originally derived from Paul Sheer's pivotal_gmtime_r.c. + +The MIT License: + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + +*/ + +/* + +Programmers who have available to them 64-bit time values as a 'long +long' type can use cbson_localtime64_r() and cbson_gmtime64_r() which correctly +converts the time even on 32-bit systems. Whether you have 64-bit time +values will depend on the operating system. + +cbson_localtime64_r() is a 64-bit equivalent of localtime_r(). + +cbson_gmtime64_r() is a 64-bit equivalent of gmtime_r(). + +*/ + +#ifdef _MSC_VER + #define _CRT_SECURE_NO_WARNINGS +#endif + +/* Including Python.h fixes issues with interpreters built with -std=c99. */ +#define PY_SSIZE_T_CLEAN +#include "Python.h" + +#include <time.h> +#include "time64.h" +#include "time64_limits.h" + + +/* Spec says except for stftime() and the _r() functions, these + all return static memory. Stabbings! 
*/ +static struct TM Static_Return_Date; + +static const int days_in_month[2][12] = { + {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}, + {31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}, +}; + +static const int julian_days_by_month[2][12] = { + {0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334}, + {0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335}, +}; + +static const int length_of_year[2] = { 365, 366 }; + +/* Some numbers relating to the gregorian cycle */ +static const Year years_in_gregorian_cycle = 400; +#define days_in_gregorian_cycle ((365 * 400) + 100 - 4 + 1) +static const Time64_T seconds_in_gregorian_cycle = days_in_gregorian_cycle * 60LL * 60LL * 24LL; + +/* Year range we can trust the time functions with */ +#define MAX_SAFE_YEAR 2037 +#define MIN_SAFE_YEAR 1971 + +/* 28 year Julian calendar cycle */ +#define SOLAR_CYCLE_LENGTH 28 + +/* Year cycle from MAX_SAFE_YEAR down. */ +static const int safe_years_high[SOLAR_CYCLE_LENGTH] = { + 2016, 2017, 2018, 2019, + 2020, 2021, 2022, 2023, + 2024, 2025, 2026, 2027, + 2028, 2029, 2030, 2031, + 2032, 2033, 2034, 2035, + 2036, 2037, 2010, 2011, + 2012, 2013, 2014, 2015 +}; + +/* Year cycle from MIN_SAFE_YEAR up */ +static const int safe_years_low[SOLAR_CYCLE_LENGTH] = { + 1996, 1997, 1998, 1971, + 1972, 1973, 1974, 1975, + 1976, 1977, 1978, 1979, + 1980, 1981, 1982, 1983, + 1984, 1985, 1986, 1987, + 1988, 1989, 1990, 1991, + 1992, 1993, 1994, 1995, +}; + +/* Let's assume people are going to be looking for dates in the future. + Let's provide some cheats so you can skip ahead. + This has a 4x speed boost when near 2008. +*/ +/* Number of days since epoch on Jan 1st, 2008 GMT */ +#define CHEAT_DAYS (1199145600 / 24 / 60 / 60) +#define CHEAT_YEARS 108 + +#define IS_LEAP(n) ((!(((n) + 1900) % 400) || (!(((n) + 1900) % 4) && (((n) + 1900) % 100))) != 0) +#define _TIME64_WRAP(a,b,m) ((a) = ((a) < 0 ) ? ((b)--, (a) + (m)) : (a)) + +#ifdef USE_SYSTEM_LOCALTIME +# define SHOULD_USE_SYSTEM_LOCALTIME(a) ( \ + (a) <= SYSTEM_LOCALTIME_MAX && \ + (a) >= SYSTEM_LOCALTIME_MIN \ +) +#else +# define SHOULD_USE_SYSTEM_LOCALTIME(a) (0) +#endif + +#ifdef USE_SYSTEM_GMTIME +# define SHOULD_USE_SYSTEM_GMTIME(a) ( \ + (a) <= SYSTEM_GMTIME_MAX && \ + (a) >= SYSTEM_GMTIME_MIN \ +) +#else +# define SHOULD_USE_SYSTEM_GMTIME(a) (0) +#endif + +/* Multi varadic macros are a C99 thing, alas */ +#ifdef TIME_64_DEBUG +# define TIME64_TRACE(format) (fprintf(stderr, format)) +# define TIME64_TRACE1(format, var1) (fprintf(stderr, format, var1)) +# define TIME64_TRACE2(format, var1, var2) (fprintf(stderr, format, var1, var2)) +# define TIME64_TRACE3(format, var1, var2, var3) (fprintf(stderr, format, var1, var2, var3)) +#else +# define TIME64_TRACE(format) ((void)0) +# define TIME64_TRACE1(format, var1) ((void)0) +# define TIME64_TRACE2(format, var1, var2) ((void)0) +# define TIME64_TRACE3(format, var1, var2, var3) ((void)0) +#endif + + +static int is_exception_century(Year year) +{ + int is_exception = ((year % 100 == 0) && !(year % 400 == 0)); + TIME64_TRACE1("# is_exception_century: %s\n", is_exception ? "yes" : "no"); + + return(is_exception); +} + + +/* Compare two dates. + The result is like cmp. 
+ Ignores things like gmtoffset and dst +*/ +int cbson_cmp_date( const struct TM* left, const struct tm* right ) { + if( left->tm_year > right->tm_year ) + return 1; + else if( left->tm_year < right->tm_year ) + return -1; + + if( left->tm_mon > right->tm_mon ) + return 1; + else if( left->tm_mon < right->tm_mon ) + return -1; + + if( left->tm_mday > right->tm_mday ) + return 1; + else if( left->tm_mday < right->tm_mday ) + return -1; + + if( left->tm_hour > right->tm_hour ) + return 1; + else if( left->tm_hour < right->tm_hour ) + return -1; + + if( left->tm_min > right->tm_min ) + return 1; + else if( left->tm_min < right->tm_min ) + return -1; + + if( left->tm_sec > right->tm_sec ) + return 1; + else if( left->tm_sec < right->tm_sec ) + return -1; + + return 0; +} + + +/* Check if a date is safely inside a range. + The intention is to check if its a few days inside. +*/ +int cbson_date_in_safe_range( const struct TM* date, const struct tm* min, const struct tm* max ) { + if( cbson_cmp_date(date, min) == -1 ) + return 0; + + if( cbson_cmp_date(date, max) == 1 ) + return 0; + + return 1; +} + + +/* timegm() is not in the C or POSIX spec, but it is such a useful + extension I would be remiss in leaving it out. Also I need it + for cbson_localtime64() +*/ +Time64_T cbson_timegm64(const struct TM *date) { + Time64_T days = 0; + Time64_T seconds = 0; + Year year; + Year orig_year = (Year)date->tm_year; + int cycles = 0; + + if( orig_year > 100 ) { + cycles = (int)((orig_year - 100) / 400); + orig_year -= cycles * 400; + days += (Time64_T)cycles * days_in_gregorian_cycle; + } + else if( orig_year < -300 ) { + cycles = (int)((orig_year - 100) / 400); + orig_year -= cycles * 400; + days += (Time64_T)cycles * days_in_gregorian_cycle; + } + TIME64_TRACE3("# timegm/ cycles: %d, days: %lld, orig_year: %lld\n", cycles, days, orig_year); + + if( orig_year > 70 ) { + year = 70; + while( year < orig_year ) { + days += length_of_year[IS_LEAP(year)]; + year++; + } + } + else if ( orig_year < 70 ) { + year = 69; + do { + days -= length_of_year[IS_LEAP(year)]; + year--; + } while( year >= orig_year ); + } + + days += julian_days_by_month[IS_LEAP(orig_year)][date->tm_mon]; + days += date->tm_mday - 1; + + seconds = days * 60 * 60 * 24; + + seconds += date->tm_hour * 60 * 60; + seconds += date->tm_min * 60; + seconds += date->tm_sec; + + return(seconds); +} + + +#ifndef NDEBUG +static int check_tm(struct TM *tm) +{ + /* Don't forget leap seconds */ + assert(tm->tm_sec >= 0); + assert(tm->tm_sec <= 61); + + assert(tm->tm_min >= 0); + assert(tm->tm_min <= 59); + + assert(tm->tm_hour >= 0); + assert(tm->tm_hour <= 23); + + assert(tm->tm_mday >= 1); + assert(tm->tm_mday <= days_in_month[IS_LEAP(tm->tm_year)][tm->tm_mon]); + + assert(tm->tm_mon >= 0); + assert(tm->tm_mon <= 11); + + assert(tm->tm_wday >= 0); + assert(tm->tm_wday <= 6); + + assert(tm->tm_yday >= 0); + assert(tm->tm_yday <= length_of_year[IS_LEAP(tm->tm_year)]); + +#ifdef HAS_TM_TM_GMTOFF + assert(tm->tm_gmtoff >= -24 * 60 * 60); + assert(tm->tm_gmtoff <= 24 * 60 * 60); +#endif + + return 1; +} +#endif + + +/* The exceptional centuries without leap years cause the cycle to + shift by 16 +*/ +static Year cycle_offset(Year year) +{ + const Year start_year = 2000; + Year year_diff = year - start_year; + Year exceptions; + + if( year > start_year ) + year_diff--; + + exceptions = year_diff / 100; + exceptions -= year_diff / 400; + + TIME64_TRACE3("# year: %lld, exceptions: %lld, year_diff: %lld\n", + year, exceptions, year_diff); + + return exceptions * 
16; +} + +/* For a given year after 2038, pick the latest possible matching + year in the 28 year calendar cycle. + + A matching year... + 1) Starts on the same day of the week. + 2) Has the same leap year status. + + This is so the calendars match up. + + Also the previous year must match. When doing Jan 1st you might + wind up on Dec 31st the previous year when doing a -UTC time zone. + + Finally, the next year must have the same start day of week. This + is for Dec 31st with a +UTC time zone. + It doesn't need the same leap year status since we only care about + January 1st. +*/ +static int safe_year(const Year year) +{ + int safe_year = 0; + Year year_cycle; + + if( year >= MIN_SAFE_YEAR && year <= MAX_SAFE_YEAR ) { + return (int)year; + } + + year_cycle = year + cycle_offset(year); + + /* safe_years_low is off from safe_years_high by 8 years */ + if( year < MIN_SAFE_YEAR ) + year_cycle -= 8; + + /* Change non-leap xx00 years to an equivalent */ + if( is_exception_century(year) ) + year_cycle += 11; + + /* Also xx01 years, since the previous year will be wrong */ + if( is_exception_century(year - 1) ) + year_cycle += 17; + + year_cycle %= SOLAR_CYCLE_LENGTH; + if( year_cycle < 0 ) + year_cycle = SOLAR_CYCLE_LENGTH + year_cycle; + + assert( year_cycle >= 0 ); + assert( year_cycle < SOLAR_CYCLE_LENGTH ); + if( year < MIN_SAFE_YEAR ) + safe_year = safe_years_low[year_cycle]; + else if( year > MAX_SAFE_YEAR ) + safe_year = safe_years_high[year_cycle]; + else + assert(0); + + TIME64_TRACE3("# year: %lld, year_cycle: %lld, safe_year: %d\n", + year, year_cycle, safe_year); + + assert(safe_year <= MAX_SAFE_YEAR && safe_year >= MIN_SAFE_YEAR); + + return safe_year; +} + + +void pymongo_copy_tm_to_TM64(const struct tm *src, struct TM *dest) { + if( src == NULL ) { + memset(dest, 0, sizeof(*dest)); + } + else { +# ifdef USE_TM64 + dest->tm_sec = src->tm_sec; + dest->tm_min = src->tm_min; + dest->tm_hour = src->tm_hour; + dest->tm_mday = src->tm_mday; + dest->tm_mon = src->tm_mon; + dest->tm_year = (Year)src->tm_year; + dest->tm_wday = src->tm_wday; + dest->tm_yday = src->tm_yday; + dest->tm_isdst = src->tm_isdst; + +# ifdef HAS_TM_TM_GMTOFF + dest->tm_gmtoff = src->tm_gmtoff; +# endif + +# ifdef HAS_TM_TM_ZONE + dest->tm_zone = src->tm_zone; +# endif + +# else + /* They're the same type */ + memcpy(dest, src, sizeof(*dest)); +# endif + } +} + + +void cbson_copy_TM64_to_tm(const struct TM *src, struct tm *dest) { + if( src == NULL ) { + memset(dest, 0, sizeof(*dest)); + } + else { +# ifdef USE_TM64 + dest->tm_sec = src->tm_sec; + dest->tm_min = src->tm_min; + dest->tm_hour = src->tm_hour; + dest->tm_mday = src->tm_mday; + dest->tm_mon = src->tm_mon; + dest->tm_year = (int)src->tm_year; + dest->tm_wday = src->tm_wday; + dest->tm_yday = src->tm_yday; + dest->tm_isdst = src->tm_isdst; + +# ifdef HAS_TM_TM_GMTOFF + dest->tm_gmtoff = src->tm_gmtoff; +# endif + +# ifdef HAS_TM_TM_ZONE + dest->tm_zone = src->tm_zone; +# endif + +# else + /* They're the same type */ + memcpy(dest, src, sizeof(*dest)); +# endif + } +} + + +/* Simulate localtime_r() to the best of our ability */ +struct tm * cbson_fake_localtime_r(const time_t *time, struct tm *result) { + const struct tm *static_result = localtime(time); + + assert(result != NULL); + + if( static_result == NULL ) { + memset(result, 0, sizeof(*result)); + return NULL; + } + else { + memcpy(result, static_result, sizeof(*result)); + return result; + } +} + + +/* Simulate gmtime_r() to the best of our ability */ +struct tm * cbson_fake_gmtime_r(const time_t 
*time, struct tm *result) { + const struct tm *static_result = gmtime(time); + + assert(result != NULL); + + if( static_result == NULL ) { + memset(result, 0, sizeof(*result)); + return NULL; + } + else { + memcpy(result, static_result, sizeof(*result)); + return result; + } +} + + +static Time64_T seconds_between_years(Year left_year, Year right_year) { + int increment = (left_year > right_year) ? 1 : -1; + Time64_T seconds = 0; + int cycles; + + if( left_year > 2400 ) { + cycles = (int)((left_year - 2400) / 400); + left_year -= cycles * 400; + seconds += cycles * seconds_in_gregorian_cycle; + } + else if( left_year < 1600 ) { + cycles = (int)((left_year - 1600) / 400); + left_year += cycles * 400; + seconds += cycles * seconds_in_gregorian_cycle; + } + + while( left_year != right_year ) { + seconds += length_of_year[IS_LEAP(right_year - 1900)] * 60 * 60 * 24; + right_year += increment; + } + + return seconds * increment; +} + + +Time64_T cbson_mktime64(const struct TM *input_date) { + struct tm safe_date; + struct TM date; + Time64_T time; + Year year = input_date->tm_year + 1900; + + if( cbson_date_in_safe_range(input_date, &SYSTEM_MKTIME_MIN, &SYSTEM_MKTIME_MAX) ) + { + cbson_copy_TM64_to_tm(input_date, &safe_date); + return (Time64_T)mktime(&safe_date); + } + + /* Have to make the year safe in date else it won't fit in safe_date */ + date = *input_date; + date.tm_year = safe_year(year) - 1900; + cbson_copy_TM64_to_tm(&date, &safe_date); + + time = (Time64_T)mktime(&safe_date); + + time += seconds_between_years(year, (Year)(safe_date.tm_year + 1900)); + + return time; +} + + +/* Because I think mktime() is a crappy name */ +Time64_T timelocal64(const struct TM *date) { + return cbson_mktime64(date); +} + + +struct TM *cbson_gmtime64_r (const Time64_T *in_time, struct TM *p) +{ + int v_tm_sec, v_tm_min, v_tm_hour, v_tm_mon, v_tm_wday; + Time64_T v_tm_tday; + int leap; + Time64_T m; + Time64_T time = *in_time; + Year year = 70; + int cycles = 0; + + assert(p != NULL); + +#ifdef USE_SYSTEM_GMTIME + /* Use the system gmtime() if time_t is small enough */ + if( SHOULD_USE_SYSTEM_GMTIME(*in_time) ) { + time_t safe_time = (time_t)*in_time; + struct tm safe_date; + GMTIME_R(&safe_time, &safe_date); + + pymongo_copy_tm_to_TM64(&safe_date, p); + assert(check_tm(p)); + + return p; + } +#endif + +#ifdef HAS_TM_TM_GMTOFF + p->tm_gmtoff = 0; +#endif + p->tm_isdst = 0; + +#ifdef HAS_TM_TM_ZONE + p->tm_zone = "UTC"; +#endif + + v_tm_sec = (int)(time % 60); + time /= 60; + v_tm_min = (int)(time % 60); + time /= 60; + v_tm_hour = (int)(time % 24); + time /= 24; + v_tm_tday = time; + + _TIME64_WRAP (v_tm_sec, v_tm_min, 60); + _TIME64_WRAP (v_tm_min, v_tm_hour, 60); + _TIME64_WRAP (v_tm_hour, v_tm_tday, 24); + + v_tm_wday = (int)((v_tm_tday + 4) % 7); + if (v_tm_wday < 0) + v_tm_wday += 7; + m = v_tm_tday; + + if (m >= CHEAT_DAYS) { + year = CHEAT_YEARS; + m -= CHEAT_DAYS; + } + + if (m >= 0) { + /* Gregorian cycles, this is huge optimization for distant times */ + cycles = (int)(m / (Time64_T) days_in_gregorian_cycle); + if( cycles ) { + m -= (cycles * (Time64_T) days_in_gregorian_cycle); + year += (cycles * years_in_gregorian_cycle); + } + + /* Years */ + leap = IS_LEAP (year); + while (m >= (Time64_T) length_of_year[leap]) { + m -= (Time64_T) length_of_year[leap]; + year++; + leap = IS_LEAP (year); + } + + /* Months */ + v_tm_mon = 0; + while (m >= (Time64_T) days_in_month[leap][v_tm_mon]) { + m -= (Time64_T) days_in_month[leap][v_tm_mon]; + v_tm_mon++; + } + } else { + year--; + + /* Gregorian cycles 
*/ + cycles = (int)((m / (Time64_T) days_in_gregorian_cycle) + 1); + if( cycles ) { + m -= (cycles * (Time64_T) days_in_gregorian_cycle); + year += (cycles * years_in_gregorian_cycle); + } + + /* Years */ + leap = IS_LEAP (year); + while (m < (Time64_T) -length_of_year[leap]) { + m += (Time64_T) length_of_year[leap]; + year--; + leap = IS_LEAP (year); + } + + /* Months */ + v_tm_mon = 11; + while (m < (Time64_T) -days_in_month[leap][v_tm_mon]) { + m += (Time64_T) days_in_month[leap][v_tm_mon]; + v_tm_mon--; + } + m += (Time64_T) days_in_month[leap][v_tm_mon]; + } + + p->tm_year = (int)year; + if( p->tm_year != year ) { +#ifdef EOVERFLOW + errno = EOVERFLOW; +#endif + return NULL; + } + + /* At this point m is less than a year so casting to an int is safe */ + p->tm_mday = (int) m + 1; + p->tm_yday = julian_days_by_month[leap][v_tm_mon] + (int)m; + p->tm_sec = v_tm_sec; + p->tm_min = v_tm_min; + p->tm_hour = v_tm_hour; + p->tm_mon = v_tm_mon; + p->tm_wday = v_tm_wday; + + assert(check_tm(p)); + + return p; +} + + +struct TM *cbson_localtime64_r (const Time64_T *time, struct TM *local_tm) +{ + time_t safe_time; + struct tm safe_date; + struct TM gm_tm; + Year orig_year; + int month_diff; + + assert(local_tm != NULL); + +#ifdef USE_SYSTEM_LOCALTIME + /* Use the system localtime() if time_t is small enough */ + if( SHOULD_USE_SYSTEM_LOCALTIME(*time) ) { + safe_time = (time_t)*time; + + TIME64_TRACE1("Using system localtime for %lld\n", *time); + + LOCALTIME_R(&safe_time, &safe_date); + + pymongo_copy_tm_to_TM64(&safe_date, local_tm); + assert(check_tm(local_tm)); + + return local_tm; + } +#endif + + if( cbson_gmtime64_r(time, &gm_tm) == NULL ) { + TIME64_TRACE1("cbson_gmtime64_r returned null for %lld\n", *time); + return NULL; + } + + orig_year = gm_tm.tm_year; + + if (gm_tm.tm_year > (2037 - 1900) || + gm_tm.tm_year < (1970 - 1900) + ) + { + TIME64_TRACE1("Mapping tm_year %lld to safe_year\n", (Year)gm_tm.tm_year); + gm_tm.tm_year = safe_year((Year)(gm_tm.tm_year + 1900)) - 1900; + } + + safe_time = (time_t)cbson_timegm64(&gm_tm); + if( LOCALTIME_R(&safe_time, &safe_date) == NULL ) { + TIME64_TRACE1("localtime_r(%d) returned NULL\n", (int)safe_time); + return NULL; + } + + pymongo_copy_tm_to_TM64(&safe_date, local_tm); + + local_tm->tm_year = (int)orig_year; + if( local_tm->tm_year != orig_year ) { + TIME64_TRACE2("tm_year overflow: tm_year %lld, orig_year %lld\n", + (Year)local_tm->tm_year, (Year)orig_year); + +#ifdef EOVERFLOW + errno = EOVERFLOW; +#endif + return NULL; + } + + + month_diff = local_tm->tm_mon - gm_tm.tm_mon; + + /* When localtime is Dec 31st previous year and + gmtime is Jan 1st next year. + */ + if( month_diff == 11 ) { + local_tm->tm_year--; + } + + /* When localtime is Jan 1st, next year and + gmtime is Dec 31st, previous year. + */ + if( month_diff == -11 ) { + local_tm->tm_year++; + } + + /* GMT is Jan 1st, xx01 year, but localtime is still Dec 31st + in a non-leap xx00. There is one point in the cycle + we can't account for which the safe xx00 year is a leap + year. So we need to correct for Dec 31st coming out as + the 366th day of the year. 
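A Python spot-check (a sketch, not part of the C source) of the two matching-year properties the safe_year() logic above relies on; the 2040 -> 2012 pairing follows from cycle_offset() and the safe_years_high table::

    import datetime

    def is_leap(year: int) -> bool:
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    original, substitute = 2040, 2012   # safe_years_high[(2040 + offset) % 28] == 2012
    assert is_leap(original) == is_leap(substitute)        # same leap status
    assert (datetime.date(original, 1, 1).weekday()
            == datetime.date(substitute, 1, 1).weekday())  # same starting weekday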
+ */ + if( !IS_LEAP(local_tm->tm_year) && local_tm->tm_yday == 365 ) + local_tm->tm_yday--; + + assert(check_tm(local_tm)); + + return local_tm; +} + + +int cbson_valid_tm_wday( const struct TM* date ) { + if( 0 <= date->tm_wday && date->tm_wday <= 6 ) + return 1; + else + return 0; +} + +int cbson_valid_tm_mon( const struct TM* date ) { + if( 0 <= date->tm_mon && date->tm_mon <= 11 ) + return 1; + else + return 0; +} + + +/* Non-thread safe versions of the above */ +struct TM *cbson_localtime64(const Time64_T *time) { +#ifdef _MSC_VER + _tzset(); +#else + tzset(); +#endif + return cbson_localtime64_r(time, &Static_Return_Date); +} + +struct TM *cbson_gmtime64(const Time64_T *time) { + return cbson_gmtime64_r(time, &Static_Return_Date); +} diff --git a/backend/test/lib/python3.8/site-packages/bson/time64.h b/backend/test/lib/python3.8/site-packages/bson/time64.h new file mode 100644 index 0000000000000000000000000000000000000000..6321eb307e034fb363c08d5da1be2207391b8daf --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/time64.h @@ -0,0 +1,67 @@ +#ifndef TIME64_H +# define TIME64_H + +#include <time.h> +#include "time64_config.h" + +/* Set our custom types */ +typedef INT_64_T Int64; +typedef Int64 Time64_T; +typedef Int64 Year; + + +/* A copy of the tm struct but with a 64 bit year */ +struct TM64 { + int tm_sec; + int tm_min; + int tm_hour; + int tm_mday; + int tm_mon; + Year tm_year; + int tm_wday; + int tm_yday; + int tm_isdst; + +#ifdef HAS_TM_TM_GMTOFF + long tm_gmtoff; +#endif + +#ifdef HAS_TM_TM_ZONE + char *tm_zone; +#endif +}; + + +/* Decide which tm struct to use */ +#ifdef USE_TM64 +#define TM TM64 +#else +#define TM tm +#endif + + +/* Declare public functions */ +struct TM *cbson_gmtime64_r (const Time64_T *, struct TM *); +struct TM *cbson_localtime64_r (const Time64_T *, struct TM *); +struct TM *cbson_gmtime64 (const Time64_T *); +struct TM *cbson_localtime64 (const Time64_T *); + +Time64_T cbson_timegm64 (const struct TM *); +Time64_T cbson_mktime64 (const struct TM *); +Time64_T timelocal64 (const struct TM *); + + +/* Not everyone has gm/localtime_r(), provide a replacement */ +#ifdef HAS_LOCALTIME_R +# define LOCALTIME_R(clock, result) localtime_r(clock, result) +#else +# define LOCALTIME_R(clock, result) cbson_fake_localtime_r(clock, result) +#endif +#ifdef HAS_GMTIME_R +# define GMTIME_R(clock, result) gmtime_r(clock, result) +#else +# define GMTIME_R(clock, result) cbson_fake_gmtime_r(clock, result) +#endif + + +#endif diff --git a/backend/test/lib/python3.8/site-packages/bson/time64_config.h b/backend/test/lib/python3.8/site-packages/bson/time64_config.h new file mode 100644 index 0000000000000000000000000000000000000000..9d4c111c95cd378f3314d722d4b3efb216eeb7b4 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/time64_config.h @@ -0,0 +1,78 @@ +/* Configuration + ------------- + Define as appropriate for your system. + Sensible defaults provided. +*/ + + +#ifndef TIME64_CONFIG_H +# define TIME64_CONFIG_H + +/* Debugging + TIME_64_DEBUG + Define if you want debugging messages +*/ +/* #define TIME_64_DEBUG */ + + +/* INT_64_T + A 64 bit integer type to use to store time and others. + Must be defined. +*/ +#define INT_64_T long long + + +/* USE_TM64 + Should we use a 64 bit safe replacement for tm? This will + let you go past year 2 billion but the struct will be incompatible + with tm. Conversion functions will be provided. +*/ +/* #define USE_TM64 */ + + +/* Availability of system functions. 
+ + HAS_GMTIME_R + Define if your system has gmtime_r() + + HAS_LOCALTIME_R + Define if your system has localtime_r() + + HAS_TIMEGM + Define if your system has timegm(), a GNU extension. +*/ +#if !defined(WIN32) && !defined(_MSC_VER) +#define HAS_GMTIME_R +#define HAS_LOCALTIME_R +#endif +/* #define HAS_TIMEGM */ + + +/* Details of non-standard tm struct elements. + + HAS_TM_TM_GMTOFF + True if your tm struct has a "tm_gmtoff" element. + A BSD extension. + + HAS_TM_TM_ZONE + True if your tm struct has a "tm_zone" element. + A BSD extension. +*/ +/* #define HAS_TM_TM_GMTOFF */ +/* #define HAS_TM_TM_ZONE */ + + +/* USE_SYSTEM_LOCALTIME + USE_SYSTEM_GMTIME + USE_SYSTEM_MKTIME + USE_SYSTEM_TIMEGM + Should we use the system functions if the time is inside their range? + Your system localtime() is probably more accurate, but our gmtime() is + fast and safe. +*/ +#define USE_SYSTEM_LOCALTIME +/* #define USE_SYSTEM_GMTIME */ +#define USE_SYSTEM_MKTIME +/* #define USE_SYSTEM_TIMEGM */ + +#endif /* TIME64_CONFIG_H */ diff --git a/backend/test/lib/python3.8/site-packages/bson/time64_limits.h b/backend/test/lib/python3.8/site-packages/bson/time64_limits.h new file mode 100644 index 0000000000000000000000000000000000000000..1d30607baeb0163dca92cfa523c27e1ef6db7f4b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/time64_limits.h @@ -0,0 +1,95 @@ +/* + Maximum and minimum inputs your system's respective time functions + can correctly handle. time64.h will use your system functions if + the input falls inside these ranges and corresponding USE_SYSTEM_* + constant is defined. +*/ + +#ifndef TIME64_LIMITS_H +#define TIME64_LIMITS_H + +/* Max/min for localtime() */ +#define SYSTEM_LOCALTIME_MAX 2147483647 +#define SYSTEM_LOCALTIME_MIN -2147483647-1 + +/* Max/min for gmtime() */ +#define SYSTEM_GMTIME_MAX 2147483647 +#define SYSTEM_GMTIME_MIN -2147483647-1 + +/* Max/min for mktime() */ +static const struct tm SYSTEM_MKTIME_MAX = { + 7, + 14, + 19, + 18, + 0, + 138, + 1, + 17, + 0 +#ifdef HAS_TM_TM_GMTOFF + ,-28800 +#endif +#ifdef HAS_TM_TM_ZONE + ,"PST" +#endif +}; + +static const struct tm SYSTEM_MKTIME_MIN = { + 52, + 45, + 12, + 13, + 11, + 1, + 5, + 346, + 0 +#ifdef HAS_TM_TM_GMTOFF + ,-28800 +#endif +#ifdef HAS_TM_TM_ZONE + ,"PST" +#endif +}; + +/* Max/min for timegm() */ +#ifdef HAS_TIMEGM +static const struct tm SYSTEM_TIMEGM_MAX = { + 7, + 14, + 3, + 19, + 0, + 138, + 2, + 18, + 0 + #ifdef HAS_TM_TM_GMTOFF + ,0 + #endif + #ifdef HAS_TM_TM_ZONE + ,"UTC" + #endif +}; + +static const struct tm SYSTEM_TIMEGM_MIN = { + 52, + 45, + 20, + 13, + 11, + 1, + 5, + 346, + 0 + #ifdef HAS_TM_TM_GMTOFF + ,0 + #endif + #ifdef HAS_TM_TM_ZONE + ,"UTC" + #endif +}; +#endif /* HAS_TIMEGM */ + +#endif /* TIME64_LIMITS_H */ diff --git a/backend/test/lib/python3.8/site-packages/bson/timestamp.py b/backend/test/lib/python3.8/site-packages/bson/timestamp.py new file mode 100644 index 0000000000000000000000000000000000000000..168f2824df07ffdcfa41fd1ae0ba2176cb44f5e5 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/timestamp.py @@ -0,0 +1,123 @@ +# Copyright 2010-2015 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for representing MongoDB internal Timestamps.""" + +import calendar +import datetime +from typing import Any, Union + +from bson._helpers import _getstate_slots, _setstate_slots +from bson.tz_util import utc + +UPPERBOUND = 4294967296 + + +class Timestamp: + """MongoDB internal timestamps used in the opLog.""" + + __slots__ = ("__time", "__inc") + + __getstate__ = _getstate_slots + __setstate__ = _setstate_slots + + _type_marker = 17 + + def __init__(self, time: Union[datetime.datetime, int], inc: int) -> None: + """Create a new :class:`Timestamp`. + + This class is only for use with the MongoDB opLog. If you need + to store a regular timestamp, please use a + :class:`~datetime.datetime`. + + Raises :class:`TypeError` if `time` is not an instance of + :class: `int` or :class:`~datetime.datetime`, or `inc` is not + an instance of :class:`int`. Raises :class:`ValueError` if + `time` or `inc` is not in [0, 2**32). + + :Parameters: + - `time`: time in seconds since epoch UTC, or a naive UTC + :class:`~datetime.datetime`, or an aware + :class:`~datetime.datetime` + - `inc`: the incrementing counter + """ + if isinstance(time, datetime.datetime): + offset = time.utcoffset() + if offset is not None: + time = time - offset + time = int(calendar.timegm(time.timetuple())) + if not isinstance(time, int): + raise TypeError("time must be an instance of int") + if not isinstance(inc, int): + raise TypeError("inc must be an instance of int") + if not 0 <= time < UPPERBOUND: + raise ValueError("time must be contained in [0, 2**32)") + if not 0 <= inc < UPPERBOUND: + raise ValueError("inc must be contained in [0, 2**32)") + + self.__time = time + self.__inc = inc + + @property + def time(self) -> int: + """Get the time portion of this :class:`Timestamp`.""" + return self.__time + + @property + def inc(self) -> int: + """Get the inc portion of this :class:`Timestamp`.""" + return self.__inc + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Timestamp): + return self.__time == other.time and self.__inc == other.inc + else: + return NotImplemented + + def __hash__(self) -> int: + return hash(self.time) ^ hash(self.inc) + + def __ne__(self, other: Any) -> bool: + return not self == other + + def __lt__(self, other: Any) -> bool: + if isinstance(other, Timestamp): + return (self.time, self.inc) < (other.time, other.inc) + return NotImplemented + + def __le__(self, other: Any) -> bool: + if isinstance(other, Timestamp): + return (self.time, self.inc) <= (other.time, other.inc) + return NotImplemented + + def __gt__(self, other: Any) -> bool: + if isinstance(other, Timestamp): + return (self.time, self.inc) > (other.time, other.inc) + return NotImplemented + + def __ge__(self, other: Any) -> bool: + if isinstance(other, Timestamp): + return (self.time, self.inc) >= (other.time, other.inc) + return NotImplemented + + def __repr__(self) -> str: + return f"Timestamp({self.__time}, {self.__inc})" + + def as_datetime(self) -> datetime.datetime: + """Return a :class:`~datetime.datetime` instance corresponding + to the time portion of this :class:`Timestamp`. 
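A usage sketch, assuming the vendored `bson` package is importable: an aware datetime is normalized to seconds since the epoch, and instances order by (time, inc)::

    import datetime
    from bson.timestamp import Timestamp

    ts = Timestamp(datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc), 1)
    assert ts.time == 1704067200             # seconds since the Unix epoch
    assert ts.as_datetime().year == 2024     # UTC-aware datetime round-trip
    assert Timestamp(0, 1) < Timestamp(1, 0)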
+ + The returned datetime's timezone is UTC. + """ + return datetime.datetime.fromtimestamp(self.__time, utc) diff --git a/backend/test/lib/python3.8/site-packages/bson/typings.py b/backend/test/lib/python3.8/site-packages/bson/typings.py new file mode 100644 index 0000000000000000000000000000000000000000..c796c65c4ee8cf53f01204625e4b52776a8f105e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/typings.py @@ -0,0 +1,29 @@ +# Copyright 2023-Present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Type aliases used by bson""" +from typing import TYPE_CHECKING, Any, Mapping, MutableMapping, TypeVar, Union + +if TYPE_CHECKING: + from array import array + from mmap import mmap + + from bson.raw_bson import RawBSONDocument + + +# Common Shared Types. +_DocumentOut = Union[MutableMapping[str, Any], "RawBSONDocument"] +_DocumentType = TypeVar("_DocumentType", bound=Mapping[str, Any]) +_DocumentTypeArg = TypeVar("_DocumentTypeArg", bound=Mapping[str, Any]) +_ReadableBuffer = Union[bytes, memoryview, "mmap", "array"] diff --git a/backend/test/lib/python3.8/site-packages/bson/tz_util.py b/backend/test/lib/python3.8/site-packages/bson/tz_util.py new file mode 100644 index 0000000000000000000000000000000000000000..8106c77b408cea36e990058b14c8bc5888ab54b8 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/bson/tz_util.py @@ -0,0 +1,52 @@ +# Copyright 2010-2015 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Timezone related utilities for BSON.""" + +from datetime import datetime, timedelta, tzinfo +from typing import Optional, Tuple, Union + +ZERO: timedelta = timedelta(0) + + +class FixedOffset(tzinfo): + """Fixed offset timezone, in minutes east from UTC. + + Implementation based from the Python `standard library documentation + <http://docs.python.org/library/datetime.html#tzinfo-objects>`_. + Defining __getinitargs__ enables pickling / copying. 
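A sketch of the class above, assuming the vendored `bson` package is importable; `__getinitargs__` is what lets `copy`/`pickle` rebuild instances::

    import copy
    import datetime
    from bson.tz_util import FixedOffset

    ist = FixedOffset(330, "IST")   # 330 minutes east of UTC
    noon = datetime.datetime(2024, 1, 1, 12, 0, tzinfo=ist)
    assert noon.astimezone(FixedOffset(0, "UTC")).hour == 6  # 12:00 IST == 06:30 UTC
    assert copy.deepcopy(ist).utcoffset(None) == datetime.timedelta(minutes=330)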
+ """ + + def __init__(self, offset: Union[float, timedelta], name: str) -> None: + if isinstance(offset, timedelta): + self.__offset = offset + else: + self.__offset = timedelta(minutes=offset) + self.__name = name + + def __getinitargs__(self) -> Tuple[timedelta, str]: + return self.__offset, self.__name + + def utcoffset(self, dt: Optional[datetime]) -> timedelta: + return self.__offset + + def tzname(self, dt: Optional[datetime]) -> str: + return self.__name + + def dst(self, dt: Optional[datetime]) -> timedelta: + return ZERO + + +utc: FixedOffset = FixedOffset(0, "UTC") +"""Fixed offset timezone representing UTC.""" diff --git a/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/INSTALLER b/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/LICENSE.rst b/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/LICENSE.rst new file mode 100644 index 0000000000000000000000000000000000000000..d12a849186982399c537c5b9a8fd77bf2edd5eab --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2014 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/METADATA b/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..7a6bbb24b5f05575ac0263dd7fb24e0f0180d641 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/METADATA @@ -0,0 +1,103 @@ +Metadata-Version: 2.1 +Name: click +Version: 8.1.7 +Summary: Composable command line interface toolkit +Home-page: https://palletsprojects.com/p/click/ +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://click.palletsprojects.com/ +Project-URL: Changes, https://click.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/click/ +Project-URL: Issue Tracker, https://github.com/pallets/click/issues/ +Project-URL: Chat, https://discord.gg/pallets +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst +Requires-Dist: colorama ; platform_system == "Windows" +Requires-Dist: importlib-metadata ; python_version < "3.8" + +\$ click\_ +========== + +Click is a Python package for creating beautiful command line interfaces +in a composable way with as little code as necessary. It's the "Command +Line Interface Creation Kit". It's highly configurable but comes with +sensible defaults out of the box. + +It aims to make the process of writing command line tools quick and fun +while also preventing any frustration caused by the inability to +implement an intended CLI API. + +Click in three points: + +- Arbitrary nesting of commands +- Automatic help page generation +- Supports lazy loading of subcommands at runtime + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U click + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. code-block:: python + + import click + + @click.command() + @click.option("--count", default=1, help="Number of greetings.") + @click.option("--name", prompt="Your name", help="The person to greet.") + def hello(count, name): + """Simple program that greets NAME for a total of COUNT times.""" + for _ in range(count): + click.echo(f"Hello, {name}!") + + if __name__ == '__main__': + hello() + +.. code-block:: text + + $ python hello.py --count=3 + Your name: Click + Hello, Click! + Hello, Click! + Hello, Click! + + +Donate +------ + +The Pallets organization develops and supports Click and other popular +packages. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. 
_please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://click.palletsprojects.com/ +- Changes: https://click.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/click/ +- Source Code: https://github.com/pallets/click +- Issue Tracker: https://github.com/pallets/click/issues +- Chat: https://discord.gg/pallets diff --git a/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/RECORD b/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..8daf861cbba2e92911af21a41ec0abd0d5518fe5 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/RECORD @@ -0,0 +1,39 @@ +click-8.1.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +click-8.1.7.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 +click-8.1.7.dist-info/METADATA,sha256=qIMevCxGA9yEmJOM_4WHuUJCwWpsIEVbCPOhs45YPN4,3014 +click-8.1.7.dist-info/RECORD,, +click-8.1.7.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92 +click-8.1.7.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 +click/__init__.py,sha256=YDDbjm406dTOA0V8bTtdGnhN7zj5j-_dFRewZF_pLvw,3138 +click/__pycache__/__init__.cpython-38.pyc,, +click/__pycache__/_compat.cpython-38.pyc,, +click/__pycache__/_termui_impl.cpython-38.pyc,, +click/__pycache__/_textwrap.cpython-38.pyc,, +click/__pycache__/_winconsole.cpython-38.pyc,, +click/__pycache__/core.cpython-38.pyc,, +click/__pycache__/decorators.cpython-38.pyc,, +click/__pycache__/exceptions.cpython-38.pyc,, +click/__pycache__/formatting.cpython-38.pyc,, +click/__pycache__/globals.cpython-38.pyc,, +click/__pycache__/parser.cpython-38.pyc,, +click/__pycache__/shell_completion.cpython-38.pyc,, +click/__pycache__/termui.cpython-38.pyc,, +click/__pycache__/testing.cpython-38.pyc,, +click/__pycache__/types.cpython-38.pyc,, +click/__pycache__/utils.cpython-38.pyc,, +click/_compat.py,sha256=5318agQpbt4kroKsbqDOYpTSWzL_YCZVUQiTT04yXmc,18744 +click/_termui_impl.py,sha256=3dFYv4445Nw-rFvZOTBMBPYwB1bxnmNk9Du6Dm_oBSU,24069 +click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353 +click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860 +click/core.py,sha256=j6oEWtGgGna8JarD6WxhXmNnxLnfRjwXglbBc-8jr7U,114086 +click/decorators.py,sha256=-ZlbGYgV-oI8jr_oH4RpuL1PFS-5QmeuEAsLDAYgxtw,18719 +click/exceptions.py,sha256=fyROO-47HWFDjt2qupo7A3J32VlpM-ovJnfowu92K3s,9273 +click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706 +click/globals.py,sha256=TP-qM88STzc7f127h35TD_v920FgfOD2EwzqA0oE8XU,1961 +click/parser.py,sha256=LKyYQE9ZLj5KgIDXkrcTHQRXIggfoivX14_UVIn56YA,19067 +click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +click/shell_completion.py,sha256=Ty3VM_ts0sQhj6u7eFTiLwHPoTgcXTGEAUg2OpLqYKw,18460 +click/termui.py,sha256=H7Q8FpmPelhJ2ovOhfCRhjMtCpNyjFXryAMLZODqsdc,28324 +click/testing.py,sha256=1Qd4kS5bucn1hsNIRryd0WtTMuCpkA93grkWxT8POsU,16084 +click/types.py,sha256=TZvz3hKvBztf-Hpa2enOmP4eznSPLzijjig5b_0XMxE,36391 +click/utils.py,sha256=1476UduUNY6UePGU4m18uzVHLt1sKM2PP3yWsQhbItM,20298 diff --git a/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/WHEEL b/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..2c08da084599354e5b2dbccb3ab716165e63d1a0 --- /dev/null +++ 
b/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/top_level.txt b/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..dca9a909647e3b066931de2909c2d1e65c78c995 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click-8.1.7.dist-info/top_level.txt @@ -0,0 +1 @@ +click diff --git a/backend/test/lib/python3.8/site-packages/click/__init__.py b/backend/test/lib/python3.8/site-packages/click/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9a1dab048917edc420af440c73bd1d689de6b3fa --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/__init__.py @@ -0,0 +1,73 @@ +""" +Click is a simple Python module inspired by the stdlib optparse to make +writing command line scripts fun. Unlike other modules, it's based +around a simple API that does not come with too much magic and is +composable. +""" +from .core import Argument as Argument +from .core import BaseCommand as BaseCommand +from .core import Command as Command +from .core import CommandCollection as CommandCollection +from .core import Context as Context +from .core import Group as Group +from .core import MultiCommand as MultiCommand +from .core import Option as Option +from .core import Parameter as Parameter +from .decorators import argument as argument +from .decorators import command as command +from .decorators import confirmation_option as confirmation_option +from .decorators import group as group +from .decorators import help_option as help_option +from .decorators import make_pass_decorator as make_pass_decorator +from .decorators import option as option +from .decorators import pass_context as pass_context +from .decorators import pass_obj as pass_obj +from .decorators import password_option as password_option +from .decorators import version_option as version_option +from .exceptions import Abort as Abort +from .exceptions import BadArgumentUsage as BadArgumentUsage +from .exceptions import BadOptionUsage as BadOptionUsage +from .exceptions import BadParameter as BadParameter +from .exceptions import ClickException as ClickException +from .exceptions import FileError as FileError +from .exceptions import MissingParameter as MissingParameter +from .exceptions import NoSuchOption as NoSuchOption +from .exceptions import UsageError as UsageError +from .formatting import HelpFormatter as HelpFormatter +from .formatting import wrap_text as wrap_text +from .globals import get_current_context as get_current_context +from .parser import OptionParser as OptionParser +from .termui import clear as clear +from .termui import confirm as confirm +from .termui import echo_via_pager as echo_via_pager +from .termui import edit as edit +from .termui import getchar as getchar +from .termui import launch as launch +from .termui import pause as pause +from .termui import progressbar as progressbar +from .termui import prompt as prompt +from .termui import secho as secho +from .termui import style as style +from .termui import unstyle as unstyle +from .types import BOOL as BOOL +from .types import Choice as Choice +from .types import DateTime as DateTime +from .types import File as File +from .types import FLOAT as FLOAT +from .types import FloatRange as FloatRange +from .types import INT as INT +from 
.types import IntRange as IntRange +from .types import ParamType as ParamType +from .types import Path as Path +from .types import STRING as STRING +from .types import Tuple as Tuple +from .types import UNPROCESSED as UNPROCESSED +from .types import UUID as UUID +from .utils import echo as echo +from .utils import format_filename as format_filename +from .utils import get_app_dir as get_app_dir +from .utils import get_binary_stream as get_binary_stream +from .utils import get_text_stream as get_text_stream +from .utils import open_file as open_file + +__version__ = "8.1.7" diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ce78048dcfd248fddce850f0a6a520a1cf821eab Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/_compat.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/_compat.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..65f356cf5003cac6897a97aeebf6cdc7dca5e4c2 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/_compat.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/_termui_impl.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/_termui_impl.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..127a129bc822ffa550224798363d1958a48b07bc Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/_termui_impl.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/_textwrap.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/_textwrap.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7c97f187928cb4036dab59d0d1be08c08ea7c728 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/_textwrap.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/_winconsole.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/_winconsole.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c7dba3b1d135b447a3f7c0a675a4732397a346b1 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/_winconsole.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/core.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/core.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..803a537299afd4503233f6b330b1bbfdd6c10213 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/core.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/decorators.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/decorators.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3ca92bf26eb943663a24a7b91984029dd1efc03e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/decorators.cpython-38.pyc differ diff --git 
a/backend/test/lib/python3.8/site-packages/click/__pycache__/exceptions.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3f734ade99f70e780c46b12d6733c801b31135e4 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/exceptions.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/formatting.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/formatting.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2bf2d8f6062b9ea1e131671a2c710e7d6c475a24 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/formatting.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/globals.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/globals.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..603b25ca65c10ad6bd8c7887830de46fb40cd8b2 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/globals.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/parser.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/parser.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0921a3f7948b216c4a264a3a3fa79daece56d1d8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/parser.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/shell_completion.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/shell_completion.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0f91095bad08ce9fb20d02458776b3a334648045 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/shell_completion.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/termui.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/termui.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f764a7f73d5b43d7a828d748986652d637fe53bb Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/termui.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/testing.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/testing.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d7d6b81811eeb22e10c651fcfb3e9daa21f4e284 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/testing.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/types.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/types.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9246e392725496cb2ad37d357b3e232dea75c484 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/types.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/__pycache__/utils.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/click/__pycache__/utils.cpython-38.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..989ba22be7bd232c4f8880a1501b2e35edf75eda Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/click/__pycache__/utils.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/click/_compat.py b/backend/test/lib/python3.8/site-packages/click/_compat.py new file mode 100644 index 0000000000000000000000000000000000000000..23f8866598b4b4eb836b9d9b210ebd395fd0c557 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/_compat.py @@ -0,0 +1,623 @@ +import codecs +import io +import os +import re +import sys +import typing as t +from weakref import WeakKeyDictionary + +CYGWIN = sys.platform.startswith("cygwin") +WIN = sys.platform.startswith("win") +auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None +_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") + + +def _make_text_stream( + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if encoding is None: + encoding = get_best_encoding(stream) + if errors is None: + errors = "replace" + return _NonClosingTextIOWrapper( + stream, + encoding, + errors, + line_buffering=True, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def is_ascii_encoding(encoding: str) -> bool: + """Checks if a given encoding is ascii.""" + try: + return codecs.lookup(encoding).name == "ascii" + except LookupError: + return False + + +def get_best_encoding(stream: t.IO[t.Any]) -> str: + """Returns the default stream encoding if not found.""" + rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() + if is_ascii_encoding(rv): + return "utf-8" + return rv + + +class _NonClosingTextIOWrapper(io.TextIOWrapper): + def __init__( + self, + stream: t.BinaryIO, + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, + force_writable: bool = False, + **extra: t.Any, + ) -> None: + self._stream = stream = t.cast( + t.BinaryIO, _FixupStream(stream, force_readable, force_writable) + ) + super().__init__(stream, encoding, errors, **extra) + + def __del__(self) -> None: + try: + self.detach() + except Exception: + pass + + def isatty(self) -> bool: + # https://bitbucket.org/pypy/pypy/issue/1803 + return self._stream.isatty() + + +class _FixupStream: + """The new io interface needs more from streams than streams + traditionally implement. As such, this fix-up code is necessary in + some circumstances. + + The forcing of readable and writable flags are there because some tools + put badly patched objects on sys (one such offender are certain version + of jupyter notebook). 
+ """ + + def __init__( + self, + stream: t.BinaryIO, + force_readable: bool = False, + force_writable: bool = False, + ): + self._stream = stream + self._force_readable = force_readable + self._force_writable = force_writable + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._stream, name) + + def read1(self, size: int) -> bytes: + f = getattr(self._stream, "read1", None) + + if f is not None: + return t.cast(bytes, f(size)) + + return self._stream.read(size) + + def readable(self) -> bool: + if self._force_readable: + return True + x = getattr(self._stream, "readable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.read(0) + except Exception: + return False + return True + + def writable(self) -> bool: + if self._force_writable: + return True + x = getattr(self._stream, "writable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.write("") # type: ignore + except Exception: + try: + self._stream.write(b"") + except Exception: + return False + return True + + def seekable(self) -> bool: + x = getattr(self._stream, "seekable", None) + if x is not None: + return t.cast(bool, x()) + try: + self._stream.seek(self._stream.tell()) + except Exception: + return False + return True + + +def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool: + try: + return isinstance(stream.read(0), bytes) + except Exception: + return default + # This happens in some cases where the stream was already + # closed. In this case, we assume the default. + + +def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool: + try: + stream.write(b"") + except Exception: + try: + stream.write("") + return False + except Exception: + pass + return default + return True + + +def _find_binary_reader(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_reader(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_reader(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _find_binary_writer(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]: + # We need to figure out if the given stream is already binary. + # This can happen because the official docs recommend detaching + # the streams to get binary streams. Some code might do this, so + # we need to deal with this case explicitly. + if _is_binary_writer(stream, False): + return t.cast(t.BinaryIO, stream) + + buf = getattr(stream, "buffer", None) + + # Same situation here; this time we assume that the buffer is + # actually binary in case it's closed. + if buf is not None and _is_binary_writer(buf, True): + return t.cast(t.BinaryIO, buf) + + return None + + +def _stream_is_misconfigured(stream: t.TextIO) -> bool: + """A stream is misconfigured if its encoding is ASCII.""" + # If the stream does not have an encoding set, we assume it's set + # to ASCII. This appears to happen in certain unittest + # environments. It's not quite clear what the correct behavior is + # but this at least will force Click to recover somehow. 
+ return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") + + +def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool: + """A stream attribute is compatible if it is equal to the + desired value or the desired value is unset and the attribute + has a value. + """ + stream_value = getattr(stream, attr, None) + return stream_value == value or (value is None and stream_value is not None) + + +def _is_compatible_text_stream( + stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> bool: + """Check if a stream's encoding and errors attributes are + compatible with the desired values. + """ + return _is_compat_stream_attr( + stream, "encoding", encoding + ) and _is_compat_stream_attr(stream, "errors", errors) + + +def _force_correct_text_stream( + text_stream: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + is_binary: t.Callable[[t.IO[t.Any], bool], bool], + find_binary: t.Callable[[t.IO[t.Any]], t.Optional[t.BinaryIO]], + force_readable: bool = False, + force_writable: bool = False, +) -> t.TextIO: + if is_binary(text_stream, False): + binary_reader = t.cast(t.BinaryIO, text_stream) + else: + text_stream = t.cast(t.TextIO, text_stream) + # If the stream looks compatible, and won't default to a + # misconfigured ascii encoding, return it as-is. + if _is_compatible_text_stream(text_stream, encoding, errors) and not ( + encoding is None and _stream_is_misconfigured(text_stream) + ): + return text_stream + + # Otherwise, get the underlying binary reader. + possible_binary_reader = find_binary(text_stream) + + # If that's not possible, silently use the original reader + # and get mojibake instead of exceptions. + if possible_binary_reader is None: + return text_stream + + binary_reader = possible_binary_reader + + # Default errors to replace instead of strict in order to get + # something that works. + if errors is None: + errors = "replace" + + # Wrap the binary stream in a text stream with the correct + # encoding parameters. 
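# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, not part of the diff): what the
# re-wrapping step below amounts to — a binary stream gains a text view with
# an explicit encoding and forgiving error handling, so undecodable bytes
# degrade to replacement characters instead of raising.
import io

raw = io.BytesIO(b"caf\xc3\xa9\n\xff\n")          # valid UTF-8, then a stray byte
text = io.TextIOWrapper(raw, encoding="utf-8", errors="replace", line_buffering=True)
print(text.readline().rstrip())                   # café
print(text.readline().rstrip())                   # \ufffd (replaced, no exception)
# ---------------------------------------------------------------------------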
+ return _make_text_stream( + binary_reader, + encoding, + errors, + force_readable=force_readable, + force_writable=force_writable, + ) + + +def _force_correct_text_reader( + text_reader: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + force_readable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_reader, + encoding, + errors, + _is_binary_reader, + _find_binary_reader, + force_readable=force_readable, + ) + + +def _force_correct_text_writer( + text_writer: t.IO[t.Any], + encoding: t.Optional[str], + errors: t.Optional[str], + force_writable: bool = False, +) -> t.TextIO: + return _force_correct_text_stream( + text_writer, + encoding, + errors, + _is_binary_writer, + _find_binary_writer, + force_writable=force_writable, + ) + + +def get_binary_stdin() -> t.BinaryIO: + reader = _find_binary_reader(sys.stdin) + if reader is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdin.") + return reader + + +def get_binary_stdout() -> t.BinaryIO: + writer = _find_binary_writer(sys.stdout) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stdout.") + return writer + + +def get_binary_stderr() -> t.BinaryIO: + writer = _find_binary_writer(sys.stderr) + if writer is None: + raise RuntimeError("Was not able to determine binary stream for sys.stderr.") + return writer + + +def get_text_stdin( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdin, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) + + +def get_text_stdout( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stdout, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) + + +def get_text_stderr( + encoding: t.Optional[str] = None, errors: t.Optional[str] = None +) -> t.TextIO: + rv = _get_windows_console_stream(sys.stderr, encoding, errors) + if rv is not None: + return rv + return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) + + +def _wrap_io_open( + file: t.Union[str, "os.PathLike[str]", int], + mode: str, + encoding: t.Optional[str], + errors: t.Optional[str], +) -> t.IO[t.Any]: + """Handles not passing ``encoding`` and ``errors`` in binary mode.""" + if "b" in mode: + return open(file, mode) + + return open(file, mode, encoding=encoding, errors=errors) + + +def open_stream( + filename: "t.Union[str, os.PathLike[str]]", + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, +) -> t.Tuple[t.IO[t.Any], bool]: + binary = "b" in mode + filename = os.fspath(filename) + + # Standard streams first. These are simple because they ignore the + # atomic flag. Use fsdecode to handle Path("-"). + if os.fsdecode(filename) == "-": + if any(m in mode for m in ["w", "a", "x"]): + if binary: + return get_binary_stdout(), False + return get_text_stdout(encoding=encoding, errors=errors), False + if binary: + return get_binary_stdin(), False + return get_text_stdin(encoding=encoding, errors=errors), False + + # Non-atomic writes directly go out through the regular open functions. 
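# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, simplified, not part of the diff):
# the essence of the atomic branch implemented below — write to a hidden
# sibling file in the same directory, then os.replace() it over the target so
# readers never observe a half-written file.
import os
import tempfile

def atomic_write_text(path: str, data: str) -> None:
    folder = os.path.dirname(os.path.abspath(path))
    fd, tmp = tempfile.mkstemp(dir=folder, prefix=".__atomic-write")
    try:
        with os.fdopen(fd, "w") as f:
            f.write(data)
        os.replace(tmp, path)        # a single atomic rename on POSIX and NT
    except BaseException:
        os.unlink(tmp)               # leave no temp file behind on failure
        raise
# ---------------------------------------------------------------------------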
+ if not atomic: + return _wrap_io_open(filename, mode, encoding, errors), True + + # Some usability stuff for atomic writes + if "a" in mode: + raise ValueError( + "Appending to an existing file is not supported, because that" + " would involve an expensive `copy`-operation to a temporary" + " file. Open the file in normal `w`-mode and copy explicitly" + " if that's what you're after." + ) + if "x" in mode: + raise ValueError("Use the `overwrite`-parameter instead.") + if "w" not in mode: + raise ValueError("Atomic writes only make sense with `w`-mode.") + + # Atomic writes are more complicated. They work by opening a file + # as a proxy in the same folder and then using the fdopen + # functionality to wrap it in a Python file. Then we wrap it in an + # atomic file that moves the file over on close. + import errno + import random + + try: + perm: t.Optional[int] = os.stat(filename).st_mode + except OSError: + perm = None + + flags = os.O_RDWR | os.O_CREAT | os.O_EXCL + + if binary: + flags |= getattr(os, "O_BINARY", 0) + + while True: + tmp_filename = os.path.join( + os.path.dirname(filename), + f".__atomic-write{random.randrange(1 << 32):08x}", + ) + try: + fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) + break + except OSError as e: + if e.errno == errno.EEXIST or ( + os.name == "nt" + and e.errno == errno.EACCES + and os.path.isdir(e.filename) + and os.access(e.filename, os.W_OK) + ): + continue + raise + + if perm is not None: + os.chmod(tmp_filename, perm) # in case perm includes bits in umask + + f = _wrap_io_open(fd, mode, encoding, errors) + af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) + return t.cast(t.IO[t.Any], af), True + + +class _AtomicFile: + def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None: + self._f = f + self._tmp_filename = tmp_filename + self._real_filename = real_filename + self.closed = False + + @property + def name(self) -> str: + return self._real_filename + + def close(self, delete: bool = False) -> None: + if self.closed: + return + self._f.close() + os.replace(self._tmp_filename, self._real_filename) + self.closed = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._f, name) + + def __enter__(self) -> "_AtomicFile": + return self + + def __exit__(self, exc_type: t.Optional[t.Type[BaseException]], *_: t.Any) -> None: + self.close(delete=exc_type is not None) + + def __repr__(self) -> str: + return repr(self._f) + + +def strip_ansi(value: str) -> str: + return _ansi_re.sub("", value) + + +def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool: + while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): + stream = stream._stream + + return stream.__class__.__module__.startswith("ipykernel.") + + +def should_strip_ansi( + stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None +) -> bool: + if color is None: + if stream is None: + stream = sys.stdin + return not isatty(stream) and not _is_jupyter_kernel_output(stream) + return not color + + +# On Windows, wrap the output streams with colorama to support ANSI +# color codes. 
+# NOTE: double check is needed so mypy does not analyze this on Linux +if sys.platform.startswith("win") and WIN: + from ._winconsole import _get_windows_console_stream + + def _get_argv_encoding() -> str: + import locale + + return locale.getpreferredencoding() + + _ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def auto_wrap_for_ansi( # noqa: F811 + stream: t.TextIO, color: t.Optional[bool] = None + ) -> t.TextIO: + """Support ANSI color and style codes on Windows by wrapping a + stream with colorama. + """ + try: + cached = _ansi_stream_wrappers.get(stream) + except Exception: + cached = None + + if cached is not None: + return cached + + import colorama + + strip = should_strip_ansi(stream, color) + ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) + rv = t.cast(t.TextIO, ansi_wrapper.stream) + _write = rv.write + + def _safe_write(s): + try: + return _write(s) + except BaseException: + ansi_wrapper.reset_all() + raise + + rv.write = _safe_write + + try: + _ansi_stream_wrappers[stream] = rv + except Exception: + pass + + return rv + +else: + + def _get_argv_encoding() -> str: + return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding() + + def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] + ) -> t.Optional[t.TextIO]: + return None + + +def term_len(x: str) -> int: + return len(strip_ansi(x)) + + +def isatty(stream: t.IO[t.Any]) -> bool: + try: + return stream.isatty() + except Exception: + return False + + +def _make_cached_stream_func( + src_func: t.Callable[[], t.Optional[t.TextIO]], + wrapper_func: t.Callable[[], t.TextIO], +) -> t.Callable[[], t.Optional[t.TextIO]]: + cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() + + def func() -> t.Optional[t.TextIO]: + stream = src_func() + + if stream is None: + return None + + try: + rv = cache.get(stream) + except Exception: + rv = None + if rv is not None: + return rv + rv = wrapper_func() + try: + cache[stream] = rv + except Exception: + pass + return rv + + return func + + +_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) +_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) +_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) + + +binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = { + "stdin": get_binary_stdin, + "stdout": get_binary_stdout, + "stderr": get_binary_stderr, +} + +text_streams: t.Mapping[ + str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO] +] = { + "stdin": get_text_stdin, + "stdout": get_text_stdout, + "stderr": get_text_stderr, +} diff --git a/backend/test/lib/python3.8/site-packages/click/_termui_impl.py b/backend/test/lib/python3.8/site-packages/click/_termui_impl.py new file mode 100644 index 0000000000000000000000000000000000000000..f744657753caa6cdef1dcc41a4f0b5e3e9503ab8 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/_termui_impl.py @@ -0,0 +1,739 @@ +""" +This module contains implementations for the termui module. To keep the +import time of Click down, some infrequently used functionality is +placed in this module and only imported as needed. 
+""" +import contextlib +import math +import os +import sys +import time +import typing as t +from gettext import gettext as _ +from io import StringIO +from types import TracebackType + +from ._compat import _default_text_stdout +from ._compat import CYGWIN +from ._compat import get_best_encoding +from ._compat import isatty +from ._compat import open_stream +from ._compat import strip_ansi +from ._compat import term_len +from ._compat import WIN +from .exceptions import ClickException +from .utils import echo + +V = t.TypeVar("V") + +if os.name == "nt": + BEFORE_BAR = "\r" + AFTER_BAR = "\n" +else: + BEFORE_BAR = "\r\033[?25l" + AFTER_BAR = "\033[?25h\n" + + +class ProgressBar(t.Generic[V]): + def __init__( + self, + iterable: t.Optional[t.Iterable[V]], + length: t.Optional[int] = None, + fill_char: str = "#", + empty_char: str = " ", + bar_template: str = "%(bar)s", + info_sep: str = " ", + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + label: t.Optional[str] = None, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, + width: int = 30, + ) -> None: + self.fill_char = fill_char + self.empty_char = empty_char + self.bar_template = bar_template + self.info_sep = info_sep + self.show_eta = show_eta + self.show_percent = show_percent + self.show_pos = show_pos + self.item_show_func = item_show_func + self.label: str = label or "" + + if file is None: + file = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if file is None: + file = StringIO() + + self.file = file + self.color = color + self.update_min_steps = update_min_steps + self._completed_intervals = 0 + self.width: int = width + self.autowidth: bool = width == 0 + + if length is None: + from operator import length_hint + + length = length_hint(iterable, -1) + + if length == -1: + length = None + if iterable is None: + if length is None: + raise TypeError("iterable or length is required") + iterable = t.cast(t.Iterable[V], range(length)) + self.iter: t.Iterable[V] = iter(iterable) + self.length = length + self.pos = 0 + self.avg: t.List[float] = [] + self.last_eta: float + self.start: float + self.start = self.last_eta = time.time() + self.eta_known: bool = False + self.finished: bool = False + self.max_width: t.Optional[int] = None + self.entered: bool = False + self.current_item: t.Optional[V] = None + self.is_hidden: bool = not isatty(self.file) + self._last_line: t.Optional[str] = None + + def __enter__(self) -> "ProgressBar[V]": + self.entered = True + self.render_progress() + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self.render_finish() + + def __iter__(self) -> t.Iterator[V]: + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + self.render_progress() + return self.generator() + + def __next__(self) -> V: + # Iteration is defined in terms of a generator function, + # returned by iter(self); use that to define next(). This works + # because `self.iter` is an iterable consumed by that generator, + # so it is re-entry safe. Calling `next(self.generator())` + # twice works and does "what you want". 
+ return next(iter(self)) + + def render_finish(self) -> None: + if self.is_hidden: + return + self.file.write(AFTER_BAR) + self.file.flush() + + @property + def pct(self) -> float: + if self.finished: + return 1.0 + return min(self.pos / (float(self.length or 1) or 1), 1.0) + + @property + def time_per_iteration(self) -> float: + if not self.avg: + return 0.0 + return sum(self.avg) / float(len(self.avg)) + + @property + def eta(self) -> float: + if self.length is not None and not self.finished: + return self.time_per_iteration * (self.length - self.pos) + return 0.0 + + def format_eta(self) -> str: + if self.eta_known: + t = int(self.eta) + seconds = t % 60 + t //= 60 + minutes = t % 60 + t //= 60 + hours = t % 24 + t //= 24 + if t > 0: + return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" + else: + return f"{hours:02}:{minutes:02}:{seconds:02}" + return "" + + def format_pos(self) -> str: + pos = str(self.pos) + if self.length is not None: + pos += f"/{self.length}" + return pos + + def format_pct(self) -> str: + return f"{int(self.pct * 100): 4}%"[1:] + + def format_bar(self) -> str: + if self.length is not None: + bar_length = int(self.pct * self.width) + bar = self.fill_char * bar_length + bar += self.empty_char * (self.width - bar_length) + elif self.finished: + bar = self.fill_char * self.width + else: + chars = list(self.empty_char * (self.width or 1)) + if self.time_per_iteration != 0: + chars[ + int( + (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) + * self.width + ) + ] = self.fill_char + bar = "".join(chars) + return bar + + def format_progress_line(self) -> str: + show_percent = self.show_percent + + info_bits = [] + if self.length is not None and show_percent is None: + show_percent = not self.show_pos + + if self.show_pos: + info_bits.append(self.format_pos()) + if show_percent: + info_bits.append(self.format_pct()) + if self.show_eta and self.eta_known and not self.finished: + info_bits.append(self.format_eta()) + if self.item_show_func is not None: + item_info = self.item_show_func(self.current_item) + if item_info is not None: + info_bits.append(item_info) + + return ( + self.bar_template + % { + "label": self.label, + "bar": self.format_bar(), + "info": self.info_sep.join(info_bits), + } + ).rstrip() + + def render_progress(self) -> None: + import shutil + + if self.is_hidden: + # Only output the label as it changes if the output is not a + # TTY. Use file=stderr if you expect to be piping stdout. + if self._last_line != self.label: + self._last_line = self.label + echo(self.label, file=self.file, color=self.color) + + return + + buf = [] + # Update width in case the terminal has been resized + if self.autowidth: + old_width = self.width + self.width = 0 + clutter_length = term_len(self.format_progress_line()) + new_width = max(0, shutil.get_terminal_size().columns - clutter_length) + if new_width < old_width: + buf.append(BEFORE_BAR) + buf.append(" " * self.max_width) # type: ignore + self.max_width = new_width + self.width = new_width + + clear_width = self.width + if self.max_width is not None: + clear_width = self.max_width + + buf.append(BEFORE_BAR) + line = self.format_progress_line() + line_len = term_len(line) + if self.max_width is None or self.max_width < line_len: + self.max_width = line_len + + buf.append(line) + buf.append(" " * (clear_width - line_len)) + line = "".join(buf) + # Render the line only if it changed. 
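# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, not part of the diff): the same
# last-line cache in miniature — redraw only when the rendered text actually
# changed, which keeps slow terminals from being flooded with identical writes.
import sys

_last_line = None

def render(line: str) -> None:
    global _last_line
    if line == _last_line:
        return                          # nothing changed; skip the write
    _last_line = line
    sys.stdout.write("\r" + line)
    sys.stdout.flush()

for pct in (0, 0, 50, 50, 100):         # only 3 of the 5 calls hit the terminal
    render("[{:3d}%]".format(pct))
# ---------------------------------------------------------------------------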
+ + if line != self._last_line: + self._last_line = line + echo(line, file=self.file, color=self.color, nl=False) + self.file.flush() + + def make_step(self, n_steps: int) -> None: + self.pos += n_steps + if self.length is not None and self.pos >= self.length: + self.finished = True + + if (time.time() - self.last_eta) < 1.0: + return + + self.last_eta = time.time() + + # self.avg is a rolling list of length <= 7 of steps where steps are + # defined as time elapsed divided by the total progress through + # self.length. + if self.pos: + step = (time.time() - self.start) / self.pos + else: + step = time.time() - self.start + + self.avg = self.avg[-6:] + [step] + + self.eta_known = self.length is not None + + def update(self, n_steps: int, current_item: t.Optional[V] = None) -> None: + """Update the progress bar by advancing a specified number of + steps, and optionally set the ``current_item`` for this new + position. + + :param n_steps: Number of steps to advance. + :param current_item: Optional item to set as ``current_item`` + for the updated position. + + .. versionchanged:: 8.0 + Added the ``current_item`` optional parameter. + + .. versionchanged:: 8.0 + Only render when the number of steps meets the + ``update_min_steps`` threshold. + """ + if current_item is not None: + self.current_item = current_item + + self._completed_intervals += n_steps + + if self._completed_intervals >= self.update_min_steps: + self.make_step(self._completed_intervals) + self.render_progress() + self._completed_intervals = 0 + + def finish(self) -> None: + self.eta_known = False + self.current_item = None + self.finished = True + + def generator(self) -> t.Iterator[V]: + """Return a generator which yields the items added to the bar + during construction, and updates the progress bar *after* the + yielded block returns. + """ + # WARNING: the iterator interface for `ProgressBar` relies on + # this and only works because this is a simple generator which + # doesn't create or manage additional state. If this function + # changes, the impact should be evaluated both against + # `iter(bar)` and `next(bar)`. `next()` in particular may call + # `self.generator()` repeatedly, and this must remain safe in + # order for that interface to work. + if not self.entered: + raise RuntimeError("You need to use progress bars in a with block.") + + if self.is_hidden: + yield from self.iter + else: + for rv in self.iter: + self.current_item = rv + + # This allows show_item_func to be updated before the + # item is processed. Only trigger at the beginning of + # the update interval. + if self._completed_intervals == 0: + self.render_progress() + + yield rv + self.update(1) + + self.finish() + self.render_progress() + + +def pager(generator: t.Iterable[str], color: t.Optional[bool] = None) -> None: + """Decide what method to use for paging through text.""" + stdout = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. 
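# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, condensed, not part of the diff):
# the decision order the rest of pager() below walks through — no TTY means
# dump directly, an explicit $PAGER wins, dumb terminals get no pager, and
# otherwise a conventional Unix default such as `less` is tried.
import os
import sys

def choose_pager() -> "str | None":
    if not (sys.stdin.isatty() and sys.stdout.isatty()):
        return None                     # piped or redirected: no pager
    cmd = (os.environ.get("PAGER") or "").strip()
    if cmd:
        return cmd                      # explicit user preference wins
    if os.environ.get("TERM") in ("dumb", "emacs"):
        return None
    return "less"                       # common Unix default

print(choose_pager())
# ---------------------------------------------------------------------------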
+ if stdout is None: + stdout = StringIO() + + if not isatty(sys.stdin) or not isatty(stdout): + return _nullpager(stdout, generator, color) + pager_cmd = (os.environ.get("PAGER", None) or "").strip() + if pager_cmd: + if WIN: + return _tempfilepager(generator, pager_cmd, color) + return _pipepager(generator, pager_cmd, color) + if os.environ.get("TERM") in ("dumb", "emacs"): + return _nullpager(stdout, generator, color) + if WIN or sys.platform.startswith("os2"): + return _tempfilepager(generator, "more <", color) + if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0: + return _pipepager(generator, "less", color) + + import tempfile + + fd, filename = tempfile.mkstemp() + os.close(fd) + try: + if hasattr(os, "system") and os.system(f'more "{filename}"') == 0: + return _pipepager(generator, "more", color) + return _nullpager(stdout, generator, color) + finally: + os.unlink(filename) + + +def _pipepager(generator: t.Iterable[str], cmd: str, color: t.Optional[bool]) -> None: + """Page through text by feeding it to another program. Invoking a + pager through this might support colors. + """ + import subprocess + + env = dict(os.environ) + + # If we're piping to less we might support colors under the + # condition that + cmd_detail = cmd.rsplit("/", 1)[-1].split() + if color is None and cmd_detail[0] == "less": + less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}" + if not less_flags: + env["LESS"] = "-R" + color = True + elif "r" in less_flags or "R" in less_flags: + color = True + + c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) + stdin = t.cast(t.BinaryIO, c.stdin) + encoding = get_best_encoding(stdin) + try: + for text in generator: + if not color: + text = strip_ansi(text) + + stdin.write(text.encode(encoding, "replace")) + except (OSError, KeyboardInterrupt): + pass + else: + stdin.close() + + # Less doesn't respect ^C, but catches it for its own UI purposes (aborting + # search or other commands inside less). + # + # That means when the user hits ^C, the parent process (click) terminates, + # but less is still alive, paging the output and messing up the terminal. + # + # If the user wants to make the pager exit on ^C, they should set + # `LESS='-K'`. It's not our decision to make. + while True: + try: + c.wait() + except KeyboardInterrupt: + pass + else: + break + + +def _tempfilepager( + generator: t.Iterable[str], cmd: str, color: t.Optional[bool] +) -> None: + """Page through text by invoking a program on a temporary file.""" + import tempfile + + fd, filename = tempfile.mkstemp() + # TODO: This never terminates if the passed generator never terminates. + text = "".join(generator) + if not color: + text = strip_ansi(text) + encoding = get_best_encoding(sys.stdout) + with open_stream(filename, "wb")[0] as f: + f.write(text.encode(encoding)) + try: + os.system(f'{cmd} "{filename}"') + finally: + os.close(fd) + os.unlink(filename) + + +def _nullpager( + stream: t.TextIO, generator: t.Iterable[str], color: t.Optional[bool] +) -> None: + """Simply print unformatted text. 
This is the ultimate fallback.""" + for text in generator: + if not color: + text = strip_ansi(text) + stream.write(text) + + +class Editor: + def __init__( + self, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + ) -> None: + self.editor = editor + self.env = env + self.require_save = require_save + self.extension = extension + + def get_editor(self) -> str: + if self.editor is not None: + return self.editor + for key in "VISUAL", "EDITOR": + rv = os.environ.get(key) + if rv: + return rv + if WIN: + return "notepad" + for editor in "sensible-editor", "vim", "nano": + if os.system(f"which {editor} >/dev/null 2>&1") == 0: + return editor + return "vi" + + def edit_file(self, filename: str) -> None: + import subprocess + + editor = self.get_editor() + environ: t.Optional[t.Dict[str, str]] = None + + if self.env: + environ = os.environ.copy() + environ.update(self.env) + + try: + c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True) + exit_code = c.wait() + if exit_code != 0: + raise ClickException( + _("{editor}: Editing failed").format(editor=editor) + ) + except OSError as e: + raise ClickException( + _("{editor}: Editing failed: {e}").format(editor=editor, e=e) + ) from e + + def edit(self, text: t.Optional[t.AnyStr]) -> t.Optional[t.AnyStr]: + import tempfile + + if not text: + data = b"" + elif isinstance(text, (bytes, bytearray)): + data = text + else: + if text and not text.endswith("\n"): + text += "\n" + + if WIN: + data = text.replace("\n", "\r\n").encode("utf-8-sig") + else: + data = text.encode("utf-8") + + fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) + f: t.BinaryIO + + try: + with os.fdopen(fd, "wb") as f: + f.write(data) + + # If the filesystem resolution is 1 second, like Mac OS + # 10.12 Extended, or 2 seconds, like FAT32, and the editor + # closes very fast, require_save can fail. Set the modified + # time to be 2 seconds in the past to work around this. + os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) + # Depending on the resolution, the exact value might not be + # recorded, so get the new recorded value. 
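# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, not part of the diff): the timestamp
# trick used below. Back-dating mtime by 2 seconds guarantees that even a
# filesystem with coarse (1-2 s) resolution records a newer mtime on save.
import os
import tempfile

fd, name = tempfile.mkstemp(suffix=".txt")
os.close(fd)
st = os.stat(name)
os.utime(name, (st.st_atime, st.st_mtime - 2))      # pretend the file is older
before = os.path.getmtime(name)

with open(name, "a") as f:                          # stand-in for the editor
    f.write("edited\n")

print(os.path.getmtime(name) != before)             # True -> the user saved
os.unlink(name)
# ---------------------------------------------------------------------------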
+ timestamp = os.path.getmtime(name) + + self.edit_file(name) + + if self.require_save and os.path.getmtime(name) == timestamp: + return None + + with open(name, "rb") as f: + rv = f.read() + + if isinstance(text, (bytes, bytearray)): + return rv + + return rv.decode("utf-8-sig").replace("\r\n", "\n") # type: ignore + finally: + os.unlink(name) + + +def open_url(url: str, wait: bool = False, locate: bool = False) -> int: + import subprocess + + def _unquote_file(url: str) -> str: + from urllib.parse import unquote + + if url.startswith("file://"): + url = unquote(url[7:]) + + return url + + if sys.platform == "darwin": + args = ["open"] + if wait: + args.append("-W") + if locate: + args.append("-R") + args.append(_unquote_file(url)) + null = open("/dev/null", "w") + try: + return subprocess.Popen(args, stderr=null).wait() + finally: + null.close() + elif WIN: + if locate: + url = _unquote_file(url.replace('"', "")) + args = f'explorer /select,"{url}"' + else: + url = url.replace('"', "") + wait_str = "/WAIT" if wait else "" + args = f'start {wait_str} "" "{url}"' + return os.system(args) + elif CYGWIN: + if locate: + url = os.path.dirname(_unquote_file(url).replace('"', "")) + args = f'cygstart "{url}"' + else: + url = url.replace('"', "") + wait_str = "-w" if wait else "" + args = f'cygstart {wait_str} "{url}"' + return os.system(args) + + try: + if locate: + url = os.path.dirname(_unquote_file(url)) or "." + else: + url = _unquote_file(url) + c = subprocess.Popen(["xdg-open", url]) + if wait: + return c.wait() + return 0 + except OSError: + if url.startswith(("http://", "https://")) and not locate and not wait: + import webbrowser + + webbrowser.open(url) + return 0 + return 1 + + +def _translate_ch_to_exc(ch: str) -> t.Optional[BaseException]: + if ch == "\x03": + raise KeyboardInterrupt() + + if ch == "\x04" and not WIN: # Unix-like, Ctrl+D + raise EOFError() + + if ch == "\x1a" and WIN: # Windows, Ctrl+Z + raise EOFError() + + return None + + +if WIN: + import msvcrt + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + yield -1 + + def getchar(echo: bool) -> str: + # The function `getch` will return a bytes object corresponding to + # the pressed character. Since Windows 10 build 1803, it will also + # return \x00 when called a second time after pressing a regular key. + # + # `getwch` does not share this probably-bugged behavior. Moreover, it + # returns a Unicode object by default, which is what we want. + # + # Either of these functions will return \x00 or \xe0 to indicate + # a special key, and you need to call the same function again to get + # the "rest" of the code. The fun part is that \u00e0 is + # "latin small letter a with grave", so if you type that on a French + # keyboard, you _also_ get a \xe0. + # E.g., consider the Up arrow. This returns \xe0 and then \x48. The + # resulting Unicode string reads as "a with grave" + "capital H". + # This is indistinguishable from when the user actually types + # "a with grave" and then "capital H". + # + # When \xe0 is returned, we assume it's part of a special-key sequence + # and call `getwch` again, but that means that when the user types + # the \u00e0 character, `getchar` doesn't return until a second + # character is typed. + # The alternative is returning immediately, but that would mess up + # cross-platform handling of arrow keys and others that start with + # \xe0. 
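# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, Windows-only, not part of the diff):
# the two-call protocol described above — a \x00 or \xe0 prefix means a
# special key, so the same function is called again to fetch the scan code.
import sys

if sys.platform == "win32":
    import msvcrt

    ch = msvcrt.getwch()
    if ch in ("\x00", "\xe0"):          # prefix: a scan code follows
        ch += msvcrt.getwch()           # e.g. "\xe0H" for the Up arrow
    print(repr(ch))
# ---------------------------------------------------------------------------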
Another option is using `getch`, but then we can't reliably + # read non-ASCII characters, because return values of `getch` are + # limited to the current 8-bit codepage. + # + # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` + # is doing the right thing in more situations than with `getch`. + func: t.Callable[[], str] + + if echo: + func = msvcrt.getwche # type: ignore + else: + func = msvcrt.getwch # type: ignore + + rv = func() + + if rv in ("\x00", "\xe0"): + # \x00 and \xe0 are control characters that indicate special key, + # see above. + rv += func() + + _translate_ch_to_exc(rv) + return rv + +else: + import tty + import termios + + @contextlib.contextmanager + def raw_terminal() -> t.Iterator[int]: + f: t.Optional[t.TextIO] + fd: int + + if not isatty(sys.stdin): + f = open("/dev/tty") + fd = f.fileno() + else: + fd = sys.stdin.fileno() + f = None + + try: + old_settings = termios.tcgetattr(fd) + + try: + tty.setraw(fd) + yield fd + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + sys.stdout.flush() + + if f is not None: + f.close() + except termios.error: + pass + + def getchar(echo: bool) -> str: + with raw_terminal() as fd: + ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") + + if echo and isatty(sys.stdout): + sys.stdout.write(ch) + + _translate_ch_to_exc(ch) + return ch diff --git a/backend/test/lib/python3.8/site-packages/click/_textwrap.py b/backend/test/lib/python3.8/site-packages/click/_textwrap.py new file mode 100644 index 0000000000000000000000000000000000000000..b47dcbd4264e86715adfae1c5124c288b67a983e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/_textwrap.py @@ -0,0 +1,49 @@ +import textwrap +import typing as t +from contextlib import contextmanager + + +class TextWrapper(textwrap.TextWrapper): + def _handle_long_word( + self, + reversed_chunks: t.List[str], + cur_line: t.List[str], + cur_len: int, + width: int, + ) -> None: + space_left = max(width - cur_len, 1) + + if self.break_long_words: + last = reversed_chunks[-1] + cut = last[:space_left] + res = last[space_left:] + cur_line.append(cut) + reversed_chunks[-1] = res + elif not cur_line: + cur_line.append(reversed_chunks.pop()) + + @contextmanager + def extra_indent(self, indent: str) -> t.Iterator[None]: + old_initial_indent = self.initial_indent + old_subsequent_indent = self.subsequent_indent + self.initial_indent += indent + self.subsequent_indent += indent + + try: + yield + finally: + self.initial_indent = old_initial_indent + self.subsequent_indent = old_subsequent_indent + + def indent_only(self, text: str) -> str: + rv = [] + + for idx, line in enumerate(text.splitlines()): + indent = self.initial_indent + + if idx > 0: + indent = self.subsequent_indent + + rv.append(f"{indent}{line}") + + return "\n".join(rv) diff --git a/backend/test/lib/python3.8/site-packages/click/_winconsole.py b/backend/test/lib/python3.8/site-packages/click/_winconsole.py new file mode 100644 index 0000000000000000000000000000000000000000..6b20df315b23ecd1e3d0ec32c11c0b5ced577efe --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/_winconsole.py @@ -0,0 +1,279 @@ +# This module is based on the excellent work by Adam Bartoš who +# provided a lot of what went into the implementation here in +# the discussion to issue1602 in the Python bug tracker. 
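# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, assumes Windows, not part of the
# diff): the console probe this module builds on — GetConsoleMode succeeds
# only for a real console handle, which is how _is_console() further down
# tells consoles apart from pipes and redirected files.
import sys

if sys.platform == "win32":
    import msvcrt
    from ctypes import byref, windll
    from ctypes.wintypes import DWORD

    handle = msvcrt.get_osfhandle(sys.stdout.fileno())
    print(bool(windll.kernel32.GetConsoleMode(handle, byref(DWORD()))))
# ---------------------------------------------------------------------------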
+# +# There are some general differences in regards to how this works +# compared to the original patches as we do not need to patch +# the entire interpreter but just work in our little world of +# echo and prompt. +import io +import sys +import time +import typing as t +from ctypes import byref +from ctypes import c_char +from ctypes import c_char_p +from ctypes import c_int +from ctypes import c_ssize_t +from ctypes import c_ulong +from ctypes import c_void_p +from ctypes import POINTER +from ctypes import py_object +from ctypes import Structure +from ctypes.wintypes import DWORD +from ctypes.wintypes import HANDLE +from ctypes.wintypes import LPCWSTR +from ctypes.wintypes import LPWSTR + +from ._compat import _NonClosingTextIOWrapper + +assert sys.platform == "win32" +import msvcrt # noqa: E402 +from ctypes import windll # noqa: E402 +from ctypes import WINFUNCTYPE # noqa: E402 + +c_ssize_p = POINTER(c_ssize_t) + +kernel32 = windll.kernel32 +GetStdHandle = kernel32.GetStdHandle +ReadConsoleW = kernel32.ReadConsoleW +WriteConsoleW = kernel32.WriteConsoleW +GetConsoleMode = kernel32.GetConsoleMode +GetLastError = kernel32.GetLastError +GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) +CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( + ("CommandLineToArgvW", windll.shell32) +) +LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) + +STDIN_HANDLE = GetStdHandle(-10) +STDOUT_HANDLE = GetStdHandle(-11) +STDERR_HANDLE = GetStdHandle(-12) + +PyBUF_SIMPLE = 0 +PyBUF_WRITABLE = 1 + +ERROR_SUCCESS = 0 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_OPERATION_ABORTED = 995 + +STDIN_FILENO = 0 +STDOUT_FILENO = 1 +STDERR_FILENO = 2 + +EOF = b"\x1a" +MAX_BYTES_WRITTEN = 32767 + +try: + from ctypes import pythonapi +except ImportError: + # On PyPy we cannot get buffers so our ability to operate here is + # severely limited. 
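# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, not part of the diff): the same
# capability probe on its own — CPython exposes ctypes.pythonapi, PyPy does
# not, so direct buffer access is feature-detected rather than assumed.
try:
    from ctypes import pythonapi  # noqa: F401
    HAVE_BUFFER_API = True
except ImportError:
    HAVE_BUFFER_API = False

print(HAVE_BUFFER_API)
# ---------------------------------------------------------------------------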
+ get_buffer = None +else: + + class Py_buffer(Structure): + _fields_ = [ + ("buf", c_void_p), + ("obj", py_object), + ("len", c_ssize_t), + ("itemsize", c_ssize_t), + ("readonly", c_int), + ("ndim", c_int), + ("format", c_char_p), + ("shape", c_ssize_p), + ("strides", c_ssize_p), + ("suboffsets", c_ssize_p), + ("internal", c_void_p), + ] + + PyObject_GetBuffer = pythonapi.PyObject_GetBuffer + PyBuffer_Release = pythonapi.PyBuffer_Release + + def get_buffer(obj, writable=False): + buf = Py_buffer() + flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE + PyObject_GetBuffer(py_object(obj), byref(buf), flags) + + try: + buffer_type = c_char * buf.len + return buffer_type.from_address(buf.buf) + finally: + PyBuffer_Release(byref(buf)) + + +class _WindowsConsoleRawIOBase(io.RawIOBase): + def __init__(self, handle): + self.handle = handle + + def isatty(self): + super().isatty() + return True + + +class _WindowsConsoleReader(_WindowsConsoleRawIOBase): + def readable(self): + return True + + def readinto(self, b): + bytes_to_be_read = len(b) + if not bytes_to_be_read: + return 0 + elif bytes_to_be_read % 2: + raise ValueError( + "cannot read odd number of bytes from UTF-16-LE encoded console" + ) + + buffer = get_buffer(b, writable=True) + code_units_to_be_read = bytes_to_be_read // 2 + code_units_read = c_ulong() + + rv = ReadConsoleW( + HANDLE(self.handle), + buffer, + code_units_to_be_read, + byref(code_units_read), + None, + ) + if GetLastError() == ERROR_OPERATION_ABORTED: + # wait for KeyboardInterrupt + time.sleep(0.1) + if not rv: + raise OSError(f"Windows error: {GetLastError()}") + + if buffer[0] == EOF: + return 0 + return 2 * code_units_read.value + + +class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): + def writable(self): + return True + + @staticmethod + def _get_error_message(errno): + if errno == ERROR_SUCCESS: + return "ERROR_SUCCESS" + elif errno == ERROR_NOT_ENOUGH_MEMORY: + return "ERROR_NOT_ENOUGH_MEMORY" + return f"Windows error {errno}" + + def write(self, b): + bytes_to_be_written = len(b) + buf = get_buffer(b) + code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 + code_units_written = c_ulong() + + WriteConsoleW( + HANDLE(self.handle), + buf, + code_units_to_be_written, + byref(code_units_written), + None, + ) + bytes_written = 2 * code_units_written.value + + if bytes_written == 0 and bytes_to_be_written > 0: + raise OSError(self._get_error_message(GetLastError())) + return bytes_written + + +class ConsoleStream: + def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: + self._text_stream = text_stream + self.buffer = byte_stream + + @property + def name(self) -> str: + return self.buffer.name + + def write(self, x: t.AnyStr) -> int: + if isinstance(x, str): + return self._text_stream.write(x) + try: + self.flush() + except Exception: + pass + return self.buffer.write(x) + + def writelines(self, lines: t.Iterable[t.AnyStr]) -> None: + for line in lines: + self.write(line) + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._text_stream, name) + + def isatty(self) -> bool: + return self.buffer.isatty() + + def __repr__(self): + return f"<ConsoleStream name={self.name!r} encoding={self.encoding!r}>" + + +def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def 
_get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: + text_stream = _NonClosingTextIOWrapper( + io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), + "utf-16-le", + "strict", + line_buffering=True, + ) + return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) + + +_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { + 0: _get_text_stdin, + 1: _get_text_stdout, + 2: _get_text_stderr, +} + + +def _is_console(f: t.TextIO) -> bool: + if not hasattr(f, "fileno"): + return False + + try: + fileno = f.fileno() + except (OSError, io.UnsupportedOperation): + return False + + handle = msvcrt.get_osfhandle(fileno) + return bool(GetConsoleMode(handle, byref(DWORD()))) + + +def _get_windows_console_stream( + f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] +) -> t.Optional[t.TextIO]: + if ( + get_buffer is not None + and encoding in {"utf-16-le", None} + and errors in {"strict", None} + and _is_console(f) + ): + func = _stream_factories.get(f.fileno()) + if func is not None: + b = getattr(f, "buffer", None) + + if b is None: + return None + + return func(b) diff --git a/backend/test/lib/python3.8/site-packages/click/core.py b/backend/test/lib/python3.8/site-packages/click/core.py new file mode 100644 index 0000000000000000000000000000000000000000..cc65e896bf2d754d74b54a84ac501b80127f83ca --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/core.py @@ -0,0 +1,3042 @@ +import enum +import errno +import inspect +import os +import sys +import typing as t +from collections import abc +from contextlib import contextmanager +from contextlib import ExitStack +from functools import update_wrapper +from gettext import gettext as _ +from gettext import ngettext +from itertools import repeat +from types import TracebackType + +from . import types +from .exceptions import Abort +from .exceptions import BadParameter +from .exceptions import ClickException +from .exceptions import Exit +from .exceptions import MissingParameter +from .exceptions import UsageError +from .formatting import HelpFormatter +from .formatting import join_options +from .globals import pop_context +from .globals import push_context +from .parser import _flag_needs_value +from .parser import OptionParser +from .parser import split_opt +from .termui import confirm +from .termui import prompt +from .termui import style +from .utils import _detect_program_name +from .utils import _expand_args +from .utils import echo +from .utils import make_default_short_help +from .utils import make_str +from .utils import PacifyFlushWrapper + +if t.TYPE_CHECKING: + import typing_extensions as te + from .shell_completion import CompletionItem + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) +V = t.TypeVar("V") + + +def _complete_visible_commands( + ctx: "Context", incomplete: str +) -> t.Iterator[t.Tuple[str, "Command"]]: + """List all the subcommands of a group that start with the + incomplete value and aren't hidden. + + :param ctx: Invocation context for the group. + :param incomplete: Value being completed. May be empty. 
+ """ + multi = t.cast(MultiCommand, ctx.command) + + for name in multi.list_commands(ctx): + if name.startswith(incomplete): + command = multi.get_command(ctx, name) + + if command is not None and not command.hidden: + yield name, command + + +def _check_multicommand( + base_command: "MultiCommand", cmd_name: str, cmd: "Command", register: bool = False +) -> None: + if not base_command.chain or not isinstance(cmd, MultiCommand): + return + if register: + hint = ( + "It is not possible to add multi commands as children to" + " another multi command that is in chain mode." + ) + else: + hint = ( + "Found a multi command as subcommand to a multi command" + " that is in chain mode. This is not supported." + ) + raise RuntimeError( + f"{hint}. Command {base_command.name!r} is set to chain and" + f" {cmd_name!r} was added as a subcommand but it in itself is a" + f" multi command. ({cmd_name!r} is a {type(cmd).__name__}" + f" within a chained {type(base_command).__name__} named" + f" {base_command.name!r})." + ) + + +def batch(iterable: t.Iterable[V], batch_size: int) -> t.List[t.Tuple[V, ...]]: + return list(zip(*repeat(iter(iterable), batch_size))) + + +@contextmanager +def augment_usage_errors( + ctx: "Context", param: t.Optional["Parameter"] = None +) -> t.Iterator[None]: + """Context manager that attaches extra information to exceptions.""" + try: + yield + except BadParameter as e: + if e.ctx is None: + e.ctx = ctx + if param is not None and e.param is None: + e.param = param + raise + except UsageError as e: + if e.ctx is None: + e.ctx = ctx + raise + + +def iter_params_for_processing( + invocation_order: t.Sequence["Parameter"], + declaration_order: t.Sequence["Parameter"], +) -> t.List["Parameter"]: + """Given a sequence of parameters in the order as should be considered + for processing and an iterable of parameters that exist, this returns + a list in the correct order as they should be processed. + """ + + def sort_key(item: "Parameter") -> t.Tuple[bool, float]: + try: + idx: float = invocation_order.index(item) + except ValueError: + idx = float("inf") + + return not item.is_eager, idx + + return sorted(declaration_order, key=sort_key) + + +class ParameterSource(enum.Enum): + """This is an :class:`~enum.Enum` that indicates the source of a + parameter's value. + + Use :meth:`click.Context.get_parameter_source` to get the + source for a parameter by name. + + .. versionchanged:: 8.0 + Use :class:`~enum.Enum` and drop the ``validate`` method. + + .. versionchanged:: 8.0 + Added the ``PROMPT`` value. + """ + + COMMANDLINE = enum.auto() + """The value was provided by the command line args.""" + ENVIRONMENT = enum.auto() + """The value was provided with an environment variable.""" + DEFAULT = enum.auto() + """Used the default specified by the parameter.""" + DEFAULT_MAP = enum.auto() + """Used a default provided by :attr:`Context.default_map`.""" + PROMPT = enum.auto() + """Used a prompt to confirm a default or provide a value.""" + + +class Context: + """The context is a special internal object that holds state relevant + for the script execution at every single level. It's normally invisible + to commands unless they opt-in to getting access to it. + + The context is useful as it can pass internal objects around and can + control special execution features such as reading data from + environment variables. + + A context can be used as context manager in which case it will call + :meth:`close` on teardown. + + :param command: the command class for this context. 
+ :param parent: the parent context. + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it is usually + the name of the script, for commands below it it's + the name of the script. + :param obj: an arbitrary object of user data. + :param auto_envvar_prefix: the prefix to use for automatic environment + variables. If this is `None` then reading + from environment variables is disabled. This + does not affect manually set environment + variables which are always read. + :param default_map: a dictionary (like object) with default values + for parameters. + :param terminal_width: the width of the terminal. The default is + inherit from parent context. If no context + defines the terminal width then auto + detection will be applied. + :param max_content_width: the maximum width for content rendered by + Click (this currently only affects help + pages). This defaults to 80 characters if + not overridden. In other words: even if the + terminal is larger than that, Click will not + format things wider than 80 characters by + default. In addition to that, formatters might + add some safety mapping on the right. + :param resilient_parsing: if this flag is enabled then Click will + parse without any interactivity or callback + invocation. Default values will also be + ignored. This is useful for implementing + things such as completion support. + :param allow_extra_args: if this is set to `True` then extra arguments + at the end will not raise an error and will be + kept on the context. The default is to inherit + from the command. + :param allow_interspersed_args: if this is set to `False` then options + and arguments cannot be mixed. The + default is to inherit from the command. + :param ignore_unknown_options: instructs click to ignore options it does + not know and keeps them for later + processing. + :param help_option_names: optionally a list of strings that define how + the default help parameter is named. The + default is ``['--help']``. + :param token_normalize_func: an optional function that is used to + normalize tokens (options, choices, + etc.). This for instance can be used to + implement case insensitive behavior. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are used in texts that Click prints which is by + default not the case. This for instance would affect + help output. + :param show_default: Show the default value for commands. If this + value is not set, it defaults to the value from the parent + context. ``Command.show_default`` overrides this default for the + specific command. + + .. versionchanged:: 8.1 + The ``show_default`` parameter is overridden by + ``Command.show_default``, instead of the other way around. + + .. versionchanged:: 8.0 + The ``show_default`` parameter defaults to the value from the + parent context. + + .. versionchanged:: 7.1 + Added the ``show_default`` parameter. + + .. versionchanged:: 4.0 + Added the ``color``, ``ignore_unknown_options``, and + ``max_content_width`` parameters. + + .. versionchanged:: 3.0 + Added the ``allow_extra_args`` and ``allow_interspersed_args`` + parameters. + + .. versionchanged:: 2.0 + Added the ``resilient_parsing``, ``help_option_names``, and + ``token_normalize_func`` parameters. + """ + + #: The formatter class to create with :meth:`make_formatter`. + #: + #: .. 
versionadded:: 8.0 + formatter_class: t.Type["HelpFormatter"] = HelpFormatter + + def __init__( + self, + command: "Command", + parent: t.Optional["Context"] = None, + info_name: t.Optional[str] = None, + obj: t.Optional[t.Any] = None, + auto_envvar_prefix: t.Optional[str] = None, + default_map: t.Optional[t.MutableMapping[str, t.Any]] = None, + terminal_width: t.Optional[int] = None, + max_content_width: t.Optional[int] = None, + resilient_parsing: bool = False, + allow_extra_args: t.Optional[bool] = None, + allow_interspersed_args: t.Optional[bool] = None, + ignore_unknown_options: t.Optional[bool] = None, + help_option_names: t.Optional[t.List[str]] = None, + token_normalize_func: t.Optional[t.Callable[[str], str]] = None, + color: t.Optional[bool] = None, + show_default: t.Optional[bool] = None, + ) -> None: + #: the parent context or `None` if none exists. + self.parent = parent + #: the :class:`Command` for this context. + self.command = command + #: the descriptive information name + self.info_name = info_name + #: Map of parameter names to their parsed values. Parameters + #: with ``expose_value=False`` are not stored. + self.params: t.Dict[str, t.Any] = {} + #: the leftover arguments. + self.args: t.List[str] = [] + #: protected arguments. These are arguments that are prepended + #: to `args` when certain parsing scenarios are encountered but + #: must be never propagated to another arguments. This is used + #: to implement nested parsing. + self.protected_args: t.List[str] = [] + #: the collected prefixes of the command's options. + self._opt_prefixes: t.Set[str] = set(parent._opt_prefixes) if parent else set() + + if obj is None and parent is not None: + obj = parent.obj + + #: the user object stored. + self.obj: t.Any = obj + self._meta: t.Dict[str, t.Any] = getattr(parent, "meta", {}) + + #: A dictionary (-like object) with defaults for parameters. + if ( + default_map is None + and info_name is not None + and parent is not None + and parent.default_map is not None + ): + default_map = parent.default_map.get(info_name) + + self.default_map: t.Optional[t.MutableMapping[str, t.Any]] = default_map + + #: This flag indicates if a subcommand is going to be executed. A + #: group callback can use this information to figure out if it's + #: being executed directly or because the execution flow passes + #: onwards to a subcommand. By default it's None, but it can be + #: the name of the subcommand to execute. + #: + #: If chaining is enabled this will be set to ``'*'`` in case + #: any commands are executed. It is however not possible to + #: figure out which ones. If you require this knowledge you + #: should use a :func:`result_callback`. + self.invoked_subcommand: t.Optional[str] = None + + if terminal_width is None and parent is not None: + terminal_width = parent.terminal_width + + #: The width of the terminal (None is autodetection). + self.terminal_width: t.Optional[int] = terminal_width + + if max_content_width is None and parent is not None: + max_content_width = parent.max_content_width + + #: The maximum width of formatted content (None implies a sensible + #: default which is 80 for most things). + self.max_content_width: t.Optional[int] = max_content_width + + if allow_extra_args is None: + allow_extra_args = command.allow_extra_args + + #: Indicates if the context allows extra args or if it should + #: fail on parsing. + #: + #: .. 
+        self.allow_extra_args = allow_extra_args
+
+        if allow_interspersed_args is None:
+            allow_interspersed_args = command.allow_interspersed_args
+
+        #: Indicates if the context allows mixing of arguments and
+        #: options or not.
+        #:
+        #: .. versionadded:: 3.0
+        self.allow_interspersed_args: bool = allow_interspersed_args
+
+        if ignore_unknown_options is None:
+            ignore_unknown_options = command.ignore_unknown_options
+
+        #: Instructs click to ignore options that a command does not
+        #: understand and to store them on the context for later
+        #: processing. This is primarily useful for situations where you
+        #: want to call into external programs. Generally this pattern is
+        #: strongly discouraged because it's not possible to losslessly
+        #: forward all arguments.
+        #:
+        #: .. versionadded:: 4.0
+        self.ignore_unknown_options: bool = ignore_unknown_options
+
+        if help_option_names is None:
+            if parent is not None:
+                help_option_names = parent.help_option_names
+            else:
+                help_option_names = ["--help"]
+
+        #: The names for the help options.
+        self.help_option_names: t.List[str] = help_option_names
+
+        if token_normalize_func is None and parent is not None:
+            token_normalize_func = parent.token_normalize_func
+
+        #: An optional normalization function for tokens. This applies
+        #: to options, choices, commands, etc.
+        self.token_normalize_func: t.Optional[
+            t.Callable[[str], str]
+        ] = token_normalize_func
+
+        #: Indicates if resilient parsing is enabled. In that case Click
+        #: will do its best to not cause any failures and default values
+        #: will be ignored. Useful for completion.
+        self.resilient_parsing: bool = resilient_parsing
+
+        # If there is no envvar prefix yet, but the parent has one and
+        # the command on this level has a name, we can expand the envvar
+        # prefix automatically.
+        if auto_envvar_prefix is None:
+            if (
+                parent is not None
+                and parent.auto_envvar_prefix is not None
+                and self.info_name is not None
+            ):
+                auto_envvar_prefix = (
+                    f"{parent.auto_envvar_prefix}_{self.info_name.upper()}"
+                )
+        else:
+            auto_envvar_prefix = auto_envvar_prefix.upper()
+
+        if auto_envvar_prefix is not None:
+            auto_envvar_prefix = auto_envvar_prefix.replace("-", "_")
+
+        self.auto_envvar_prefix: t.Optional[str] = auto_envvar_prefix
+
+        if color is None and parent is not None:
+            color = parent.color
+
+        #: Controls if styling output is wanted or not.
+        self.color: t.Optional[bool] = color
+
+        if show_default is None and parent is not None:
+            show_default = parent.show_default
+
+        #: Show option default values when formatting help text.
+        self.show_default: t.Optional[bool] = show_default
+
+        self._close_callbacks: t.List[t.Callable[[], t.Any]] = []
+        self._depth = 0
+        self._parameter_source: t.Dict[str, ParameterSource] = {}
+        self._exit_stack = ExitStack()
+
+    def to_info_dict(self) -> t.Dict[str, t.Any]:
+        """Gather information that could be useful for a tool generating
+        user-facing documentation. This traverses the entire CLI
+        structure.
+
+        .. code-block:: python
+
+            with Context(cli) as ctx:
+                info = ctx.to_info_dict()
+
+        ..
versionadded:: 8.0 + """ + return { + "command": self.command.to_info_dict(self), + "info_name": self.info_name, + "allow_extra_args": self.allow_extra_args, + "allow_interspersed_args": self.allow_interspersed_args, + "ignore_unknown_options": self.ignore_unknown_options, + "auto_envvar_prefix": self.auto_envvar_prefix, + } + + def __enter__(self) -> "Context": + self._depth += 1 + push_context(self) + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self._depth -= 1 + if self._depth == 0: + self.close() + pop_context() + + @contextmanager + def scope(self, cleanup: bool = True) -> t.Iterator["Context"]: + """This helper method can be used with the context object to promote + it to the current thread local (see :func:`get_current_context`). + The default behavior of this is to invoke the cleanup functions which + can be disabled by setting `cleanup` to `False`. The cleanup + functions are typically used for things such as closing file handles. + + If the cleanup is intended the context object can also be directly + used as a context manager. + + Example usage:: + + with ctx.scope(): + assert get_current_context() is ctx + + This is equivalent:: + + with ctx: + assert get_current_context() is ctx + + .. versionadded:: 5.0 + + :param cleanup: controls if the cleanup functions should be run or + not. The default is to run these functions. In + some situations the context only wants to be + temporarily pushed in which case this can be disabled. + Nested pushes automatically defer the cleanup. + """ + if not cleanup: + self._depth += 1 + try: + with self as rv: + yield rv + finally: + if not cleanup: + self._depth -= 1 + + @property + def meta(self) -> t.Dict[str, t.Any]: + """This is a dictionary which is shared with all the contexts + that are nested. It exists so that click utilities can store some + state here if they need to. It is however the responsibility of + that code to manage this dictionary well. + + The keys are supposed to be unique dotted strings. For instance + module paths are a good choice for it. What is stored in there is + irrelevant for the operation of click. However what is important is + that code that places data here adheres to the general semantics of + the system. + + Example usage:: + + LANG_KEY = f'{__name__}.lang' + + def set_language(value): + ctx = get_current_context() + ctx.meta[LANG_KEY] = value + + def get_language(): + return get_current_context().meta.get(LANG_KEY, 'en_US') + + .. versionadded:: 5.0 + """ + return self._meta + + def make_formatter(self) -> HelpFormatter: + """Creates the :class:`~click.HelpFormatter` for the help and + usage output. + + To quickly customize the formatter class used without overriding + this method, set the :attr:`formatter_class` attribute. + + .. versionchanged:: 8.0 + Added the :attr:`formatter_class` attribute. + """ + return self.formatter_class( + width=self.terminal_width, max_width=self.max_content_width + ) + + def with_resource(self, context_manager: t.ContextManager[V]) -> V: + """Register a resource as if it were used in a ``with`` + statement. The resource will be cleaned up when the context is + popped. + + Uses :meth:`contextlib.ExitStack.enter_context`. It calls the + resource's ``__enter__()`` method and returns the result. When + the context is popped, it closes the stack, which calls the + resource's ``__exit__()`` method. 
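+
+        A minimal usage sketch (``"report.txt"`` is an arbitrary
+        placeholder file name)::
+
+            f = ctx.with_resource(open("report.txt", "w"))
+            # f is closed automatically when the context tears down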
+ + To register a cleanup function for something that isn't a + context manager, use :meth:`call_on_close`. Or use something + from :mod:`contextlib` to turn it into a context manager first. + + .. code-block:: python + + @click.group() + @click.option("--name") + @click.pass_context + def cli(ctx): + ctx.obj = ctx.with_resource(connect_db(name)) + + :param context_manager: The context manager to enter. + :return: Whatever ``context_manager.__enter__()`` returns. + + .. versionadded:: 8.0 + """ + return self._exit_stack.enter_context(context_manager) + + def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: + """Register a function to be called when the context tears down. + + This can be used to close resources opened during the script + execution. Resources that support Python's context manager + protocol which would be used in a ``with`` statement should be + registered with :meth:`with_resource` instead. + + :param f: The function to execute on teardown. + """ + return self._exit_stack.callback(f) + + def close(self) -> None: + """Invoke all close callbacks registered with + :meth:`call_on_close`, and exit all context managers entered + with :meth:`with_resource`. + """ + self._exit_stack.close() + # In case the context is reused, create a new exit stack. + self._exit_stack = ExitStack() + + @property + def command_path(self) -> str: + """The computed command path. This is used for the ``usage`` + information on the help page. It's automatically created by + combining the info names of the chain of contexts to the root. + """ + rv = "" + if self.info_name is not None: + rv = self.info_name + if self.parent is not None: + parent_command_path = [self.parent.command_path] + + if isinstance(self.parent.command, Command): + for param in self.parent.command.get_params(self): + parent_command_path.extend(param.get_usage_pieces(self)) + + rv = f"{' '.join(parent_command_path)} {rv}" + return rv.lstrip() + + def find_root(self) -> "Context": + """Finds the outermost context.""" + node = self + while node.parent is not None: + node = node.parent + return node + + def find_object(self, object_type: t.Type[V]) -> t.Optional[V]: + """Finds the closest object of a given type.""" + node: t.Optional["Context"] = self + + while node is not None: + if isinstance(node.obj, object_type): + return node.obj + + node = node.parent + + return None + + def ensure_object(self, object_type: t.Type[V]) -> V: + """Like :meth:`find_object` but sets the innermost object to a + new instance of `object_type` if it does not exist. + """ + rv = self.find_object(object_type) + if rv is None: + self.obj = rv = object_type() + return rv + + @t.overload + def lookup_default( + self, name: str, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def lookup_default( + self, name: str, call: "te.Literal[False]" = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def lookup_default(self, name: str, call: bool = True) -> t.Optional[t.Any]: + """Get the default for a parameter from :attr:`default_map`. + + :param name: Name of the parameter. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. 
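+
+        A usage sketch, assuming a hypothetical ``"retries"`` entry in
+        the default map::
+
+            ctx.default_map = {"retries": lambda: 3}
+            ctx.lookup_default("retries")              # -> 3
+            ctx.lookup_default("retries", call=False)  # -> the callable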
+ """ + if self.default_map is not None: + value = self.default_map.get(name) + + if call and callable(value): + return value() + + return value + + return None + + def fail(self, message: str) -> "te.NoReturn": + """Aborts the execution of the program with a specific error + message. + + :param message: the error message to fail with. + """ + raise UsageError(message, self) + + def abort(self) -> "te.NoReturn": + """Aborts the script.""" + raise Abort() + + def exit(self, code: int = 0) -> "te.NoReturn": + """Exits the application with a given exit code.""" + raise Exit(code) + + def get_usage(self) -> str: + """Helper method to get formatted usage string for the current + context and command. + """ + return self.command.get_usage(self) + + def get_help(self) -> str: + """Helper method to get formatted help page for the current + context and command. + """ + return self.command.get_help(self) + + def _make_sub_context(self, command: "Command") -> "Context": + """Create a new context of the same type as this context, but + for a new command. + + :meta private: + """ + return type(self)(command, info_name=command.name, parent=self) + + @t.overload + def invoke( + __self, # noqa: B902 + __callback: "t.Callable[..., V]", + *args: t.Any, + **kwargs: t.Any, + ) -> V: + ... + + @t.overload + def invoke( + __self, # noqa: B902 + __callback: "Command", + *args: t.Any, + **kwargs: t.Any, + ) -> t.Any: + ... + + def invoke( + __self, # noqa: B902 + __callback: t.Union["Command", "t.Callable[..., V]"], + *args: t.Any, + **kwargs: t.Any, + ) -> t.Union[t.Any, V]: + """Invokes a command callback in exactly the way it expects. There + are two ways to invoke this method: + + 1. the first argument can be a callback and all other arguments and + keyword arguments are forwarded directly to the function. + 2. the first argument is a click command object. In that case all + arguments are forwarded as well but proper click parameters + (options and click arguments) must be keyword arguments and Click + will fill in defaults. + + Note that before Click 3.2 keyword arguments were not properly filled + in against the intention of this code and no context was created. For + more information about this change and why it was done in a bugfix + release see :ref:`upgrade-to-3.2`. + + .. versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if :meth:`forward` is called at multiple levels. + """ + if isinstance(__callback, Command): + other_cmd = __callback + + if other_cmd.callback is None: + raise TypeError( + "The given command does not have a callback that can be invoked." + ) + else: + __callback = t.cast("t.Callable[..., V]", other_cmd.callback) + + ctx = __self._make_sub_context(other_cmd) + + for param in other_cmd.params: + if param.name not in kwargs and param.expose_value: + kwargs[param.name] = param.type_cast_value( # type: ignore + ctx, param.get_default(ctx) + ) + + # Track all kwargs as params, so that forward() will pass + # them on in subsequent calls. + ctx.params.update(kwargs) + else: + ctx = __self + + with augment_usage_errors(__self): + with ctx: + return __callback(*args, **kwargs) + + def forward( + __self, __cmd: "Command", *args: t.Any, **kwargs: t.Any # noqa: B902 + ) -> t.Any: + """Similar to :meth:`invoke` but fills in default keyword + arguments from the current context if the other command expects + it. This cannot invoke callbacks directly, only other commands. + + .. 
versionchanged:: 8.0 + All ``kwargs`` are tracked in :attr:`params` so they will be + passed if ``forward`` is called at multiple levels. + """ + # Can only forward to other commands, not direct callbacks. + if not isinstance(__cmd, Command): + raise TypeError("Callback is not a command.") + + for param in __self.params: + if param not in kwargs: + kwargs[param] = __self.params[param] + + return __self.invoke(__cmd, *args, **kwargs) + + def set_parameter_source(self, name: str, source: ParameterSource) -> None: + """Set the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + :param name: The name of the parameter. + :param source: A member of :class:`~click.core.ParameterSource`. + """ + self._parameter_source[name] = source + + def get_parameter_source(self, name: str) -> t.Optional[ParameterSource]: + """Get the source of a parameter. This indicates the location + from which the value of the parameter was obtained. + + This can be useful for determining when a user specified a value + on the command line that is the same as the default value. It + will be :attr:`~click.core.ParameterSource.DEFAULT` only if the + value was actually taken from the default. + + :param name: The name of the parameter. + :rtype: ParameterSource + + .. versionchanged:: 8.0 + Returns ``None`` if the parameter was not provided from any + source. + """ + return self._parameter_source.get(name) + + +class BaseCommand: + """The base command implements the minimal API contract of commands. + Most code will never use this as it does not implement a lot of useful + functionality but it can act as the direct subclass of alternative + parsing methods that do not depend on the Click parser. + + For instance, this can be used to bridge Click and other systems like + argparse or docopt. + + Because base commands do not implement a lot of the API that other + parts of Click take for granted, they are not supported for all + operations. For instance, they cannot be used with the decorators + usually and they have no built-in callback system. + + .. versionchanged:: 2.0 + Added the `context_settings` parameter. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + """ + + #: The context class to create with :meth:`make_context`. + #: + #: .. versionadded:: 8.0 + context_class: t.Type[Context] = Context + #: the default for the :attr:`Context.allow_extra_args` flag. + allow_extra_args = False + #: the default for the :attr:`Context.allow_interspersed_args` flag. + allow_interspersed_args = True + #: the default for the :attr:`Context.ignore_unknown_options` flag. + ignore_unknown_options = False + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> None: + #: the name the command thinks it has. Upon registering a command + #: on a :class:`Group` the group will default the command name + #: with this information. You should instead use the + #: :class:`Context`\'s :attr:`~Context.info_name` attribute. + self.name = name + + if context_settings is None: + context_settings = {} + + #: an optional dictionary with defaults passed to the context. + self.context_settings: t.MutableMapping[str, t.Any] = context_settings + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. 
This traverses the entire structure + below this command. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + :param ctx: A :class:`Context` representing this command. + + .. versionadded:: 8.0 + """ + return {"name": self.name} + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def get_usage(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get usage") + + def get_help(self, ctx: Context) -> str: + raise NotImplementedError("Base commands cannot get help") + + def make_context( + self, + info_name: t.Optional[str], + args: t.List[str], + parent: t.Optional[Context] = None, + **extra: t.Any, + ) -> Context: + """This function when given an info name and arguments will kick + off the parsing and create a new :class:`Context`. It does not + invoke the actual command callback though. + + To quickly customize the context class used without overriding + this method, set the :attr:`context_class` attribute. + + :param info_name: the info name for this invocation. Generally this + is the most descriptive name for the script or + command. For the toplevel script it's usually + the name of the script, for commands below it's + the name of the command. + :param args: the arguments to parse as list of strings. + :param parent: the parent context if available. + :param extra: extra keyword arguments forwarded to the context + constructor. + + .. versionchanged:: 8.0 + Added the :attr:`context_class` attribute. + """ + for key, value in self.context_settings.items(): + if key not in extra: + extra[key] = value + + ctx = self.context_class( + self, info_name=info_name, parent=parent, **extra # type: ignore + ) + + with ctx.scope(cleanup=False): + self.parse_args(ctx, args) + return ctx + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + """Given a context and a list of arguments this creates the parser + and parses the arguments, then modifies the context as necessary. + This is automatically invoked by :meth:`make_context`. + """ + raise NotImplementedError("Base commands do not know how to parse arguments.") + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the command. The default + implementation is raising a not implemented error. + """ + raise NotImplementedError("Base commands are not invocable by default") + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of chained multi-commands. + + Any command could be part of a chained multi-command, so sibling + commands are valid at any point during command completion. Other + command classes will return more completions. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List["CompletionItem"] = [] + + while ctx.parent is not None: + ctx = ctx.parent + + if isinstance(ctx.command, MultiCommand) and ctx.command.chain: + results.extend( + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + if name not in ctx.protected_args + ) + + return results + + @t.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: "te.Literal[True]" = True, + **extra: t.Any, + ) -> "te.NoReturn": + ... + + @t.overload + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = ..., + **extra: t.Any, + ) -> t.Any: + ... + + def main( + self, + args: t.Optional[t.Sequence[str]] = None, + prog_name: t.Optional[str] = None, + complete_var: t.Optional[str] = None, + standalone_mode: bool = True, + windows_expand_args: bool = True, + **extra: t.Any, + ) -> t.Any: + """This is the way to invoke a script with all the bells and + whistles as a command line application. This will always terminate + the application after a call. If this is not wanted, ``SystemExit`` + needs to be caught. + + This method is also available by directly calling the instance of + a :class:`Command`. + + :param args: the arguments that should be used for parsing. If not + provided, ``sys.argv[1:]`` is used. + :param prog_name: the program name that should be used. By default + the program name is constructed by taking the file + name from ``sys.argv[0]``. + :param complete_var: the environment variable that controls the + bash completion support. The default is + ``"_<prog_name>_COMPLETE"`` with prog_name in + uppercase. + :param standalone_mode: the default behavior is to invoke the script + in standalone mode. Click will then + handle exceptions and convert them into + error messages and the function will never + return but shut down the interpreter. If + this is set to `False` they will be + propagated to the caller and the return + value of this function is the return value + of :meth:`invoke`. + :param windows_expand_args: Expand glob patterns, user dir, and + env vars in command line args on Windows. + :param extra: extra keyword arguments are forwarded to the context + constructor. See :class:`Context` for more information. + + .. versionchanged:: 8.0.1 + Added the ``windows_expand_args`` parameter to allow + disabling command line arg expansion on Windows. + + .. versionchanged:: 8.0 + When taking arguments from ``sys.argv`` on Windows, glob + patterns, user dir, and env vars are expanded. + + .. versionchanged:: 3.0 + Added the ``standalone_mode`` parameter. + """ + if args is None: + args = sys.argv[1:] + + if os.name == "nt" and windows_expand_args: + args = _expand_args(args) + else: + args = list(args) + + if prog_name is None: + prog_name = _detect_program_name() + + # Process shell completion requests and exit early. + self._main_shell_completion(extra, prog_name, complete_var) + + try: + try: + with self.make_context(prog_name, args, **extra) as ctx: + rv = self.invoke(ctx) + if not standalone_mode: + return rv + # it's not safe to `ctx.exit(rv)` here! 
+ # note that `rv` may actually contain data like "1" which + # has obvious effects + # more subtle case: `rv=[None, None]` can come out of + # chained commands which all returned `None` -- so it's not + # even always obvious that `rv` indicates success/failure + # by its truthiness/falsiness + ctx.exit() + except (EOFError, KeyboardInterrupt) as e: + echo(file=sys.stderr) + raise Abort() from e + except ClickException as e: + if not standalone_mode: + raise + e.show() + sys.exit(e.exit_code) + except OSError as e: + if e.errno == errno.EPIPE: + sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) + sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) + sys.exit(1) + else: + raise + except Exit as e: + if standalone_mode: + sys.exit(e.exit_code) + else: + # in non-standalone mode, return the exit code + # note that this is only reached if `self.invoke` above raises + # an Exit explicitly -- thus bypassing the check there which + # would return its result + # the results of non-standalone execution may therefore be + # somewhat ambiguous: if there are codepaths which lead to + # `ctx.exit(1)` and to `return 1`, the caller won't be able to + # tell the difference between the two + return e.exit_code + except Abort: + if not standalone_mode: + raise + echo(_("Aborted!"), file=sys.stderr) + sys.exit(1) + + def _main_shell_completion( + self, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: t.Optional[str] = None, + ) -> None: + """Check if the shell is asking for tab completion, process + that, then exit early. Called from :meth:`main` before the + program is invoked. + + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. Defaults to + ``_{PROG_NAME}_COMPLETE``. + + .. versionchanged:: 8.2.0 + Dots (``.``) in ``prog_name`` are replaced with underscores (``_``). + """ + if complete_var is None: + complete_name = prog_name.replace("-", "_").replace(".", "_") + complete_var = f"_{complete_name}_COMPLETE".upper() + + instruction = os.environ.get(complete_var) + + if not instruction: + return + + from .shell_completion import shell_complete + + rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) + sys.exit(rv) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Alias for :meth:`main`.""" + return self.main(*args, **kwargs) + + +class Command(BaseCommand): + """Commands are the basic building block of command line interfaces in + Click. A basic command handles command line parsing and might dispatch + more parsing to commands nested below it. + + :param name: the name of the command to use unless a group overrides it. + :param context_settings: an optional dictionary with defaults that are + passed to the context object. + :param callback: the callback to invoke. This is optional. + :param params: the parameters to register with this command. This can + be either :class:`Option` or :class:`Argument` objects. + :param help: the help string to use for this command. + :param epilog: like the help string but it's printed at the end of the + help page after everything else. + :param short_help: the short help to use for this command. This is + shown on the command listing of the parent command. + :param add_help_option: by default each command registers a ``--help`` + option. This can be disabled by this parameter. + :param no_args_is_help: this controls what happens if no arguments are + provided. 
This option is disabled by default. + If enabled this will add ``--help`` as argument + if no arguments are passed + :param hidden: hide this command from help outputs. + + :param deprecated: issues a message indicating that + the command is deprecated. + + .. versionchanged:: 8.1 + ``help``, ``epilog``, and ``short_help`` are stored unprocessed, + all formatting is done when outputting help text, not at init, + and is done even if not using the ``@command`` decorator. + + .. versionchanged:: 8.0 + Added a ``repr`` showing the command name. + + .. versionchanged:: 7.1 + Added the ``no_args_is_help`` parameter. + + .. versionchanged:: 2.0 + Added the ``context_settings`` parameter. + """ + + def __init__( + self, + name: t.Optional[str], + context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None, + callback: t.Optional[t.Callable[..., t.Any]] = None, + params: t.Optional[t.List["Parameter"]] = None, + help: t.Optional[str] = None, + epilog: t.Optional[str] = None, + short_help: t.Optional[str] = None, + options_metavar: t.Optional[str] = "[OPTIONS]", + add_help_option: bool = True, + no_args_is_help: bool = False, + hidden: bool = False, + deprecated: bool = False, + ) -> None: + super().__init__(name, context_settings) + #: the callback to execute when the command fires. This might be + #: `None` in which case nothing happens. + self.callback = callback + #: the list of parameters for this command in the order they + #: should show up in the help page and execute. Eager parameters + #: will automatically be handled before non eager ones. + self.params: t.List["Parameter"] = params or [] + self.help = help + self.epilog = epilog + self.options_metavar = options_metavar + self.short_help = short_help + self.add_help_option = add_help_option + self.no_args_is_help = no_args_is_help + self.hidden = hidden + self.deprecated = deprecated + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + info_dict.update( + params=[param.to_info_dict() for param in self.get_params(ctx)], + help=self.help, + epilog=self.epilog, + short_help=self.short_help, + hidden=self.hidden, + deprecated=self.deprecated, + ) + return info_dict + + def get_usage(self, ctx: Context) -> str: + """Formats the usage line into a string and returns it. + + Calls :meth:`format_usage` internally. + """ + formatter = ctx.make_formatter() + self.format_usage(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_params(self, ctx: Context) -> t.List["Parameter"]: + rv = self.params + help_option = self.get_help_option(ctx) + + if help_option is not None: + rv = [*rv, help_option] + + return rv + + def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the usage line into the formatter. + + This is a low-level method called by :meth:`get_usage`. + """ + pieces = self.collect_usage_pieces(ctx) + formatter.write_usage(ctx.command_path, " ".join(pieces)) + + def collect_usage_pieces(self, ctx: Context) -> t.List[str]: + """Returns all the pieces that go into the usage line and returns + it as a list of strings. 
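+
+        A sketch of a typical result, assuming the default
+        ``options_metavar`` and a single required ``FILENAME`` argument::
+
+            ["[OPTIONS]", "FILENAME"]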
+ """ + rv = [self.options_metavar] if self.options_metavar else [] + + for param in self.get_params(ctx): + rv.extend(param.get_usage_pieces(ctx)) + + return rv + + def get_help_option_names(self, ctx: Context) -> t.List[str]: + """Returns the names for the help option.""" + all_names = set(ctx.help_option_names) + for param in self.params: + all_names.difference_update(param.opts) + all_names.difference_update(param.secondary_opts) + return list(all_names) + + def get_help_option(self, ctx: Context) -> t.Optional["Option"]: + """Returns the help option object.""" + help_options = self.get_help_option_names(ctx) + + if not help_options or not self.add_help_option: + return None + + def show_help(ctx: Context, param: "Parameter", value: str) -> None: + if value and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + return Option( + help_options, + is_flag=True, + is_eager=True, + expose_value=False, + callback=show_help, + help=_("Show this message and exit."), + ) + + def make_parser(self, ctx: Context) -> OptionParser: + """Creates the underlying option parser for this command.""" + parser = OptionParser(ctx) + for param in self.get_params(ctx): + param.add_to_parser(parser, ctx) + return parser + + def get_help(self, ctx: Context) -> str: + """Formats the help into a string and returns it. + + Calls :meth:`format_help` internally. + """ + formatter = ctx.make_formatter() + self.format_help(ctx, formatter) + return formatter.getvalue().rstrip("\n") + + def get_short_help_str(self, limit: int = 45) -> str: + """Gets short help for the command or makes it by shortening the + long help string. + """ + if self.short_help: + text = inspect.cleandoc(self.short_help) + elif self.help: + text = make_default_short_help(self.help, limit) + else: + text = "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + return text.strip() + + def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help into the formatter if it exists. + + This is a low-level method called by :meth:`get_help`. 
+ + This calls the following methods: + + - :meth:`format_usage` + - :meth:`format_help_text` + - :meth:`format_options` + - :meth:`format_epilog` + """ + self.format_usage(ctx, formatter) + self.format_help_text(ctx, formatter) + self.format_options(ctx, formatter) + self.format_epilog(ctx, formatter) + + def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the help text to the formatter if it exists.""" + if self.help is not None: + # truncate the help text to the first form feed + text = inspect.cleandoc(self.help).partition("\f")[0] + else: + text = "" + + if self.deprecated: + text = _("(Deprecated) {text}").format(text=text) + + if text: + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(text) + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes all the options into the formatter if they exist.""" + opts = [] + for param in self.get_params(ctx): + rv = param.get_help_record(ctx) + if rv is not None: + opts.append(rv) + + if opts: + with formatter.section(_("Options")): + formatter.write_dl(opts) + + def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: + """Writes the epilog into the formatter if it exists.""" + if self.epilog: + epilog = inspect.cleandoc(self.epilog) + formatter.write_paragraph() + + with formatter.indentation(): + formatter.write_text(epilog) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + parser = self.make_parser(ctx) + opts, args, param_order = parser.parse_args(args=args) + + for param in iter_params_for_processing(param_order, self.get_params(ctx)): + value, args = param.handle_parse_result(ctx, opts, args) + + if args and not ctx.allow_extra_args and not ctx.resilient_parsing: + ctx.fail( + ngettext( + "Got unexpected extra argument ({args})", + "Got unexpected extra arguments ({args})", + len(args), + ).format(args=" ".join(map(str, args))) + ) + + ctx.args = args + ctx._opt_prefixes.update(parser._opt_prefixes) + return args + + def invoke(self, ctx: Context) -> t.Any: + """Given a context, this invokes the attached callback (if it exists) + in the right way. + """ + if self.deprecated: + message = _( + "DeprecationWarning: The command {name!r} is deprecated." + ).format(name=self.name) + echo(style(message, fg="red"), err=True) + + if self.callback is not None: + return ctx.invoke(self.callback, **ctx.params) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options and chained multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results: t.List["CompletionItem"] = [] + + if incomplete and not incomplete[0].isalnum(): + for param in self.get_params(ctx): + if ( + not isinstance(param, Option) + or param.hidden + or ( + not param.multiple + and ctx.get_parameter_source(param.name) # type: ignore + is ParameterSource.COMMANDLINE + ) + ): + continue + + results.extend( + CompletionItem(name, help=param.help) + for name in [*param.opts, *param.secondary_opts] + if name.startswith(incomplete) + ) + + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class MultiCommand(Command): + """A multi command is the basic implementation of a command that + dispatches to subcommands. The most common version is the + :class:`Group`. + + :param invoke_without_command: this controls how the multi command itself + is invoked. By default it's only invoked + if a subcommand is provided. + :param no_args_is_help: this controls what happens if no arguments are + provided. This option is enabled by default if + `invoke_without_command` is disabled or disabled + if it's enabled. If enabled this will add + ``--help`` as argument if no arguments are + passed. + :param subcommand_metavar: the string that is used in the documentation + to indicate the subcommand place. + :param chain: if this is set to `True` chaining of multiple subcommands + is enabled. This restricts the form of commands in that + they cannot have optional arguments but it allows + multiple commands to be chained together. + :param result_callback: The result callback to attach to this multi + command. This can be set or changed later with the + :meth:`result_callback` decorator. + :param attrs: Other command arguments described in :class:`Command`. + """ + + allow_extra_args = True + allow_interspersed_args = False + + def __init__( + self, + name: t.Optional[str] = None, + invoke_without_command: bool = False, + no_args_is_help: t.Optional[bool] = None, + subcommand_metavar: t.Optional[str] = None, + chain: bool = False, + result_callback: t.Optional[t.Callable[..., t.Any]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if no_args_is_help is None: + no_args_is_help = not invoke_without_command + + self.no_args_is_help = no_args_is_help + self.invoke_without_command = invoke_without_command + + if subcommand_metavar is None: + if chain: + subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." + else: + subcommand_metavar = "COMMAND [ARGS]..." + + self.subcommand_metavar = subcommand_metavar + self.chain = chain + # The result callback that is stored. This can be set or + # overridden with the :func:`result_callback` decorator. + self._result_callback = result_callback + + if self.chain: + for param in self.params: + if isinstance(param, Argument) and not param.required: + raise RuntimeError( + "Multi commands in chain mode cannot have" + " optional arguments." 
+ ) + + def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict(ctx) + commands = {} + + for name in self.list_commands(ctx): + command = self.get_command(ctx, name) + + if command is None: + continue + + sub_ctx = ctx._make_sub_context(command) + + with sub_ctx.scope(cleanup=False): + commands[name] = command.to_info_dict(sub_ctx) + + info_dict.update(commands=commands, chain=self.chain) + return info_dict + + def collect_usage_pieces(self, ctx: Context) -> t.List[str]: + rv = super().collect_usage_pieces(ctx) + rv.append(self.subcommand_metavar) + return rv + + def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: + super().format_options(ctx, formatter) + self.format_commands(ctx, formatter) + + def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: + """Adds a result callback to the command. By default if a + result callback is already registered this will chain them but + this can be disabled with the `replace` parameter. The result + callback is invoked with the return value of the subcommand + (or the list of return values from all subcommands if chaining + is enabled) as well as the parameters as they would be passed + to the main callback. + + Example:: + + @click.group() + @click.option('-i', '--input', default=23) + def cli(input): + return 42 + + @cli.result_callback() + def process_result(result, input): + return result + input + + :param replace: if set to `True` an already existing result + callback will be removed. + + .. versionchanged:: 8.0 + Renamed from ``resultcallback``. + + .. versionadded:: 3.0 + """ + + def decorator(f: F) -> F: + old_callback = self._result_callback + + if old_callback is None or replace: + self._result_callback = f + return f + + def function(__value, *args, **kwargs): # type: ignore + inner = old_callback(__value, *args, **kwargs) + return f(inner, *args, **kwargs) + + self._result_callback = rv = update_wrapper(t.cast(F, function), f) + return rv + + return decorator + + def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: + """Extra format methods for multi methods that adds all the commands + after the options. + """ + commands = [] + for subcommand in self.list_commands(ctx): + cmd = self.get_command(ctx, subcommand) + # What is this, the tool lied about a command. 
Ignore it + if cmd is None: + continue + if cmd.hidden: + continue + + commands.append((subcommand, cmd)) + + # allow for 3 times the default spacing + if len(commands): + limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) + + rows = [] + for subcommand, cmd in commands: + help = cmd.get_short_help_str(limit) + rows.append((subcommand, help)) + + if rows: + with formatter.section(_("Commands")): + formatter.write_dl(rows) + + def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: + if not args and self.no_args_is_help and not ctx.resilient_parsing: + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + rest = super().parse_args(ctx, args) + + if self.chain: + ctx.protected_args = rest + ctx.args = [] + elif rest: + ctx.protected_args, ctx.args = rest[:1], rest[1:] + + return ctx.args + + def invoke(self, ctx: Context) -> t.Any: + def _process_result(value: t.Any) -> t.Any: + if self._result_callback is not None: + value = ctx.invoke(self._result_callback, value, **ctx.params) + return value + + if not ctx.protected_args: + if self.invoke_without_command: + # No subcommand was invoked, so the result callback is + # invoked with the group return value for regular + # groups, or an empty list for chained groups. + with ctx: + rv = super().invoke(ctx) + return _process_result([] if self.chain else rv) + ctx.fail(_("Missing command.")) + + # Fetch args back out + args = [*ctx.protected_args, *ctx.args] + ctx.args = [] + ctx.protected_args = [] + + # If we're not in chain mode, we only allow the invocation of a + # single command but we also inform the current context about the + # name of the command to invoke. + if not self.chain: + # Make sure the context is entered so we do not clean up + # resources until the result processor has worked. + with ctx: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + ctx.invoked_subcommand = cmd_name + super().invoke(ctx) + sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) + with sub_ctx: + return _process_result(sub_ctx.command.invoke(sub_ctx)) + + # In chain mode we create the contexts step by step, but after the + # base command has been invoked. Because at that point we do not + # know the subcommands yet, the invoked subcommand attribute is + # set to ``*`` to inform the command that subcommands are executed + # but nothing else. + with ctx: + ctx.invoked_subcommand = "*" if args else None + super().invoke(ctx) + + # Otherwise we make every single context and invoke them in a + # chain. In that case the return value to the result processor + # is the list of all invoked subcommand's results. + contexts = [] + while args: + cmd_name, cmd, args = self.resolve_command(ctx, args) + assert cmd is not None + sub_ctx = cmd.make_context( + cmd_name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + ) + contexts.append(sub_ctx) + args, sub_ctx.args = sub_ctx.args, [] + + rv = [] + for sub_ctx in contexts: + with sub_ctx: + rv.append(sub_ctx.command.invoke(sub_ctx)) + return _process_result(rv) + + def resolve_command( + self, ctx: Context, args: t.List[str] + ) -> t.Tuple[t.Optional[str], t.Optional[Command], t.List[str]]: + cmd_name = make_str(args[0]) + original_cmd_name = cmd_name + + # Get the command + cmd = self.get_command(ctx, cmd_name) + + # If we can't find the command but there is a normalization + # function available, we try with that one. 
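+        # For instance, with ``token_normalize_func=str.lower`` a user
+        # typing "STATUS" would still resolve the "status" subcommand;
+        # the normalizer named here is purely illustrative.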
+ if cmd is None and ctx.token_normalize_func is not None: + cmd_name = ctx.token_normalize_func(cmd_name) + cmd = self.get_command(ctx, cmd_name) + + # If we don't find the command we want to show an error message + # to the user that it was not provided. However, there is + # something else we should do: if the first argument looks like + # an option we want to kick off parsing again for arguments to + # resolve things like --help which now should go to the main + # place. + if cmd is None and not ctx.resilient_parsing: + if split_opt(cmd_name)[0]: + self.parse_args(ctx, ctx.args) + ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) + return cmd_name if cmd else None, cmd, args[1:] + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + """Given a context and a command name, this returns a + :class:`Command` object if it exists or returns `None`. + """ + raise NotImplementedError + + def list_commands(self, ctx: Context) -> t.List[str]: + """Returns a list of subcommand names in the order they should + appear. + """ + return [] + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. Looks + at the names of options, subcommands, and chained + multi-commands. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + results = [ + CompletionItem(name, help=command.get_short_help_str()) + for name, command in _complete_visible_commands(ctx, incomplete) + ] + results.extend(super().shell_complete(ctx, incomplete)) + return results + + +class Group(MultiCommand): + """A group allows a command to have subcommands attached. This is + the most common way to implement nesting in Click. + + :param name: The name of the group command. + :param commands: A dict mapping names to :class:`Command` objects. + Can also be a list of :class:`Command`, which will use + :attr:`Command.name` to create the dict. + :param attrs: Other command arguments described in + :class:`MultiCommand`, :class:`Command`, and + :class:`BaseCommand`. + + .. versionchanged:: 8.0 + The ``commands`` argument can be a list of command objects. + """ + + #: If set, this is used by the group's :meth:`command` decorator + #: as the default :class:`Command` class. This is useful to make all + #: subcommands use a custom command class. + #: + #: .. versionadded:: 8.0 + command_class: t.Optional[t.Type[Command]] = None + + #: If set, this is used by the group's :meth:`group` decorator + #: as the default :class:`Group` class. This is useful to make all + #: subgroups use a custom group class. + #: + #: If set to the special value :class:`type` (literally + #: ``group_class = type``), this group's class will be used as the + #: default class. This makes a custom group class continue to make + #: custom groups. + #: + #: .. 
versionadded:: 8.0 + group_class: t.Optional[t.Union[t.Type["Group"], t.Type[type]]] = None + # Literal[type] isn't valid, so use Type[type] + + def __init__( + self, + name: t.Optional[str] = None, + commands: t.Optional[ + t.Union[t.MutableMapping[str, Command], t.Sequence[Command]] + ] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + + if commands is None: + commands = {} + elif isinstance(commands, abc.Sequence): + commands = {c.name: c for c in commands if c.name is not None} + + #: The registered subcommands by their exported names. + self.commands: t.MutableMapping[str, Command] = commands + + def add_command(self, cmd: Command, name: t.Optional[str] = None) -> None: + """Registers another :class:`Command` with this group. If the name + is not provided, the name of the command is used. + """ + name = name or cmd.name + if name is None: + raise TypeError("Command has no name.") + _check_multicommand(self, name, cmd, register=True) + self.commands[name] = cmd + + @t.overload + def command(self, __func: t.Callable[..., t.Any]) -> Command: + ... + + @t.overload + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], Command]: + ... + + def command( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], Command], Command]: + """A shortcut decorator for declaring and attaching a command to + the group. This takes the same arguments as :func:`command` and + immediately registers the created command with this group by + calling :meth:`add_command`. + + To customize the command class used, set the + :attr:`command_class` attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`command_class` attribute. + """ + from .decorators import command + + func: t.Optional[t.Callable[..., t.Any]] = None + + if args and callable(args[0]): + assert ( + len(args) == 1 and not kwargs + ), "Use 'command(**kwargs)(callable)' to provide arguments." + (func,) = args + args = () + + if self.command_class and kwargs.get("cls") is None: + kwargs["cls"] = self.command_class + + def decorator(f: t.Callable[..., t.Any]) -> Command: + cmd: Command = command(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + @t.overload + def group(self, __func: t.Callable[..., t.Any]) -> "Group": + ... + + @t.overload + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[t.Callable[..., t.Any]], "Group"]: + ... + + def group( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], "Group"], "Group"]: + """A shortcut decorator for declaring and attaching a group to + the group. This takes the same arguments as :func:`group` and + immediately registers the created group with this group by + calling :meth:`add_command`. + + To customize the group class used, set the :attr:`group_class` + attribute. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.0 + Added the :attr:`group_class` attribute. + """ + from .decorators import group + + func: t.Optional[t.Callable[..., t.Any]] = None + + if args and callable(args[0]): + assert ( + len(args) == 1 and not kwargs + ), "Use 'group(**kwargs)(callable)' to provide arguments." 
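+            # Decorator used without parentheses (e.g. ``@parent.group``,
+            # where ``parent`` is an illustrative group name): the
+            # function arrives as the sole positional argument.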
+ (func,) = args + args = () + + if self.group_class is not None and kwargs.get("cls") is None: + if self.group_class is type: + kwargs["cls"] = type(self) + else: + kwargs["cls"] = self.group_class + + def decorator(f: t.Callable[..., t.Any]) -> "Group": + cmd: Group = group(*args, **kwargs)(f) + self.add_command(cmd) + return cmd + + if func is not None: + return decorator(func) + + return decorator + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + return self.commands.get(cmd_name) + + def list_commands(self, ctx: Context) -> t.List[str]: + return sorted(self.commands) + + +class CommandCollection(MultiCommand): + """A command collection is a multi command that merges multiple multi + commands together into one. This is a straightforward implementation + that accepts a list of different multi commands as sources and + provides all the commands for each of them. + + See :class:`MultiCommand` and :class:`Command` for the description of + ``name`` and ``attrs``. + """ + + def __init__( + self, + name: t.Optional[str] = None, + sources: t.Optional[t.List[MultiCommand]] = None, + **attrs: t.Any, + ) -> None: + super().__init__(name, **attrs) + #: The list of registered multi commands. + self.sources: t.List[MultiCommand] = sources or [] + + def add_source(self, multi_cmd: MultiCommand) -> None: + """Adds a new multi command to the chain dispatcher.""" + self.sources.append(multi_cmd) + + def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: + for source in self.sources: + rv = source.get_command(ctx, cmd_name) + + if rv is not None: + if self.chain: + _check_multicommand(self, cmd_name, rv) + + return rv + + return None + + def list_commands(self, ctx: Context) -> t.List[str]: + rv: t.Set[str] = set() + + for source in self.sources: + rv.update(source.list_commands(ctx)) + + return sorted(rv) + + +def _check_iter(value: t.Any) -> t.Iterator[t.Any]: + """Check if the value is iterable but not a string. Raises a type + error, or return an iterator over the value. + """ + if isinstance(value, str): + raise TypeError + + return iter(value) + + +class Parameter: + r"""A parameter to a command comes in two versions: they are either + :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently + not supported by design as some of the internals for parsing are + intentionally not finalized. + + Some settings are supported by both options and arguments. + + :param param_decls: the parameter declarations for this option or + argument. This is a list of flags or argument + names. + :param type: the type that should be used. Either a :class:`ParamType` + or a Python type. The latter is converted into the former + automatically if supported. + :param required: controls if this is optional or not. + :param default: the default value if omitted. This can also be a callable, + in which case it's invoked when the default is needed + without any arguments. + :param callback: A function to further process or validate the value + after type conversion. It is called as ``f(ctx, param, value)`` + and must return the value. It is called for all sources, + including prompts. + :param nargs: the number of arguments to match. If not ``1`` the return + value is a tuple instead of single value. The default for + nargs is ``1`` (except if the type is a tuple, then it's + the arity of the tuple). If ``nargs=-1``, all remaining + parameters are collected. + :param metavar: how the value is represented in the help page. 
+ :param expose_value: if this is `True` then the value is passed onwards + to the command callback and stored on the context, + otherwise it's skipped. + :param is_eager: eager values are processed before non eager ones. This + should not be set for arguments or it will inverse the + order of processing. + :param envvar: a string or list of strings that are environment variables + that should be checked. + :param shell_complete: A function that returns custom shell + completions. Used instead of the param's type completion if + given. Takes ``ctx, param, incomplete`` and must return a list + of :class:`~click.shell_completion.CompletionItem` or a list of + strings. + + .. versionchanged:: 8.0 + ``process_value`` validates required parameters and bounded + ``nargs``, and invokes the parameter callback before returning + the value. This allows the callback to validate prompts. + ``full_process_value`` is removed. + + .. versionchanged:: 8.0 + ``autocompletion`` is renamed to ``shell_complete`` and has new + semantics described above. The old name is deprecated and will + be removed in 8.1, until then it will be wrapped to match the + new requirements. + + .. versionchanged:: 8.0 + For ``multiple=True, nargs>1``, the default must be a list of + tuples. + + .. versionchanged:: 8.0 + Setting a default is no longer required for ``nargs>1``, it will + default to ``None``. ``multiple=True`` or ``nargs=-1`` will + default to ``()``. + + .. versionchanged:: 7.1 + Empty environment variables are ignored rather than taking the + empty string value. This makes it possible for scripts to clear + variables if they can't unset them. + + .. versionchanged:: 2.0 + Changed signature for parameter callback to also be passed the + parameter. The old callback format will still work, but it will + raise a warning to give you a chance to migrate the code easier. + """ + + param_type_name = "parameter" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + required: bool = False, + default: t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]] = None, + callback: t.Optional[t.Callable[[Context, "Parameter", t.Any], t.Any]] = None, + nargs: t.Optional[int] = None, + multiple: bool = False, + metavar: t.Optional[str] = None, + expose_value: bool = True, + is_eager: bool = False, + envvar: t.Optional[t.Union[str, t.Sequence[str]]] = None, + shell_complete: t.Optional[ + t.Callable[ + [Context, "Parameter", str], + t.Union[t.List["CompletionItem"], t.List[str]], + ] + ] = None, + ) -> None: + self.name: t.Optional[str] + self.opts: t.List[str] + self.secondary_opts: t.List[str] + self.name, self.opts, self.secondary_opts = self._parse_decls( + param_decls or (), expose_value + ) + self.type: types.ParamType = types.convert_type(type, default) + + # Default nargs to what the type tells us if we have that + # information available. + if nargs is None: + if self.type.is_composite: + nargs = self.type.arity + else: + nargs = 1 + + self.required = required + self.callback = callback + self.nargs = nargs + self.multiple = multiple + self.expose_value = expose_value + self.default = default + self.is_eager = is_eager + self.metavar = metavar + self.envvar = envvar + self._custom_shell_complete = shell_complete + + if __debug__: + if self.type.is_composite and nargs != self.type.arity: + raise ValueError( + f"'nargs' must be {self.type.arity} (or None) for" + f" type {self.type!r}, but it was {nargs}." 
+ ) + + # Skip no default or callable default. + check_default = default if not callable(default) else None + + if check_default is not None: + if multiple: + try: + # Only check the first value against nargs. + check_default = next(_check_iter(check_default), None) + except TypeError: + raise ValueError( + "'default' must be a list when 'multiple' is true." + ) from None + + # Can be None for multiple with empty default. + if nargs != 1 and check_default is not None: + try: + _check_iter(check_default) + except TypeError: + if multiple: + message = ( + "'default' must be a list of lists when 'multiple' is" + " true and 'nargs' != 1." + ) + else: + message = "'default' must be a list when 'nargs' != 1." + + raise ValueError(message) from None + + if nargs > 1 and len(check_default) != nargs: + subject = "item length" if multiple else "length" + raise ValueError( + f"'default' {subject} must match nargs={nargs}." + ) + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + return { + "name": self.name, + "param_type_name": self.param_type_name, + "opts": self.opts, + "secondary_opts": self.secondary_opts, + "type": self.type.to_info_dict(), + "required": self.required, + "nargs": self.nargs, + "multiple": self.multiple, + "default": self.default, + "envvar": self.envvar, + } + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + raise NotImplementedError() + + @property + def human_readable_name(self) -> str: + """Returns the human readable name of this parameter. This is the + same as the name for options, but the metavar for arguments. + """ + return self.name # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + + metavar = self.type.get_metavar(self) + + if metavar is None: + metavar = self.type.name.upper() + + if self.nargs != 1: + metavar += "..." + + return metavar + + @t.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + """Get the default for the parameter. Tries + :meth:`Context.lookup_default` first, then the local default. + + :param ctx: Current context. + :param call: If the default is a callable, call it. Disable to + return the callable instead. + + .. versionchanged:: 8.0.2 + Type casting is no longer performed when getting a default. + + .. versionchanged:: 8.0.1 + Type casting can fail in resilient parsing mode. Invalid + defaults will not prevent showing help text. + + .. versionchanged:: 8.0 + Looks at ``ctx.default_map`` first. + + .. versionchanged:: 8.0 + Added the ``call`` parameter. 
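+
+        A small sketch of the ``call`` switch (``ctx`` and ``param`` are
+        assumed to already exist)::
+
+            raw = param.get_default(ctx, call=False)  # may be a callable
+            value = param.get_default(ctx)  # callable defaults are invoked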
+ """ + value = ctx.lookup_default(self.name, call=False) # type: ignore + + if value is None: + value = self.default + + if call and callable(value): + value = value() + + return value + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + raise NotImplementedError() + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, t.Any] + ) -> t.Tuple[t.Any, ParameterSource]: + value = opts.get(self.name) # type: ignore + source = ParameterSource.COMMANDLINE + + if value is None: + value = self.value_from_envvar(ctx) + source = ParameterSource.ENVIRONMENT + + if value is None: + value = ctx.lookup_default(self.name) # type: ignore + source = ParameterSource.DEFAULT_MAP + + if value is None: + value = self.get_default(ctx) + source = ParameterSource.DEFAULT + + return value, source + + def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: + """Convert and validate a value against the option's + :attr:`type`, :attr:`multiple`, and :attr:`nargs`. + """ + if value is None: + return () if self.multiple or self.nargs == -1 else None + + def check_iter(value: t.Any) -> t.Iterator[t.Any]: + try: + return _check_iter(value) + except TypeError: + # This should only happen when passing in args manually, + # the parser should construct an iterable when parsing + # the command line. + raise BadParameter( + _("Value must be an iterable."), ctx=ctx, param=self + ) from None + + if self.nargs == 1 or self.type.is_composite: + + def convert(value: t.Any) -> t.Any: + return self.type(value, param=self, ctx=ctx) + + elif self.nargs == -1: + + def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...] + return tuple(self.type(x, self, ctx) for x in check_iter(value)) + + else: # nargs > 1 + + def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...] 
+ value = tuple(check_iter(value)) + + if len(value) != self.nargs: + raise BadParameter( + ngettext( + "Takes {nargs} values but 1 was given.", + "Takes {nargs} values but {len} were given.", + len(value), + ).format(nargs=self.nargs, len=len(value)), + ctx=ctx, + param=self, + ) + + return tuple(self.type(x, self, ctx) for x in value) + + if self.multiple: + return tuple(convert(x) for x in check_iter(value)) + + return convert(value) + + def value_is_missing(self, value: t.Any) -> bool: + if value is None: + return True + + if (self.nargs != 1 or self.multiple) and value == (): + return True + + return False + + def process_value(self, ctx: Context, value: t.Any) -> t.Any: + value = self.type_cast_value(ctx, value) + + if self.required and self.value_is_missing(value): + raise MissingParameter(ctx=ctx, param=self) + + if self.callback is not None: + value = self.callback(ctx, self, value) + + return value + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + if self.envvar is None: + return None + + if isinstance(self.envvar, str): + rv = os.environ.get(self.envvar) + + if rv: + return rv + else: + for envvar in self.envvar: + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is not None and self.nargs != 1: + rv = self.type.split_envvar_value(rv) + + return rv + + def handle_parse_result( + self, ctx: Context, opts: t.Mapping[str, t.Any], args: t.List[str] + ) -> t.Tuple[t.Any, t.List[str]]: + with augment_usage_errors(ctx, param=self): + value, source = self.consume_value(ctx, opts) + ctx.set_parameter_source(self.name, source) # type: ignore + + try: + value = self.process_value(ctx, value) + except Exception: + if not ctx.resilient_parsing: + raise + + value = None + + if self.expose_value: + ctx.params[self.name] = value # type: ignore + + return value, args + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + pass + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [] + + def get_error_hint(self, ctx: Context) -> str: + """Get a stringified version of the param for use in error messages to + indicate which param caused the error. + """ + hint_list = self.opts or [self.human_readable_name] + return " / ".join(f"'{x}'" for x in hint_list) + + def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: + """Return a list of completions for the incomplete value. If a + ``shell_complete`` function was given during init, it is used. + Otherwise, the :attr:`type` + :meth:`~click.types.ParamType.shell_complete` function is used. + + :param ctx: Invocation context for this command. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + if self._custom_shell_complete is not None: + results = self._custom_shell_complete(ctx, self, incomplete) + + if results and isinstance(results[0], str): + from click.shell_completion import CompletionItem + + results = [CompletionItem(c) for c in results] + + return t.cast(t.List["CompletionItem"], results) + + return self.type.shell_complete(ctx, self, incomplete) + + +class Option(Parameter): + """Options are usually optional values on the command line and + have some extra features that arguments don't have. + + All other parameters are passed onwards to the parameter constructor. + + :param show_default: Show the default value for this option in its + help text. 
Values are not shown by default, unless
+        :attr:`Context.show_default` is ``True``. If this value is a
+        string, it shows that string in parentheses instead of the
+        actual value. This is particularly useful for dynamic options.
+        For single option boolean flags, the default remains hidden if
+        its value is ``False``.
+    :param show_envvar: Controls if an environment variable should be
+        shown on the help page. Normally, environment variables are not
+        shown.
+    :param prompt: If set to ``True`` or a non-empty string, the user
+        will be prompted for input. If set to ``True``, the prompt will
+        be the option name capitalized.
+    :param confirmation_prompt: Prompt a second time to confirm the
+        value if it was prompted for. Can be set to a string instead of
+        ``True`` to customize the message.
+    :param prompt_required: If set to ``False``, the user will be
+        prompted for input only when the option was specified as a flag
+        without a value.
+    :param hide_input: If this is ``True``, the input on the prompt
+        will be hidden from the user. This is useful for password input.
+    :param is_flag: forces this option to act as a flag. The default is
+        auto detection.
+    :param flag_value: which value should be used for this flag if it's
+        enabled. This is set to a boolean automatically if the option
+        string contains a slash to mark two options.
+    :param multiple: if this is set to ``True``, the argument is accepted
+        multiple times and recorded. This is similar to ``nargs`` in how
+        it works but supports an arbitrary number of arguments.
+    :param count: this flag makes an option increment an integer.
+    :param allow_from_autoenv: if this is enabled, the value of this
+        parameter will be pulled from an environment variable in case a
+        prefix is defined on the context.
+    :param help: the help string.
+    :param hidden: hide this option from help outputs.
+    :param attrs: Other command arguments described in :class:`Parameter`.
+
+    .. versionchanged:: 8.1.0
+        Help text indentation is cleaned here instead of only in the
+        ``@option`` decorator.
+
+    .. versionchanged:: 8.1.0
+        The ``show_default`` parameter overrides
+        ``Context.show_default``.
+
+    .. versionchanged:: 8.1.0
+        The default of a single option boolean flag is not shown if the
+        default value is ``False``.
+
+    .. versionchanged:: 8.0.1
+        ``type`` is detected from ``flag_value`` if given.
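+
+    A short usage sketch (command and option names are illustrative)::
+
+        @click.command()
+        @click.option("--count", default=1, show_default=True,
+                      help="Number of greetings.")
+        @click.option("--shout", is_flag=True)
+        def hello(count, shout):
+            ...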
+ """ + + param_type_name = "option" + + def __init__( + self, + param_decls: t.Optional[t.Sequence[str]] = None, + show_default: t.Union[bool, str, None] = None, + prompt: t.Union[bool, str] = False, + confirmation_prompt: t.Union[bool, str] = False, + prompt_required: bool = True, + hide_input: bool = False, + is_flag: t.Optional[bool] = None, + flag_value: t.Optional[t.Any] = None, + multiple: bool = False, + count: bool = False, + allow_from_autoenv: bool = True, + type: t.Optional[t.Union[types.ParamType, t.Any]] = None, + help: t.Optional[str] = None, + hidden: bool = False, + show_choices: bool = True, + show_envvar: bool = False, + **attrs: t.Any, + ) -> None: + if help: + help = inspect.cleandoc(help) + + default_is_missing = "default" not in attrs + super().__init__(param_decls, type=type, multiple=multiple, **attrs) + + if prompt is True: + if self.name is None: + raise TypeError("'name' is required with 'prompt=True'.") + + prompt_text: t.Optional[str] = self.name.replace("_", " ").capitalize() + elif prompt is False: + prompt_text = None + else: + prompt_text = prompt + + self.prompt = prompt_text + self.confirmation_prompt = confirmation_prompt + self.prompt_required = prompt_required + self.hide_input = hide_input + self.hidden = hidden + + # If prompt is enabled but not required, then the option can be + # used as a flag to indicate using prompt or flag_value. + self._flag_needs_value = self.prompt is not None and not self.prompt_required + + if is_flag is None: + if flag_value is not None: + # Implicitly a flag because flag_value was set. + is_flag = True + elif self._flag_needs_value: + # Not a flag, but when used as a flag it shows a prompt. + is_flag = False + else: + # Implicitly a flag because flag options were given. + is_flag = bool(self.secondary_opts) + elif is_flag is False and not self._flag_needs_value: + # Not a flag, and prompt is not enabled, can be used as a + # flag if flag_value is set. + self._flag_needs_value = flag_value is not None + + self.default: t.Union[t.Any, t.Callable[[], t.Any]] + + if is_flag and default_is_missing and not self.required: + if multiple: + self.default = () + else: + self.default = False + + if flag_value is None: + flag_value = not self.default + + self.type: types.ParamType + if is_flag and type is None: + # Re-guess the type from the flag value instead of the + # default. + self.type = types.convert_type(None, flag_value) + + self.is_flag: bool = is_flag + self.is_bool_flag: bool = is_flag and isinstance(self.type, types.BoolParamType) + self.flag_value: t.Any = flag_value + + # Counting + self.count = count + if count: + if type is None: + self.type = types.IntRange(min=0) + if default_is_missing: + self.default = 0 + + self.allow_from_autoenv = allow_from_autoenv + self.help = help + self.show_default = show_default + self.show_choices = show_choices + self.show_envvar = show_envvar + + if __debug__: + if self.nargs == -1: + raise TypeError("nargs=-1 is not supported for options.") + + if self.prompt and self.is_flag and not self.is_bool_flag: + raise TypeError("'prompt' is not valid for non-boolean flag.") + + if not self.is_bool_flag and self.secondary_opts: + raise TypeError("Secondary flag is not valid for non-boolean flag.") + + if self.is_bool_flag and self.hide_input and self.prompt is not None: + raise TypeError( + "'prompt' with 'hide_input' is not valid for boolean flag." 
+ ) + + if self.count: + if self.multiple: + raise TypeError("'count' is not valid with 'multiple'.") + + if self.is_flag: + raise TypeError("'count' is not valid with 'is_flag'.") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + help=self.help, + prompt=self.prompt, + is_flag=self.is_flag, + flag_value=self.flag_value, + count=self.count, + hidden=self.hidden, + ) + return info_dict + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + opts = [] + secondary_opts = [] + name = None + possible_names = [] + + for decl in decls: + if decl.isidentifier(): + if name is not None: + raise TypeError(f"Name '{name}' defined twice") + name = decl + else: + split_char = ";" if decl[:1] == "/" else "/" + if split_char in decl: + first, second = decl.split(split_char, 1) + first = first.rstrip() + if first: + possible_names.append(split_opt(first)) + opts.append(first) + second = second.lstrip() + if second: + secondary_opts.append(second.lstrip()) + if first == second: + raise ValueError( + f"Boolean option {decl!r} cannot use the" + " same flag for true/false." + ) + else: + possible_names.append(split_opt(decl)) + opts.append(decl) + + if name is None and possible_names: + possible_names.sort(key=lambda x: -len(x[0])) # group long options first + name = possible_names[0][1].replace("-", "_").lower() + if not name.isidentifier(): + name = None + + if name is None: + if not expose_value: + return None, opts, secondary_opts + raise TypeError("Could not determine name for option") + + if not opts and not secondary_opts: + raise TypeError( + f"No options defined but a name was passed ({name})." + " Did you mean to declare an argument instead? Did" + f" you mean to pass '--{name}'?" 
+ ) + + return name, opts, secondary_opts + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + if self.multiple: + action = "append" + elif self.count: + action = "count" + else: + action = "store" + + if self.is_flag: + action = f"{action}_const" + + if self.is_bool_flag and self.secondary_opts: + parser.add_option( + obj=self, opts=self.opts, dest=self.name, action=action, const=True + ) + parser.add_option( + obj=self, + opts=self.secondary_opts, + dest=self.name, + action=action, + const=False, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + const=self.flag_value, + ) + else: + parser.add_option( + obj=self, + opts=self.opts, + dest=self.name, + action=action, + nargs=self.nargs, + ) + + def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: + if self.hidden: + return None + + any_prefix_is_slash = False + + def _write_opts(opts: t.Sequence[str]) -> str: + nonlocal any_prefix_is_slash + + rv, any_slashes = join_options(opts) + + if any_slashes: + any_prefix_is_slash = True + + if not self.is_flag and not self.count: + rv += f" {self.make_metavar()}" + + return rv + + rv = [_write_opts(self.opts)] + + if self.secondary_opts: + rv.append(_write_opts(self.secondary_opts)) + + help = self.help or "" + extra = [] + + if self.show_envvar: + envvar = self.envvar + + if envvar is None: + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + + if envvar is not None: + var_str = ( + envvar + if isinstance(envvar, str) + else ", ".join(str(d) for d in envvar) + ) + extra.append(_("env var: {var}").format(var=var_str)) + + # Temporarily enable resilient parsing to avoid type casting + # failing for the default. Might be possible to extend this to + # help formatting in general. + resilient = ctx.resilient_parsing + ctx.resilient_parsing = True + + try: + default_value = self.get_default(ctx, call=False) + finally: + ctx.resilient_parsing = resilient + + show_default = False + show_default_is_str = False + + if self.show_default is not None: + if isinstance(self.show_default, str): + show_default_is_str = show_default = True + else: + show_default = self.show_default + elif ctx.show_default is not None: + show_default = ctx.show_default + + if show_default_is_str or (show_default and (default_value is not None)): + if show_default_is_str: + default_string = f"({self.show_default})" + elif isinstance(default_value, (list, tuple)): + default_string = ", ".join(str(d) for d in default_value) + elif inspect.isfunction(default_value): + default_string = _("(dynamic)") + elif self.is_bool_flag and self.secondary_opts: + # For boolean flags that have distinct True/False opts, + # use the opt without prefix instead of the value. 
+ default_string = split_opt( + (self.opts if self.default else self.secondary_opts)[0] + )[1] + elif self.is_bool_flag and not self.secondary_opts and not default_value: + default_string = "" + else: + default_string = str(default_value) + + if default_string: + extra.append(_("default: {default}").format(default=default_string)) + + if ( + isinstance(self.type, types._NumberRangeBase) + # skip count with default range type + and not (self.count and self.type.min == 0 and self.type.max is None) + ): + range_str = self.type._describe_range() + + if range_str: + extra.append(range_str) + + if self.required: + extra.append(_("required")) + + if extra: + extra_str = "; ".join(extra) + help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" + + return ("; " if any_prefix_is_slash else " / ").join(rv), help + + @t.overload + def get_default( + self, ctx: Context, call: "te.Literal[True]" = True + ) -> t.Optional[t.Any]: + ... + + @t.overload + def get_default( + self, ctx: Context, call: bool = ... + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + ... + + def get_default( + self, ctx: Context, call: bool = True + ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: + # If we're a non boolean flag our default is more complex because + # we need to look at all flags in the same group to figure out + # if we're the default one in which case we return the flag + # value as default. + if self.is_flag and not self.is_bool_flag: + for param in ctx.command.params: + if param.name == self.name and param.default: + return t.cast(Option, param).flag_value + + return None + + return super().get_default(ctx, call=call) + + def prompt_for_value(self, ctx: Context) -> t.Any: + """This is an alternative flow that can be activated in the full + value processing if a value does not exist. It will prompt the + user until a valid value exists and then returns the processed + value as result. + """ + assert self.prompt is not None + + # Calculate the default before prompting anything to be stable. + default = self.get_default(ctx) + + # If this is a prompt for a flag we need to handle this + # differently. + if self.is_bool_flag: + return confirm(self.prompt, default) + + return prompt( + self.prompt, + default=default, + type=self.type, + hide_input=self.hide_input, + show_choices=self.show_choices, + confirmation_prompt=self.confirmation_prompt, + value_proc=lambda x: self.process_value(ctx, x), + ) + + def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: + rv = super().resolve_envvar_value(ctx) + + if rv is not None: + return rv + + if ( + self.allow_from_autoenv + and ctx.auto_envvar_prefix is not None + and self.name is not None + ): + envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" + rv = os.environ.get(envvar) + + if rv: + return rv + + return None + + def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: + rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) + + if rv is None: + return None + + value_depth = (self.nargs != 1) + bool(self.multiple) + + if value_depth > 0: + rv = self.type.split_envvar_value(rv) + + if self.multiple and self.nargs != 1: + rv = batch(rv, self.nargs) + + return rv + + def consume_value( + self, ctx: Context, opts: t.Mapping[str, "Parameter"] + ) -> t.Tuple[t.Any, ParameterSource]: + value, source = super().consume_value(ctx, opts) + + # The parser will emit a sentinel value if the option can be + # given as a flag without a value. This is different from None + # to distinguish from the flag not being given at all. 
+ if value is _flag_needs_value: + if self.prompt is not None and not ctx.resilient_parsing: + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + else: + value = self.flag_value + source = ParameterSource.COMMANDLINE + + elif ( + self.multiple + and value is not None + and any(v is _flag_needs_value for v in value) + ): + value = [self.flag_value if v is _flag_needs_value else v for v in value] + source = ParameterSource.COMMANDLINE + + # The value wasn't set, or used the param's default, prompt if + # prompting is enabled. + elif ( + source in {None, ParameterSource.DEFAULT} + and self.prompt is not None + and (self.required or self.prompt_required) + and not ctx.resilient_parsing + ): + value = self.prompt_for_value(ctx) + source = ParameterSource.PROMPT + + return value, source + + +class Argument(Parameter): + """Arguments are positional parameters to a command. They generally + provide fewer features than options but can have infinite ``nargs`` + and are required by default. + + All parameters are passed onwards to the constructor of :class:`Parameter`. + """ + + param_type_name = "argument" + + def __init__( + self, + param_decls: t.Sequence[str], + required: t.Optional[bool] = None, + **attrs: t.Any, + ) -> None: + if required is None: + if attrs.get("default") is not None: + required = False + else: + required = attrs.get("nargs", 1) > 0 + + if "multiple" in attrs: + raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") + + super().__init__(param_decls, required=required, **attrs) + + if __debug__: + if self.default is not None and self.nargs == -1: + raise TypeError("'default' is not supported for nargs=-1.") + + @property + def human_readable_name(self) -> str: + if self.metavar is not None: + return self.metavar + return self.name.upper() # type: ignore + + def make_metavar(self) -> str: + if self.metavar is not None: + return self.metavar + var = self.type.get_metavar(self) + if not var: + var = self.name.upper() # type: ignore + if not self.required: + var = f"[{var}]" + if self.nargs != 1: + var += "..." + return var + + def _parse_decls( + self, decls: t.Sequence[str], expose_value: bool + ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: + if not decls: + if not expose_value: + return None, [], [] + raise TypeError("Could not determine name for argument") + if len(decls) == 1: + name = arg = decls[0] + name = name.replace("-", "_").lower() + else: + raise TypeError( + "Arguments take exactly one parameter declaration, got" + f" {len(decls)}." 
+ ) + return name, [arg], [] + + def get_usage_pieces(self, ctx: Context) -> t.List[str]: + return [self.make_metavar()] + + def get_error_hint(self, ctx: Context) -> str: + return f"'{self.make_metavar()}'" + + def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: + parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) diff --git a/backend/test/lib/python3.8/site-packages/click/decorators.py b/backend/test/lib/python3.8/site-packages/click/decorators.py new file mode 100644 index 0000000000000000000000000000000000000000..d9bba9502ca353bca5136f43c92436ff584f06e1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/decorators.py @@ -0,0 +1,561 @@ +import inspect +import types +import typing as t +from functools import update_wrapper +from gettext import gettext as _ + +from .core import Argument +from .core import Command +from .core import Context +from .core import Group +from .core import Option +from .core import Parameter +from .globals import get_current_context +from .utils import echo + +if t.TYPE_CHECKING: + import typing_extensions as te + + P = te.ParamSpec("P") + +R = t.TypeVar("R") +T = t.TypeVar("T") +_AnyCallable = t.Callable[..., t.Any] +FC = t.TypeVar("FC", bound=t.Union[_AnyCallable, Command]) + + +def pass_context(f: "t.Callable[te.Concatenate[Context, P], R]") -> "t.Callable[P, R]": + """Marks a callback as wanting to receive the current context + object as first argument. + """ + + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + return f(get_current_context(), *args, **kwargs) + + return update_wrapper(new_func, f) + + +def pass_obj(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]": + """Similar to :func:`pass_context`, but only pass the object on the + context onwards (:attr:`Context.obj`). This is useful if that object + represents the state of a nested system. + """ + + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + return f(get_current_context().obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + +def make_pass_decorator( + object_type: t.Type[T], ensure: bool = False +) -> t.Callable[["t.Callable[te.Concatenate[T, P], R]"], "t.Callable[P, R]"]: + """Given an object type this creates a decorator that will work + similar to :func:`pass_obj` but instead of passing the object of the + current context, it will find the innermost context of type + :func:`object_type`. + + This generates a decorator that works roughly like this:: + + from functools import update_wrapper + + def decorator(f): + @pass_context + def new_func(ctx, *args, **kwargs): + obj = ctx.find_object(object_type) + return ctx.invoke(f, obj, *args, **kwargs) + return update_wrapper(new_func, f) + return decorator + + :param object_type: the type of the object to pass. + :param ensure: if set to `True`, a new object will be created and + remembered on the context if it's not there yet. + """ + + def decorator(f: "t.Callable[te.Concatenate[T, P], R]") -> "t.Callable[P, R]": + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": + ctx = get_current_context() + + obj: t.Optional[T] + if ensure: + obj = ctx.ensure_object(object_type) + else: + obj = ctx.find_object(object_type) + + if obj is None: + raise RuntimeError( + "Managed to invoke callback without a context" + f" object of type {object_type.__name__!r}" + " existing." 
+ ) + + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + return decorator # type: ignore[return-value] + + +def pass_meta_key( + key: str, *, doc_description: t.Optional[str] = None +) -> "t.Callable[[t.Callable[te.Concatenate[t.Any, P], R]], t.Callable[P, R]]": + """Create a decorator that passes a key from + :attr:`click.Context.meta` as the first argument to the decorated + function. + + :param key: Key in ``Context.meta`` to pass. + :param doc_description: Description of the object being passed, + inserted into the decorator's docstring. Defaults to "the 'key' + key from Context.meta". + + .. versionadded:: 8.0 + """ + + def decorator(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]": + def new_func(*args: "P.args", **kwargs: "P.kwargs") -> R: + ctx = get_current_context() + obj = ctx.meta[key] + return ctx.invoke(f, obj, *args, **kwargs) + + return update_wrapper(new_func, f) + + if doc_description is None: + doc_description = f"the {key!r} key from :attr:`click.Context.meta`" + + decorator.__doc__ = ( + f"Decorator that passes {doc_description} as the first argument" + " to the decorated function." + ) + return decorator # type: ignore[return-value] + + +CmdType = t.TypeVar("CmdType", bound=Command) + + +# variant: no call, directly as decorator for a function. +@t.overload +def command(name: _AnyCallable) -> Command: + ... + + +# variant: with positional name and with positional or keyword cls argument: +# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...) +@t.overload +def command( + name: t.Optional[str], + cls: t.Type[CmdType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], CmdType]: + ... + + +# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...) +@t.overload +def command( + name: None = None, + *, + cls: t.Type[CmdType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], CmdType]: + ... + + +# variant: with optional string name, no cls argument provided. +@t.overload +def command( + name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any +) -> t.Callable[[_AnyCallable], Command]: + ... + + +def command( + name: t.Union[t.Optional[str], _AnyCallable] = None, + cls: t.Optional[t.Type[CmdType]] = None, + **attrs: t.Any, +) -> t.Union[Command, t.Callable[[_AnyCallable], t.Union[Command, CmdType]]]: + r"""Creates a new :class:`Command` and uses the decorated function as + callback. This will also automatically attach all decorated + :func:`option`\s and :func:`argument`\s as parameters to the command. + + The name of the command defaults to the name of the function with + underscores replaced by dashes. If you want to change that, you can + pass the intended name as the first argument. + + All keyword arguments are forwarded to the underlying command class. + For the ``params`` argument, any decorated params are appended to + the end of the list. + + Once decorated the function turns into a :class:`Command` instance + that can be invoked as a command line utility or be attached to a + command :class:`Group`. + + :param name: the name of the command. This defaults to the function + name with underscores replaced by dashes. + :param cls: the command class to instantiate. This defaults to + :class:`Command`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + + .. versionchanged:: 8.1 + The ``params`` argument can be used. Decorated params are + appended to the end of the list. 
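+
+    A minimal sketch (the command name is illustrative)::
+
+        @command("sync")
+        def do_sync():
+            '''Sync local state to the server.'''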
+ """ + + func: t.Optional[t.Callable[[_AnyCallable], t.Any]] = None + + if callable(name): + func = name + name = None + assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." + assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." + + if cls is None: + cls = t.cast(t.Type[CmdType], Command) + + def decorator(f: _AnyCallable) -> CmdType: + if isinstance(f, Command): + raise TypeError("Attempted to convert a callback into a command twice.") + + attr_params = attrs.pop("params", None) + params = attr_params if attr_params is not None else [] + + try: + decorator_params = f.__click_params__ # type: ignore + except AttributeError: + pass + else: + del f.__click_params__ # type: ignore + params.extend(reversed(decorator_params)) + + if attrs.get("help") is None: + attrs["help"] = f.__doc__ + + if t.TYPE_CHECKING: + assert cls is not None + assert not callable(name) + + cmd = cls( + name=name or f.__name__.lower().replace("_", "-"), + callback=f, + params=params, + **attrs, + ) + cmd.__doc__ = f.__doc__ + return cmd + + if func is not None: + return decorator(func) + + return decorator + + +GrpType = t.TypeVar("GrpType", bound=Group) + + +# variant: no call, directly as decorator for a function. +@t.overload +def group(name: _AnyCallable) -> Group: + ... + + +# variant: with positional name and with positional or keyword cls argument: +# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...) +@t.overload +def group( + name: t.Optional[str], + cls: t.Type[GrpType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], GrpType]: + ... + + +# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...) +@t.overload +def group( + name: None = None, + *, + cls: t.Type[GrpType], + **attrs: t.Any, +) -> t.Callable[[_AnyCallable], GrpType]: + ... + + +# variant: with optional string name, no cls argument provided. +@t.overload +def group( + name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any +) -> t.Callable[[_AnyCallable], Group]: + ... + + +def group( + name: t.Union[str, _AnyCallable, None] = None, + cls: t.Optional[t.Type[GrpType]] = None, + **attrs: t.Any, +) -> t.Union[Group, t.Callable[[_AnyCallable], t.Union[Group, GrpType]]]: + """Creates a new :class:`Group` with a function as callback. This + works otherwise the same as :func:`command` just that the `cls` + parameter is set to :class:`Group`. + + .. versionchanged:: 8.1 + This decorator can be applied without parentheses. + """ + if cls is None: + cls = t.cast(t.Type[GrpType], Group) + + if callable(name): + return command(cls=cls, **attrs)(name) + + return command(name, cls, **attrs) + + +def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None: + if isinstance(f, Command): + f.params.append(param) + else: + if not hasattr(f, "__click_params__"): + f.__click_params__ = [] # type: ignore + + f.__click_params__.append(param) # type: ignore + + +def argument( + *param_decls: str, cls: t.Optional[t.Type[Argument]] = None, **attrs: t.Any +) -> t.Callable[[FC], FC]: + """Attaches an argument to the command. All positional arguments are + passed as parameter declarations to :class:`Argument`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Argument` instance manually + and attaching it to the :attr:`Command.params` list. + + For the default argument class, refer to :class:`Argument` and + :class:`Parameter` for descriptions of parameters. + + :param cls: the argument class to instantiate. 
This defaults to + :class:`Argument`. + :param param_decls: Passed as positional arguments to the constructor of + ``cls``. + :param attrs: Passed as keyword arguments to the constructor of ``cls``. + """ + if cls is None: + cls = Argument + + def decorator(f: FC) -> FC: + _param_memo(f, cls(param_decls, **attrs)) + return f + + return decorator + + +def option( + *param_decls: str, cls: t.Optional[t.Type[Option]] = None, **attrs: t.Any +) -> t.Callable[[FC], FC]: + """Attaches an option to the command. All positional arguments are + passed as parameter declarations to :class:`Option`; all keyword + arguments are forwarded unchanged (except ``cls``). + This is equivalent to creating an :class:`Option` instance manually + and attaching it to the :attr:`Command.params` list. + + For the default option class, refer to :class:`Option` and + :class:`Parameter` for descriptions of parameters. + + :param cls: the option class to instantiate. This defaults to + :class:`Option`. + :param param_decls: Passed as positional arguments to the constructor of + ``cls``. + :param attrs: Passed as keyword arguments to the constructor of ``cls``. + """ + if cls is None: + cls = Option + + def decorator(f: FC) -> FC: + _param_memo(f, cls(param_decls, **attrs)) + return f + + return decorator + + +def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--yes`` option which shows a prompt before continuing if + not passed. If the prompt is declined, the program will exit. + + :param param_decls: One or more option names. Defaults to the single + value ``"--yes"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value: + ctx.abort() + + if not param_decls: + param_decls = ("--yes",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("callback", callback) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("prompt", "Do you want to continue?") + kwargs.setdefault("help", "Confirm the action without prompting.") + return option(*param_decls, **kwargs) + + +def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--password`` option which prompts for a password, hiding + input and asking to enter the value again for confirmation. + + :param param_decls: One or more option names. Defaults to the single + value ``"--password"``. + :param kwargs: Extra arguments are passed to :func:`option`. + """ + if not param_decls: + param_decls = ("--password",) + + kwargs.setdefault("prompt", True) + kwargs.setdefault("confirmation_prompt", True) + kwargs.setdefault("hide_input", True) + return option(*param_decls, **kwargs) + + +def version_option( + version: t.Optional[str] = None, + *param_decls: str, + package_name: t.Optional[str] = None, + prog_name: t.Optional[str] = None, + message: t.Optional[str] = None, + **kwargs: t.Any, +) -> t.Callable[[FC], FC]: + """Add a ``--version`` option which immediately prints the version + number and exits the program. + + If ``version`` is not provided, Click will try to detect it using + :func:`importlib.metadata.version` to get the version for the + ``package_name``. On Python < 3.8, the ``importlib_metadata`` + backport must be installed. + + If ``package_name`` is not provided, Click will try to detect it by + inspecting the stack frames. This will be used to detect the + version, so it must match the name of the installed package. + + :param version: The version number to show. 
If not provided, Click + will try to detect it. + :param param_decls: One or more option names. Defaults to the single + value ``"--version"``. + :param package_name: The package name to detect the version from. If + not provided, Click will try to detect it. + :param prog_name: The name of the CLI to show in the message. If not + provided, it will be detected from the command. + :param message: The message to show. The values ``%(prog)s``, + ``%(package)s``, and ``%(version)s`` are available. Defaults to + ``"%(prog)s, version %(version)s"``. + :param kwargs: Extra arguments are passed to :func:`option`. + :raise RuntimeError: ``version`` could not be detected. + + .. versionchanged:: 8.0 + Add the ``package_name`` parameter, and the ``%(package)s`` + value for messages. + + .. versionchanged:: 8.0 + Use :mod:`importlib.metadata` instead of ``pkg_resources``. The + version is detected based on the package name, not the entry + point name. The Python package name must match the installed + package name, or be passed with ``package_name=``. + """ + if message is None: + message = _("%(prog)s, version %(version)s") + + if version is None and package_name is None: + frame = inspect.currentframe() + f_back = frame.f_back if frame is not None else None + f_globals = f_back.f_globals if f_back is not None else None + # break reference cycle + # https://docs.python.org/3/library/inspect.html#the-interpreter-stack + del frame + + if f_globals is not None: + package_name = f_globals.get("__name__") + + if package_name == "__main__": + package_name = f_globals.get("__package__") + + if package_name: + package_name = package_name.partition(".")[0] + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + nonlocal prog_name + nonlocal version + + if prog_name is None: + prog_name = ctx.find_root().info_name + + if version is None and package_name is not None: + metadata: t.Optional[types.ModuleType] + + try: + from importlib import metadata # type: ignore + except ImportError: + # Python < 3.8 + import importlib_metadata as metadata # type: ignore + + try: + version = metadata.version(package_name) # type: ignore + except metadata.PackageNotFoundError: # type: ignore + raise RuntimeError( + f"{package_name!r} is not installed. Try passing" + " 'package_name' instead." + ) from None + + if version is None: + raise RuntimeError( + f"Could not determine the version for {package_name!r} automatically." + ) + + echo( + message % {"prog": prog_name, "package": package_name, "version": version}, + color=ctx.color, + ) + ctx.exit() + + if not param_decls: + param_decls = ("--version",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show the version and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) + + +def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: + """Add a ``--help`` option which immediately prints the help page + and exits the program. + + This is usually unnecessary, as the ``--help`` option is added to + each command automatically unless ``add_help_option=False`` is + passed. + + :param param_decls: One or more option names. Defaults to the single + value ``"--help"``. + :param kwargs: Extra arguments are passed to :func:`option`. 
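+
+    A usage sketch (the short flag is an illustrative choice)::
+
+        @click.command(add_help_option=False)
+        @click.help_option("-h", "--help")
+        def cli():
+            ...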
+ """ + + def callback(ctx: Context, param: Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + + echo(ctx.get_help(), color=ctx.color) + ctx.exit() + + if not param_decls: + param_decls = ("--help",) + + kwargs.setdefault("is_flag", True) + kwargs.setdefault("expose_value", False) + kwargs.setdefault("is_eager", True) + kwargs.setdefault("help", _("Show this message and exit.")) + kwargs["callback"] = callback + return option(*param_decls, **kwargs) diff --git a/backend/test/lib/python3.8/site-packages/click/exceptions.py b/backend/test/lib/python3.8/site-packages/click/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..fe68a3613f74e5e82da4e3eedc7d9451977838dd --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/exceptions.py @@ -0,0 +1,288 @@ +import typing as t +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import get_text_stderr +from .utils import echo +from .utils import format_filename + +if t.TYPE_CHECKING: + from .core import Command + from .core import Context + from .core import Parameter + + +def _join_param_hints( + param_hint: t.Optional[t.Union[t.Sequence[str], str]] +) -> t.Optional[str]: + if param_hint is not None and not isinstance(param_hint, str): + return " / ".join(repr(x) for x in param_hint) + + return param_hint + + +class ClickException(Exception): + """An exception that Click can handle and show to the user.""" + + #: The exit code for this exception. + exit_code = 1 + + def __init__(self, message: str) -> None: + super().__init__(message) + self.message = message + + def format_message(self) -> str: + return self.message + + def __str__(self) -> str: + return self.message + + def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None: + if file is None: + file = get_text_stderr() + + echo(_("Error: {message}").format(message=self.format_message()), file=file) + + +class UsageError(ClickException): + """An internal exception that signals a usage error. This typically + aborts any further handling. + + :param message: the error message to display. + :param ctx: optionally the context that caused this error. Click will + fill in the context automatically in some situations. + """ + + exit_code = 2 + + def __init__(self, message: str, ctx: t.Optional["Context"] = None) -> None: + super().__init__(message) + self.ctx = ctx + self.cmd: t.Optional["Command"] = self.ctx.command if self.ctx else None + + def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None: + if file is None: + file = get_text_stderr() + color = None + hint = "" + if ( + self.ctx is not None + and self.ctx.command.get_help_option(self.ctx) is not None + ): + hint = _("Try '{command} {option}' for help.").format( + command=self.ctx.command_path, option=self.ctx.help_option_names[0] + ) + hint = f"{hint}\n" + if self.ctx is not None: + color = self.ctx.color + echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) + echo( + _("Error: {message}").format(message=self.format_message()), + file=file, + color=color, + ) + + +class BadParameter(UsageError): + """An exception that formats out a standardized error message for a + bad parameter. This is useful when thrown from a callback or type as + Click will attach contextual information to it (for instance, which + parameter it is). + + .. versionadded:: 2.0 + + :param param: the parameter object that caused this error. This can + be left out, and Click will attach this info itself + if possible. 
+ :param param_hint: a string that shows up as parameter name. This + can be used as alternative to `param` in cases + where custom validation should happen. If it is + a string it's used as such, if it's a list then + each item is quoted and separated. + """ + + def __init__( + self, + message: str, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + ) -> None: + super().__init__(message, ctx) + self.param = param + self.param_hint = param_hint + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + return _("Invalid value: {message}").format(message=self.message) + + return _("Invalid value for {param_hint}: {message}").format( + param_hint=_join_param_hints(param_hint), message=self.message + ) + + +class MissingParameter(BadParameter): + """Raised if click required an option or argument but it was not + provided when invoking the script. + + .. versionadded:: 4.0 + + :param param_type: a string that indicates the type of the parameter. + The default is to inherit the parameter type from + the given `param`. Valid values are ``'parameter'``, + ``'option'`` or ``'argument'``. + """ + + def __init__( + self, + message: t.Optional[str] = None, + ctx: t.Optional["Context"] = None, + param: t.Optional["Parameter"] = None, + param_hint: t.Optional[str] = None, + param_type: t.Optional[str] = None, + ) -> None: + super().__init__(message or "", ctx, param, param_hint) + self.param_type = param_type + + def format_message(self) -> str: + if self.param_hint is not None: + param_hint: t.Optional[str] = self.param_hint + elif self.param is not None: + param_hint = self.param.get_error_hint(self.ctx) # type: ignore + else: + param_hint = None + + param_hint = _join_param_hints(param_hint) + param_hint = f" {param_hint}" if param_hint else "" + + param_type = self.param_type + if param_type is None and self.param is not None: + param_type = self.param.param_type_name + + msg = self.message + if self.param is not None: + msg_extra = self.param.type.get_missing_message(self.param) + if msg_extra: + if msg: + msg += f". {msg_extra}" + else: + msg = msg_extra + + msg = f" {msg}" if msg else "" + + # Translate param_type for known types. + if param_type == "argument": + missing = _("Missing argument") + elif param_type == "option": + missing = _("Missing option") + elif param_type == "parameter": + missing = _("Missing parameter") + else: + missing = _("Missing {param_type}").format(param_type=param_type) + + return f"{missing}{param_hint}.{msg}" + + def __str__(self) -> str: + if not self.message: + param_name = self.param.name if self.param else None + return _("Missing parameter: {param_name}").format(param_name=param_name) + else: + return self.message + + +class NoSuchOption(UsageError): + """Raised if click attempted to handle an option that does not + exist. + + .. 
versionadded:: 4.0 + """ + + def __init__( + self, + option_name: str, + message: t.Optional[str] = None, + possibilities: t.Optional[t.Sequence[str]] = None, + ctx: t.Optional["Context"] = None, + ) -> None: + if message is None: + message = _("No such option: {name}").format(name=option_name) + + super().__init__(message, ctx) + self.option_name = option_name + self.possibilities = possibilities + + def format_message(self) -> str: + if not self.possibilities: + return self.message + + possibility_str = ", ".join(sorted(self.possibilities)) + suggest = ngettext( + "Did you mean {possibility}?", + "(Possible options: {possibilities})", + len(self.possibilities), + ).format(possibility=possibility_str, possibilities=possibility_str) + return f"{self.message} {suggest}" + + +class BadOptionUsage(UsageError): + """Raised if an option is generally supplied but the use of the option + was incorrect. This is for instance raised if the number of arguments + for an option is not correct. + + .. versionadded:: 4.0 + + :param option_name: the name of the option being used incorrectly. + """ + + def __init__( + self, option_name: str, message: str, ctx: t.Optional["Context"] = None + ) -> None: + super().__init__(message, ctx) + self.option_name = option_name + + +class BadArgumentUsage(UsageError): + """Raised if an argument is generally supplied but the use of the argument + was incorrect. This is for instance raised if the number of values + for an argument is not correct. + + .. versionadded:: 6.0 + """ + + +class FileError(ClickException): + """Raised if a file cannot be opened.""" + + def __init__(self, filename: str, hint: t.Optional[str] = None) -> None: + if hint is None: + hint = _("unknown error") + + super().__init__(hint) + self.ui_filename: str = format_filename(filename) + self.filename = filename + + def format_message(self) -> str: + return _("Could not open file {filename!r}: {message}").format( + filename=self.ui_filename, message=self.message + ) + + +class Abort(RuntimeError): + """An internal signalling exception that signals Click to abort.""" + + +class Exit(RuntimeError): + """An exception that indicates that the application should exit with some + status code. + + :param code: the status code to exit with. + """ + + __slots__ = ("exit_code",) + + def __init__(self, code: int = 0) -> None: + self.exit_code: int = code diff --git a/backend/test/lib/python3.8/site-packages/click/formatting.py b/backend/test/lib/python3.8/site-packages/click/formatting.py new file mode 100644 index 0000000000000000000000000000000000000000..ddd2a2f825f206164eb9efb0a5c41528365beb85 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/formatting.py @@ -0,0 +1,301 @@ +import typing as t +from contextlib import contextmanager +from gettext import gettext as _ + +from ._compat import term_len +from .parser import split_opt + +# Can force a width. 
This is used by the test system +FORCED_WIDTH: t.Optional[int] = None + + +def measure_table(rows: t.Iterable[t.Tuple[str, str]]) -> t.Tuple[int, ...]: + widths: t.Dict[int, int] = {} + + for row in rows: + for idx, col in enumerate(row): + widths[idx] = max(widths.get(idx, 0), term_len(col)) + + return tuple(y for x, y in sorted(widths.items())) + + +def iter_rows( + rows: t.Iterable[t.Tuple[str, str]], col_count: int +) -> t.Iterator[t.Tuple[str, ...]]: + for row in rows: + yield row + ("",) * (col_count - len(row)) + + +def wrap_text( + text: str, + width: int = 78, + initial_indent: str = "", + subsequent_indent: str = "", + preserve_paragraphs: bool = False, +) -> str: + """A helper function that intelligently wraps text. By default, it + assumes that it operates on a single paragraph of text but if the + `preserve_paragraphs` parameter is provided it will intelligently + handle paragraphs (defined by two empty lines). + + If paragraphs are handled, a paragraph can be prefixed with an empty + line containing the ``\\b`` character (``\\x08``) to indicate that + no rewrapping should happen in that block. + + :param text: the text that should be rewrapped. + :param width: the maximum width for the text. + :param initial_indent: the initial indent that should be placed on the + first line as a string. + :param subsequent_indent: the indent string that should be placed on + each consecutive line. + :param preserve_paragraphs: if this flag is set then the wrapping will + intelligently handle paragraphs. + """ + from ._textwrap import TextWrapper + + text = text.expandtabs() + wrapper = TextWrapper( + width, + initial_indent=initial_indent, + subsequent_indent=subsequent_indent, + replace_whitespace=False, + ) + if not preserve_paragraphs: + return wrapper.fill(text) + + p: t.List[t.Tuple[int, bool, str]] = [] + buf: t.List[str] = [] + indent = None + + def _flush_par() -> None: + if not buf: + return + if buf[0].strip() == "\b": + p.append((indent or 0, True, "\n".join(buf[1:]))) + else: + p.append((indent or 0, False, " ".join(buf))) + del buf[:] + + for line in text.splitlines(): + if not line: + _flush_par() + indent = None + else: + if indent is None: + orig_len = term_len(line) + line = line.lstrip() + indent = orig_len - term_len(line) + buf.append(line) + _flush_par() + + rv = [] + for indent, raw, text in p: + with wrapper.extra_indent(" " * indent): + if raw: + rv.append(wrapper.indent_only(text)) + else: + rv.append(wrapper.fill(text)) + + return "\n\n".join(rv) + + +class HelpFormatter: + """This class helps with formatting text-based help pages. It's + usually just needed for very special internal cases, but it's also + exposed so that developers can write their own fancy outputs. + + At present, it always writes into memory. + + :param indent_increment: the additional increment for each level. + :param width: the width for the text. This defaults to the terminal + width clamped to a maximum of 78. 
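+
+    A minimal usage sketch::
+
+        formatter = HelpFormatter(width=60)
+        formatter.write_usage("tool", "[OPTIONS] PATH")
+        text = formatter.getvalue()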
+ """ + + def __init__( + self, + indent_increment: int = 2, + width: t.Optional[int] = None, + max_width: t.Optional[int] = None, + ) -> None: + import shutil + + self.indent_increment = indent_increment + if max_width is None: + max_width = 80 + if width is None: + width = FORCED_WIDTH + if width is None: + width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) + self.width = width + self.current_indent = 0 + self.buffer: t.List[str] = [] + + def write(self, string: str) -> None: + """Writes a unicode string into the internal buffer.""" + self.buffer.append(string) + + def indent(self) -> None: + """Increases the indentation.""" + self.current_indent += self.indent_increment + + def dedent(self) -> None: + """Decreases the indentation.""" + self.current_indent -= self.indent_increment + + def write_usage( + self, prog: str, args: str = "", prefix: t.Optional[str] = None + ) -> None: + """Writes a usage line into the buffer. + + :param prog: the program name. + :param args: whitespace separated list of arguments. + :param prefix: The prefix for the first line. Defaults to + ``"Usage: "``. + """ + if prefix is None: + prefix = f"{_('Usage:')} " + + usage_prefix = f"{prefix:>{self.current_indent}}{prog} " + text_width = self.width - self.current_indent + + if text_width >= (term_len(usage_prefix) + 20): + # The arguments will fit to the right of the prefix. + indent = " " * term_len(usage_prefix) + self.write( + wrap_text( + args, + text_width, + initial_indent=usage_prefix, + subsequent_indent=indent, + ) + ) + else: + # The prefix is too long, put the arguments on the next line. + self.write(usage_prefix) + self.write("\n") + indent = " " * (max(self.current_indent, term_len(prefix)) + 4) + self.write( + wrap_text( + args, text_width, initial_indent=indent, subsequent_indent=indent + ) + ) + + self.write("\n") + + def write_heading(self, heading: str) -> None: + """Writes a heading into the buffer.""" + self.write(f"{'':>{self.current_indent}}{heading}:\n") + + def write_paragraph(self) -> None: + """Writes a paragraph into the buffer.""" + if self.buffer: + self.write("\n") + + def write_text(self, text: str) -> None: + """Writes re-indented text into the buffer. This rewraps and + preserves paragraphs. + """ + indent = " " * self.current_indent + self.write( + wrap_text( + text, + self.width, + initial_indent=indent, + subsequent_indent=indent, + preserve_paragraphs=True, + ) + ) + self.write("\n") + + def write_dl( + self, + rows: t.Sequence[t.Tuple[str, str]], + col_max: int = 30, + col_spacing: int = 2, + ) -> None: + """Writes a definition list into the buffer. This is how options + and commands are usually formatted. + + :param rows: a list of two item tuples for the terms and values. + :param col_max: the maximum width of the first column. + :param col_spacing: the number of spaces between the first and + second column. 
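+
+        A sketch of typical input::
+
+            formatter.write_dl(
+                [
+                    ("--verbose", "Enable verbose output."),
+                    ("--help", "Show this message and exit."),
+                ]
+            )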
+ """ + rows = list(rows) + widths = measure_table(rows) + if len(widths) != 2: + raise TypeError("Expected two columns for definition list") + + first_col = min(widths[0], col_max) + col_spacing + + for first, second in iter_rows(rows, len(widths)): + self.write(f"{'':>{self.current_indent}}{first}") + if not second: + self.write("\n") + continue + if term_len(first) <= first_col - col_spacing: + self.write(" " * (first_col - term_len(first))) + else: + self.write("\n") + self.write(" " * (first_col + self.current_indent)) + + text_width = max(self.width - first_col - 2, 10) + wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) + lines = wrapped_text.splitlines() + + if lines: + self.write(f"{lines[0]}\n") + + for line in lines[1:]: + self.write(f"{'':>{first_col + self.current_indent}}{line}\n") + else: + self.write("\n") + + @contextmanager + def section(self, name: str) -> t.Iterator[None]: + """Helpful context manager that writes a paragraph, a heading, + and the indents. + + :param name: the section name that is written as heading. + """ + self.write_paragraph() + self.write_heading(name) + self.indent() + try: + yield + finally: + self.dedent() + + @contextmanager + def indentation(self) -> t.Iterator[None]: + """A context manager that increases the indentation.""" + self.indent() + try: + yield + finally: + self.dedent() + + def getvalue(self) -> str: + """Returns the buffer contents.""" + return "".join(self.buffer) + + +def join_options(options: t.Sequence[str]) -> t.Tuple[str, bool]: + """Given a list of option strings this joins them in the most appropriate + way and returns them in the form ``(formatted_string, + any_prefix_is_slash)`` where the second item in the tuple is a flag that + indicates if any of the option prefixes was a slash. + """ + rv = [] + any_prefix_is_slash = False + + for opt in options: + prefix = split_opt(opt)[0] + + if prefix == "/": + any_prefix_is_slash = True + + rv.append((len(prefix), opt)) + + rv.sort(key=lambda x: x[0]) + return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/backend/test/lib/python3.8/site-packages/click/globals.py b/backend/test/lib/python3.8/site-packages/click/globals.py new file mode 100644 index 0000000000000000000000000000000000000000..480058f10dd6a8205d1bff0b94de7ae347a7629a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/globals.py @@ -0,0 +1,68 @@ +import typing as t +from threading import local + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Context + +_local = local() + + +@t.overload +def get_current_context(silent: "te.Literal[False]" = False) -> "Context": + ... + + +@t.overload +def get_current_context(silent: bool = ...) -> t.Optional["Context"]: + ... + + +def get_current_context(silent: bool = False) -> t.Optional["Context"]: + """Returns the current click context. This can be used as a way to + access the current context object from anywhere. This is a more implicit + alternative to the :func:`pass_context` decorator. This function is + primarily useful for helpers such as :func:`echo` which might be + interested in changing its behavior based on the current context. + + To push the current context, :meth:`Context.scope` can be used. + + .. versionadded:: 5.0 + + :param silent: if set to `True` the return value is `None` if no context + is available. The default behavior is to raise a + :exc:`RuntimeError`. 
+ """ + try: + return t.cast("Context", _local.stack[-1]) + except (AttributeError, IndexError) as e: + if not silent: + raise RuntimeError("There is no active click context.") from e + + return None + + +def push_context(ctx: "Context") -> None: + """Pushes a new context to the current stack.""" + _local.__dict__.setdefault("stack", []).append(ctx) + + +def pop_context() -> None: + """Removes the top level from the stack.""" + _local.stack.pop() + + +def resolve_color_default(color: t.Optional[bool] = None) -> t.Optional[bool]: + """Internal helper to get the default value of the color flag. If a + value is passed it's returned unchanged, otherwise it's looked up from + the current context. + """ + if color is not None: + return color + + ctx = get_current_context(silent=True) + + if ctx is not None: + return ctx.color + + return None diff --git a/backend/test/lib/python3.8/site-packages/click/parser.py b/backend/test/lib/python3.8/site-packages/click/parser.py new file mode 100644 index 0000000000000000000000000000000000000000..5fa7adfac842bfa5689fd1a41ae4017be1ebff6f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/parser.py @@ -0,0 +1,529 @@ +""" +This module started out as largely a copy paste from the stdlib's +optparse module with the features removed that we do not need from +optparse because we implement them in Click on a higher level (for +instance type handling, help formatting and a lot more). + +The plan is to remove more and more from here over time. + +The reason this is a different module and not optparse from the stdlib +is that there are differences in 2.x and 3.x about the error messages +generated and optparse in the stdlib uses gettext for no good reason +and might cause us issues. + +Click uses parts of optparse written by Gregory P. Ward and maintained +by the Python Software Foundation. This is limited to code in parser.py. + +Copyright 2001-2006 Gregory P. Ward. All rights reserved. +Copyright 2002-2006 Python Software Foundation. All rights reserved. +""" +# This code uses parts of optparse written by Gregory P. Ward and +# maintained by the Python Software Foundation. +# Copyright 2001-2006 Gregory P. Ward +# Copyright 2002-2006 Python Software Foundation +import typing as t +from collections import deque +from gettext import gettext as _ +from gettext import ngettext + +from .exceptions import BadArgumentUsage +from .exceptions import BadOptionUsage +from .exceptions import NoSuchOption +from .exceptions import UsageError + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Argument as CoreArgument + from .core import Context + from .core import Option as CoreOption + from .core import Parameter as CoreParameter + +V = t.TypeVar("V") + +# Sentinel value that indicates an option was passed as a flag without a +# value but is not a flag option. Option.consume_value uses this to +# prompt or use the flag_value. +_flag_needs_value = object() + + +def _unpack_args( + args: t.Sequence[str], nargs_spec: t.Sequence[int] +) -> t.Tuple[t.Sequence[t.Union[str, t.Sequence[t.Optional[str]], None]], t.List[str]]: + """Given an iterable of arguments and an iterable of nargs specifications, + it returns a tuple with all the unpacked arguments at the first index + and all remaining arguments as the second. + + The nargs specification is the number of arguments that should be consumed + or `-1` to indicate that this position should eat up all the remainders. + + Missing items are filled with `None`. 
+ """ + args = deque(args) + nargs_spec = deque(nargs_spec) + rv: t.List[t.Union[str, t.Tuple[t.Optional[str], ...], None]] = [] + spos: t.Optional[int] = None + + def _fetch(c: "te.Deque[V]") -> t.Optional[V]: + try: + if spos is None: + return c.popleft() + else: + return c.pop() + except IndexError: + return None + + while nargs_spec: + nargs = _fetch(nargs_spec) + + if nargs is None: + continue + + if nargs == 1: + rv.append(_fetch(args)) + elif nargs > 1: + x = [_fetch(args) for _ in range(nargs)] + + # If we're reversed, we're pulling in the arguments in reverse, + # so we need to turn them around. + if spos is not None: + x.reverse() + + rv.append(tuple(x)) + elif nargs < 0: + if spos is not None: + raise TypeError("Cannot have two nargs < 0") + + spos = len(rv) + rv.append(None) + + # spos is the position of the wildcard (star). If it's not `None`, + # we fill it with the remainder. + if spos is not None: + rv[spos] = tuple(args) + args = [] + rv[spos + 1 :] = reversed(rv[spos + 1 :]) + + return tuple(rv), list(args) + + +def split_opt(opt: str) -> t.Tuple[str, str]: + first = opt[:1] + if first.isalnum(): + return "", opt + if opt[1:2] == first: + return opt[:2], opt[2:] + return first, opt[1:] + + +def normalize_opt(opt: str, ctx: t.Optional["Context"]) -> str: + if ctx is None or ctx.token_normalize_func is None: + return opt + prefix, opt = split_opt(opt) + return f"{prefix}{ctx.token_normalize_func(opt)}" + + +def split_arg_string(string: str) -> t.List[str]: + """Split an argument string as with :func:`shlex.split`, but don't + fail if the string is incomplete. Ignores a missing closing quote or + incomplete escape sequence and uses the partial token as-is. + + .. code-block:: python + + split_arg_string("example 'my file") + ["example", "my file"] + + split_arg_string("example my\\") + ["example", "my"] + + :param string: String to split. + """ + import shlex + + lex = shlex.shlex(string, posix=True) + lex.whitespace_split = True + lex.commenters = "" + out = [] + + try: + for token in lex: + out.append(token) + except ValueError: + # Raised when end-of-string is reached in an invalid state. Use + # the partial token as-is. The quote or escape character is in + # lex.state, not lex.token. 
+ out.append(lex.token) + + return out + + +class Option: + def __init__( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ): + self._short_opts = [] + self._long_opts = [] + self.prefixes: t.Set[str] = set() + + for opt in opts: + prefix, value = split_opt(opt) + if not prefix: + raise ValueError(f"Invalid start character for option ({opt})") + self.prefixes.add(prefix[0]) + if len(prefix) == 1 and len(value) == 1: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + self.prefixes.add(prefix) + + if action is None: + action = "store" + + self.dest = dest + self.action = action + self.nargs = nargs + self.const = const + self.obj = obj + + @property + def takes_value(self) -> bool: + return self.action in ("store", "append") + + def process(self, value: t.Any, state: "ParsingState") -> None: + if self.action == "store": + state.opts[self.dest] = value # type: ignore + elif self.action == "store_const": + state.opts[self.dest] = self.const # type: ignore + elif self.action == "append": + state.opts.setdefault(self.dest, []).append(value) # type: ignore + elif self.action == "append_const": + state.opts.setdefault(self.dest, []).append(self.const) # type: ignore + elif self.action == "count": + state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore + else: + raise ValueError(f"unknown action '{self.action}'") + state.order.append(self.obj) + + +class Argument: + def __init__(self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1): + self.dest = dest + self.nargs = nargs + self.obj = obj + + def process( + self, + value: t.Union[t.Optional[str], t.Sequence[t.Optional[str]]], + state: "ParsingState", + ) -> None: + if self.nargs > 1: + assert value is not None + holes = sum(1 for x in value if x is None) + if holes == len(value): + value = None + elif holes != 0: + raise BadArgumentUsage( + _("Argument {name!r} takes {nargs} values.").format( + name=self.dest, nargs=self.nargs + ) + ) + + if self.nargs == -1 and self.obj.envvar is not None and value == (): + # Replace empty tuple with None so that a value from the + # environment may be tried. + value = None + + state.opts[self.dest] = value # type: ignore + state.order.append(self.obj) + + +class ParsingState: + def __init__(self, rargs: t.List[str]) -> None: + self.opts: t.Dict[str, t.Any] = {} + self.largs: t.List[str] = [] + self.rargs = rargs + self.order: t.List["CoreParameter"] = [] + + +class OptionParser: + """The option parser is an internal class that is ultimately used to + parse options and arguments. It's modelled after optparse and brings + a similar but vastly simplified API. It should generally not be used + directly as the high level Click classes wrap it for you. + + It's not nearly as extensible as optparse or argparse as it does not + implement features that are implemented on a higher level (such as + types or defaults). + + :param ctx: optionally the :class:`~click.Context` where this parser + should go with. + """ + + def __init__(self, ctx: t.Optional["Context"] = None) -> None: + #: The :class:`~click.Context` for this parser. This might be + #: `None` for some advanced use cases. + self.ctx = ctx + #: This controls how the parser deals with interspersed arguments. + #: If this is set to `False`, the parser will stop on the first + #: non-option. Click uses this to implement nested subcommands + #: safely. 
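A sketch of what `split_opt` (defined above) returns for a few invented inputs:

from click.parser import split_opt

assert split_opt("--verbose") == ("--", "verbose")
assert split_opt("-v") == ("-", "v")
assert split_opt("/debug") == ("/", "debug")   # slash-prefixed (DOS-style) option
assert split_opt("value") == ("", "value")     # not an option at all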
+ self.allow_interspersed_args: bool = True + #: This tells the parser how to deal with unknown options. By + #: default it will error out (which is sensible), but there is a + #: second mode where it will ignore it and continue processing + #: after shifting all the unknown options into the resulting args. + self.ignore_unknown_options: bool = False + + if ctx is not None: + self.allow_interspersed_args = ctx.allow_interspersed_args + self.ignore_unknown_options = ctx.ignore_unknown_options + + self._short_opt: t.Dict[str, Option] = {} + self._long_opt: t.Dict[str, Option] = {} + self._opt_prefixes = {"-", "--"} + self._args: t.List[Argument] = [] + + def add_option( + self, + obj: "CoreOption", + opts: t.Sequence[str], + dest: t.Optional[str], + action: t.Optional[str] = None, + nargs: int = 1, + const: t.Optional[t.Any] = None, + ) -> None: + """Adds a new option named `dest` to the parser. The destination + is not inferred (unlike with optparse) and needs to be explicitly + provided. Action can be any of ``store``, ``store_const``, + ``append``, ``append_const`` or ``count``. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + opts = [normalize_opt(opt, self.ctx) for opt in opts] + option = Option(obj, opts, dest, action=action, nargs=nargs, const=const) + self._opt_prefixes.update(option.prefixes) + for opt in option._short_opts: + self._short_opt[opt] = option + for opt in option._long_opts: + self._long_opt[opt] = option + + def add_argument( + self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1 + ) -> None: + """Adds a positional argument named `dest` to the parser. + + The `obj` can be used to identify the option in the order list + that is returned from the parser. + """ + self._args.append(Argument(obj, dest=dest, nargs=nargs)) + + def parse_args( + self, args: t.List[str] + ) -> t.Tuple[t.Dict[str, t.Any], t.List[str], t.List["CoreParameter"]]: + """Parses positional arguments and returns ``(values, args, order)`` + for the parsed options and arguments as well as the leftover + arguments if there are any. The order is a list of objects as they + appear on the command line. If arguments appear multiple times they + will be memorized multiple times as well. + """ + state = ParsingState(args) + try: + self._process_args_for_options(state) + self._process_args_for_args(state) + except UsageError: + if self.ctx is None or not self.ctx.resilient_parsing: + raise + return state.opts, state.largs, state.order + + def _process_args_for_args(self, state: ParsingState) -> None: + pargs, args = _unpack_args( + state.largs + state.rargs, [x.nargs for x in self._args] + ) + + for idx, arg in enumerate(self._args): + arg.process(pargs[idx], state) + + state.largs = args + state.rargs = [] + + def _process_args_for_options(self, state: ParsingState) -> None: + while state.rargs: + arg = state.rargs.pop(0) + arglen = len(arg) + # Double dashes always handled explicitly regardless of what + # prefixes are valid. + if arg == "--": + return + elif arg[:1] in self._opt_prefixes and arglen > 1: + self._process_opts(arg, state) + elif self.allow_interspersed_args: + state.largs.append(arg) + else: + state.rargs.insert(0, arg) + return + + # Say this is the original argument list: + # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] + # ^ + # (we are about to process arg(i)). 
+ # + # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of + # [arg0, ..., arg(i-1)] (any options and their arguments will have + # been removed from largs). + # + # The while loop will usually consume 1 or more arguments per pass. + # If it consumes 1 (eg. arg is an option that takes no arguments), + # then after _process_arg() is done the situation is: + # + # largs = subset of [arg0, ..., arg(i)] + # rargs = [arg(i+1), ..., arg(N-1)] + # + # If allow_interspersed_args is false, largs will always be + # *empty* -- still a subset of [arg0, ..., arg(i-1)], but + # not a very interesting subset! + + def _match_long_opt( + self, opt: str, explicit_value: t.Optional[str], state: ParsingState + ) -> None: + if opt not in self._long_opt: + from difflib import get_close_matches + + possibilities = get_close_matches(opt, self._long_opt) + raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) + + option = self._long_opt[opt] + if option.takes_value: + # At this point it's safe to modify rargs by injecting the + # explicit value, because no exception is raised in this + # branch. This means that the inserted value will be fully + # consumed. + if explicit_value is not None: + state.rargs.insert(0, explicit_value) + + value = self._get_value_from_state(opt, option, state) + + elif explicit_value is not None: + raise BadOptionUsage( + opt, _("Option {name!r} does not take a value.").format(name=opt) + ) + + else: + value = None + + option.process(value, state) + + def _match_short_opt(self, arg: str, state: ParsingState) -> None: + stop = False + i = 1 + prefix = arg[0] + unknown_options = [] + + for ch in arg[1:]: + opt = normalize_opt(f"{prefix}{ch}", self.ctx) + option = self._short_opt.get(opt) + i += 1 + + if not option: + if self.ignore_unknown_options: + unknown_options.append(ch) + continue + raise NoSuchOption(opt, ctx=self.ctx) + if option.takes_value: + # Any characters left in arg? Pretend they're the + # next arg, and stop consuming characters of arg. + if i < len(arg): + state.rargs.insert(0, arg[i:]) + stop = True + + value = self._get_value_from_state(opt, option, state) + + else: + value = None + + option.process(value, state) + + if stop: + break + + # If we got any unknown options we recombine the string of the + # remaining options and re-attach the prefix, then report that + # to the state as new larg. This way there is basic combinatorics + # that can be achieved while still ignoring unknown arguments. + if self.ignore_unknown_options and unknown_options: + state.largs.append(f"{prefix}{''.join(unknown_options)}") + + def _get_value_from_state( + self, option_name: str, option: Option, state: ParsingState + ) -> t.Any: + nargs = option.nargs + + if len(state.rargs) < nargs: + if option.obj._flag_needs_value: + # Option allows omitting the value. + value = _flag_needs_value + else: + raise BadOptionUsage( + option_name, + ngettext( + "Option {name!r} requires an argument.", + "Option {name!r} requires {nargs} arguments.", + nargs, + ).format(name=option_name, nargs=nargs), + ) + elif nargs == 1: + next_rarg = state.rargs[0] + + if ( + option.obj._flag_needs_value + and isinstance(next_rarg, str) + and next_rarg[:1] in self._opt_prefixes + and len(next_rarg) > 1 + ): + # The next arg looks like the start of an option, don't + # use it as the value if omitting the value is allowed. 
+ value = _flag_needs_value + else: + value = state.rargs.pop(0) + else: + value = tuple(state.rargs[:nargs]) + del state.rargs[:nargs] + + return value + + def _process_opts(self, arg: str, state: ParsingState) -> None: + explicit_value = None + # Long option handling happens in two parts. The first part is + # supporting explicitly attached values. In any case, we will try + # to long match the option first. + if "=" in arg: + long_opt, explicit_value = arg.split("=", 1) + else: + long_opt = arg + norm_long_opt = normalize_opt(long_opt, self.ctx) + + # At this point we will match the (assumed) long option through + # the long option matching code. Note that this allows options + # like "-foo" to be matched as long options. + try: + self._match_long_opt(norm_long_opt, explicit_value, state) + except NoSuchOption: + # At this point the long option matching failed, and we need + # to try with short options. However there is a special rule + # which says, that if we have a two character options prefix + # (applies to "--foo" for instance), we do not dispatch to the + # short option code and will instead raise the no option + # error. + if arg[:2] not in self._opt_prefixes: + self._match_short_opt(arg, state) + return + + if not self.ignore_unknown_options: + raise + + state.largs.append(arg) diff --git a/backend/test/lib/python3.8/site-packages/click/py.typed b/backend/test/lib/python3.8/site-packages/click/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/click/shell_completion.py b/backend/test/lib/python3.8/site-packages/click/shell_completion.py new file mode 100644 index 0000000000000000000000000000000000000000..dc9e00b9b0c6f4903b674f03343e887bd490b081 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/shell_completion.py @@ -0,0 +1,596 @@ +import os +import re +import typing as t +from gettext import gettext as _ + +from .core import Argument +from .core import BaseCommand +from .core import Context +from .core import MultiCommand +from .core import Option +from .core import Parameter +from .core import ParameterSource +from .parser import split_arg_string +from .utils import echo + + +def shell_complete( + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str, + instruction: str, +) -> int: + """Perform shell completion for the given CLI program. + + :param cli: Command being called. + :param ctx_args: Extra arguments to pass to + ``cli.make_context``. + :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + :param instruction: Value of ``complete_var`` with the completion + instruction and shell, in the form ``instruction_shell``. + :return: Status code to exit with. + """ + shell, _, instruction = instruction.partition("_") + comp_cls = get_completion_class(shell) + + if comp_cls is None: + return 1 + + comp = comp_cls(cli, ctx_args, prog_name, complete_var) + + if instruction == "source": + echo(comp.source()) + return 0 + + if instruction == "complete": + echo(comp.complete()) + return 0 + + return 1 + + +class CompletionItem: + """Represents a completion value and metadata about the value. The + default metadata is ``type`` to indicate special shell handling, + and ``help`` if a shell supports showing a help string next to the + value. 
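An end-to-end sketch of the internal `OptionParser` shown above. It is normally fed `click.core.Option` objects; the `FakeOpt` stand-in below is a hypothetical minimal substitute exposing only the `_flag_needs_value` attribute the parser reads.

from click.parser import OptionParser

class FakeOpt:
    # Stand-in for click.core.Option; only this flag is consulted here.
    _flag_needs_value = False

parser = OptionParser()
parser.add_option(FakeOpt(), ["-f", "--force"], dest="force",
                  action="store_const", const=True)
parser.add_option(FakeOpt(), ["-o", "--output"], dest="output", action="store")

opts, largs, order = parser.parse_args(["-f", "-o", "out.txt", "extra"])
assert opts == {"force": True, "output": "out.txt"}
assert largs == ["extra"]  # leftover positional, available for arguments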
+ + Arbitrary parameters can be passed when creating the object, and + accessed using ``item.attr``. If an attribute wasn't passed, + accessing it returns ``None``. + + :param value: The completion suggestion. + :param type: Tells the shell script to provide special completion + support for the type. Click uses ``"dir"`` and ``"file"``. + :param help: String shown next to the value if supported. + :param kwargs: Arbitrary metadata. The built-in implementations + don't use this, but custom type completions paired with custom + shell support could use it. + """ + + __slots__ = ("value", "type", "help", "_info") + + def __init__( + self, + value: t.Any, + type: str = "plain", + help: t.Optional[str] = None, + **kwargs: t.Any, + ) -> None: + self.value: t.Any = value + self.type: str = type + self.help: t.Optional[str] = help + self._info = kwargs + + def __getattr__(self, name: str) -> t.Any: + return self._info.get(name) + + +# Only Bash >= 4.4 has the nosort option. +_SOURCE_BASH = """\ +%(complete_func)s() { + local IFS=$'\\n' + local response + + response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ +%(complete_var)s=bash_complete $1) + + for completion in $response; do + IFS=',' read type value <<< "$completion" + + if [[ $type == 'dir' ]]; then + COMPREPLY=() + compopt -o dirnames + elif [[ $type == 'file' ]]; then + COMPREPLY=() + compopt -o default + elif [[ $type == 'plain' ]]; then + COMPREPLY+=($value) + fi + done + + return 0 +} + +%(complete_func)s_setup() { + complete -o nosort -F %(complete_func)s %(prog_name)s +} + +%(complete_func)s_setup; +""" + +_SOURCE_ZSH = """\ +#compdef %(prog_name)s + +%(complete_func)s() { + local -a completions + local -a completions_with_descriptions + local -a response + (( ! $+commands[%(prog_name)s] )) && return 1 + + response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ +%(complete_var)s=zsh_complete %(prog_name)s)}") + + for type key descr in ${response}; do + if [[ "$type" == "plain" ]]; then + if [[ "$descr" == "_" ]]; then + completions+=("$key") + else + completions_with_descriptions+=("$key":"$descr") + fi + elif [[ "$type" == "dir" ]]; then + _path_files -/ + elif [[ "$type" == "file" ]]; then + _path_files -f + fi + done + + if [ -n "$completions_with_descriptions" ]; then + _describe -V unsorted completions_with_descriptions -U + fi + + if [ -n "$completions" ]; then + compadd -U -V unsorted -a completions + fi +} + +if [[ $zsh_eval_context[-1] == loadautofunc ]]; then + # autoload from fpath, call function directly + %(complete_func)s "$@" +else + # eval/source/. command, register function for later + compdef %(complete_func)s %(prog_name)s +fi +""" + +_SOURCE_FISH = """\ +function %(complete_func)s; + set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ +COMP_CWORD=(commandline -t) %(prog_name)s); + + for completion in $response; + set -l metadata (string split "," $completion); + + if test $metadata[1] = "dir"; + __fish_complete_directories $metadata[2]; + else if test $metadata[1] = "file"; + __fish_complete_path $metadata[2]; + else if test $metadata[1] = "plain"; + echo $metadata[2]; + end; + end; +end; + +complete --no-files --command %(prog_name)s --arguments \ +"(%(complete_func)s)"; +""" + + +class ShellComplete: + """Base class for providing shell completion support. A subclass for + a given shell will override attributes and methods to implement the + completion instructions (``source`` and ``complete``). + + :param cli: Command being called. 
+ :param prog_name: Name of the executable in the shell. + :param complete_var: Name of the environment variable that holds + the completion instruction. + + .. versionadded:: 8.0 + """ + + name: t.ClassVar[str] + """Name to register the shell as with :func:`add_completion_class`. + This is used in completion instructions (``{name}_source`` and + ``{name}_complete``). + """ + + source_template: t.ClassVar[str] + """Completion script template formatted by :meth:`source`. This must + be provided by subclasses. + """ + + def __init__( + self, + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + complete_var: str, + ) -> None: + self.cli = cli + self.ctx_args = ctx_args + self.prog_name = prog_name + self.complete_var = complete_var + + @property + def func_name(self) -> str: + """The name of the shell function defined by the completion + script. + """ + safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII) + return f"_{safe_name}_completion" + + def source_vars(self) -> t.Dict[str, t.Any]: + """Vars for formatting :attr:`source_template`. + + By default this provides ``complete_func``, ``complete_var``, + and ``prog_name``. + """ + return { + "complete_func": self.func_name, + "complete_var": self.complete_var, + "prog_name": self.prog_name, + } + + def source(self) -> str: + """Produce the shell script that defines the completion + function. By default this ``%``-style formats + :attr:`source_template` with the dict returned by + :meth:`source_vars`. + """ + return self.source_template % self.source_vars() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + """Use the env vars defined by the shell script to return a + tuple of ``args, incomplete``. This must be implemented by + subclasses. + """ + raise NotImplementedError + + def get_completions( + self, args: t.List[str], incomplete: str + ) -> t.List[CompletionItem]: + """Determine the context and last complete command or parameter + from the complete args. Call that object's ``shell_complete`` + method to get the completions for the incomplete value. + + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. + """ + ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) + obj, incomplete = _resolve_incomplete(ctx, args, incomplete) + return obj.shell_complete(ctx, incomplete) + + def format_completion(self, item: CompletionItem) -> str: + """Format a completion item into the form recognized by the + shell script. This must be implemented by subclasses. + + :param item: Completion item to format. + """ + raise NotImplementedError + + def complete(self) -> str: + """Produce the completion data to send back to the shell. + + By default this calls :meth:`get_completion_args`, gets the + completions, then calls :meth:`format_completion` for each + completion. 
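A sketch of returning `CompletionItem` objects from an option's `shell_complete` callback; the command, option, and environment names are invented.

import click
from click.shell_completion import CompletionItem

def complete_env(ctx, param, incomplete):
    # Candidate values are invented for illustration.
    envs = {"dev": "local sandbox", "staging": "pre-production", "prod": "production"}
    return [
        CompletionItem(name, help=text)
        for name, text in envs.items()
        if name.startswith(incomplete)
    ]

@click.command()
@click.option("--env", shell_complete=complete_env)
def deploy(env):
    click.echo(f"deploying to {env}")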
+ """ + args, incomplete = self.get_completion_args() + completions = self.get_completions(args, incomplete) + out = [self.format_completion(item) for item in completions] + return "\n".join(out) + + +class BashComplete(ShellComplete): + """Shell completion for Bash.""" + + name = "bash" + source_template = _SOURCE_BASH + + @staticmethod + def _check_version() -> None: + import subprocess + + output = subprocess.run( + ["bash", "-c", 'echo "${BASH_VERSION}"'], stdout=subprocess.PIPE + ) + match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) + + if match is not None: + major, minor = match.groups() + + if major < "4" or major == "4" and minor < "4": + echo( + _( + "Shell completion is not supported for Bash" + " versions older than 4.4." + ), + err=True, + ) + else: + echo( + _("Couldn't detect Bash version, shell completion is not supported."), + err=True, + ) + + def source(self) -> str: + self._check_version() + return super().source() + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type},{item.value}" + + +class ZshComplete(ShellComplete): + """Shell completion for Zsh.""" + + name = "zsh" + source_template = _SOURCE_ZSH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + cword = int(os.environ["COMP_CWORD"]) + args = cwords[1:cword] + + try: + incomplete = cwords[cword] + except IndexError: + incomplete = "" + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" + + +class FishComplete(ShellComplete): + """Shell completion for Fish.""" + + name = "fish" + source_template = _SOURCE_FISH + + def get_completion_args(self) -> t.Tuple[t.List[str], str]: + cwords = split_arg_string(os.environ["COMP_WORDS"]) + incomplete = os.environ["COMP_CWORD"] + args = cwords[1:] + + # Fish stores the partial word in both COMP_WORDS and + # COMP_CWORD, remove it from complete args. + if incomplete and args and args[-1] == incomplete: + args.pop() + + return args, incomplete + + def format_completion(self, item: CompletionItem) -> str: + if item.help: + return f"{item.type},{item.value}\t{item.help}" + + return f"{item.type},{item.value}" + + +ShellCompleteType = t.TypeVar("ShellCompleteType", bound=t.Type[ShellComplete]) + + +_available_shells: t.Dict[str, t.Type[ShellComplete]] = { + "bash": BashComplete, + "fish": FishComplete, + "zsh": ZshComplete, +} + + +def add_completion_class( + cls: ShellCompleteType, name: t.Optional[str] = None +) -> ShellCompleteType: + """Register a :class:`ShellComplete` subclass under the given name. + The name will be provided by the completion instruction environment + variable during completion. + + :param cls: The completion class that will handle completion for the + shell. + :param name: Name to register the class under. Defaults to the + class's ``name`` attribute. + """ + if name is None: + name = cls.name + + _available_shells[name] = cls + + return cls + + +def get_completion_class(shell: str) -> t.Optional[t.Type[ShellComplete]]: + """Look up a registered :class:`ShellComplete` subclass by the name + provided by the completion instruction environment variable. 
If the + name isn't registered, returns ``None``. + + :param shell: Name the class is registered under. + """ + return _available_shells.get(shell) + + +def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: + """Determine if the given parameter is an argument that can still + accept values. + + :param ctx: Invocation context for the command represented by the + parsed complete args. + :param param: Argument object being checked. + """ + if not isinstance(param, Argument): + return False + + assert param.name is not None + # Will be None if expose_value is False. + value = ctx.params.get(param.name) + return ( + param.nargs == -1 + or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE + or ( + param.nargs > 1 + and isinstance(value, (tuple, list)) + and len(value) < param.nargs + ) + ) + + +def _start_of_option(ctx: Context, value: str) -> bool: + """Check if the value looks like the start of an option.""" + if not value: + return False + + c = value[0] + return c in ctx._opt_prefixes + + +def _is_incomplete_option(ctx: Context, args: t.List[str], param: Parameter) -> bool: + """Determine if the given parameter is an option that needs a value. + + :param args: List of complete args before the incomplete value. + :param param: Option object being checked. + """ + if not isinstance(param, Option): + return False + + if param.is_flag or param.count: + return False + + last_option = None + + for index, arg in enumerate(reversed(args)): + if index + 1 > param.nargs: + break + + if _start_of_option(ctx, arg): + last_option = arg + + return last_option is not None and last_option in param.opts + + +def _resolve_context( + cli: BaseCommand, + ctx_args: t.MutableMapping[str, t.Any], + prog_name: str, + args: t.List[str], +) -> Context: + """Produce the context hierarchy starting with the command and + traversing the complete arguments. This only follows the commands, + it doesn't trigger input prompts or callbacks. + + :param cli: Command being called. + :param prog_name: Name of the executable in the shell. + :param args: List of complete args before the incomplete value. + """ + ctx_args["resilient_parsing"] = True + ctx = cli.make_context(prog_name, args.copy(), **ctx_args) + args = ctx.protected_args + ctx.args + + while args: + command = ctx.command + + if isinstance(command, MultiCommand): + if not command.chain: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + ctx = cmd.make_context(name, args, parent=ctx, resilient_parsing=True) + args = ctx.protected_args + ctx.args + else: + sub_ctx = ctx + + while args: + name, cmd, args = command.resolve_command(ctx, args) + + if cmd is None: + return ctx + + sub_ctx = cmd.make_context( + name, + args, + parent=ctx, + allow_extra_args=True, + allow_interspersed_args=False, + resilient_parsing=True, + ) + args = sub_ctx.args + + ctx = sub_ctx + args = [*sub_ctx.protected_args, *sub_ctx.args] + else: + break + + return ctx + + +def _resolve_incomplete( + ctx: Context, args: t.List[str], incomplete: str +) -> t.Tuple[t.Union[BaseCommand, Parameter], str]: + """Find the Click object that will handle the completion of the + incomplete value. Return the object and the incomplete value. + + :param ctx: Invocation context for the command represented by + the parsed complete args. + :param args: List of complete args before the incomplete value. + :param incomplete: Value being completed. May be empty. 
+ """ + # Different shells treat an "=" between a long option name and + # value differently. Might keep the value joined, return the "=" + # as a separate item, or return the split name and value. Always + # split and discard the "=" to make completion easier. + if incomplete == "=": + incomplete = "" + elif "=" in incomplete and _start_of_option(ctx, incomplete): + name, _, incomplete = incomplete.partition("=") + args.append(name) + + # The "--" marker tells Click to stop treating values as options + # even if they start with the option character. If it hasn't been + # given and the incomplete arg looks like an option, the current + # command will provide option name completions. + if "--" not in args and _start_of_option(ctx, incomplete): + return ctx.command, incomplete + + params = ctx.command.get_params(ctx) + + # If the last complete arg is an option name with an incomplete + # value, the option will provide value completions. + for param in params: + if _is_incomplete_option(ctx, args, param): + return param, incomplete + + # It's not an option name or value. The first argument without a + # parsed value will provide value completions. + for param in params: + if _is_incomplete_argument(ctx, param): + return param, incomplete + + # There were no unparsed arguments, the command may be a group that + # will provide command name completions. + return ctx.command, incomplete diff --git a/backend/test/lib/python3.8/site-packages/click/termui.py b/backend/test/lib/python3.8/site-packages/click/termui.py new file mode 100644 index 0000000000000000000000000000000000000000..db7a4b286174fdf26f3251631a2066eda2fa5bea --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/termui.py @@ -0,0 +1,784 @@ +import inspect +import io +import itertools +import sys +import typing as t +from gettext import gettext as _ + +from ._compat import isatty +from ._compat import strip_ansi +from .exceptions import Abort +from .exceptions import UsageError +from .globals import resolve_color_default +from .types import Choice +from .types import convert_type +from .types import ParamType +from .utils import echo +from .utils import LazyFile + +if t.TYPE_CHECKING: + from ._termui_impl import ProgressBar + +V = t.TypeVar("V") + +# The prompt functions to use. The doc tools currently override these +# functions to customize how they work. 
+visible_prompt_func: t.Callable[[str], str] = input + +_ansi_colors = { + "black": 30, + "red": 31, + "green": 32, + "yellow": 33, + "blue": 34, + "magenta": 35, + "cyan": 36, + "white": 37, + "reset": 39, + "bright_black": 90, + "bright_red": 91, + "bright_green": 92, + "bright_yellow": 93, + "bright_blue": 94, + "bright_magenta": 95, + "bright_cyan": 96, + "bright_white": 97, +} +_ansi_reset_all = "\033[0m" + + +def hidden_prompt_func(prompt: str) -> str: + import getpass + + return getpass.getpass(prompt) + + +def _build_prompt( + text: str, + suffix: str, + show_default: bool = False, + default: t.Optional[t.Any] = None, + show_choices: bool = True, + type: t.Optional[ParamType] = None, +) -> str: + prompt = text + if type is not None and show_choices and isinstance(type, Choice): + prompt += f" ({', '.join(map(str, type.choices))})" + if default is not None and show_default: + prompt = f"{prompt} [{_format_default(default)}]" + return f"{prompt}{suffix}" + + +def _format_default(default: t.Any) -> t.Any: + if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): + return default.name + + return default + + +def prompt( + text: str, + default: t.Optional[t.Any] = None, + hide_input: bool = False, + confirmation_prompt: t.Union[bool, str] = False, + type: t.Optional[t.Union[ParamType, t.Any]] = None, + value_proc: t.Optional[t.Callable[[str], t.Any]] = None, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, + show_choices: bool = True, +) -> t.Any: + """Prompts a user for input. This is a convenience function that can + be used to prompt a user for input later. + + If the user aborts the input by sending an interrupt signal, this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the text to show for the prompt. + :param default: the default value to use if no input happens. If this + is not given it will prompt until it's aborted. + :param hide_input: if this is set to true then the input value will + be hidden. + :param confirmation_prompt: Prompt a second time to confirm the + value. Can be set to a string instead of ``True`` to customize + the message. + :param type: the type to use to check the value against. + :param value_proc: if this parameter is provided it's a function that + is invoked instead of the type conversion to + convert a value. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + :param show_choices: Show or hide choices if the passed type is a Choice. + For example if type is a Choice of either day or week, + show_choices is true and text is "Group by" then the + prompt will be "Group by (day, week): ". + + .. versionadded:: 8.0 + ``confirmation_prompt`` can be a custom string. + + .. versionadded:: 7.0 + Added the ``show_choices`` parameter. + + .. versionadded:: 6.0 + Added unicode support for cmd.exe on Windows. + + .. versionadded:: 4.0 + Added the `err` parameter. + + """ + + def prompt_func(text: str) -> str: + f = hidden_prompt_func if hide_input else visible_prompt_func + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(text.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. 
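A sketch of the `prompt` and `confirm` helpers this module defines (`confirm` appears further below); the prompt texts and choices are invented.

import click

name = click.prompt("Project name", default="demo")
env = click.prompt("Environment", type=click.Choice(["dev", "prod"]),
                   default="dev")  # renders as "Environment (dev, prod) [dev]: "
if click.confirm(f"Create {name} in {env}?", default=True):
    click.echo("created")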
+ return f(" ") + except (KeyboardInterrupt, EOFError): + # getpass doesn't print a newline if the user aborts input with ^C. + # Allegedly this behavior is inherited from getpass(3). + # A doc bug has been filed at https://bugs.python.org/issue24711 + if hide_input: + echo(None, err=err) + raise Abort() from None + + if value_proc is None: + value_proc = convert_type(type, default) + + prompt = _build_prompt( + text, prompt_suffix, show_default, default, show_choices, type + ) + + if confirmation_prompt: + if confirmation_prompt is True: + confirmation_prompt = _("Repeat for confirmation") + + confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) + + while True: + while True: + value = prompt_func(prompt) + if value: + break + elif default is not None: + value = default + break + try: + result = value_proc(value) + except UsageError as e: + if hide_input: + echo(_("Error: The value you entered was invalid."), err=err) + else: + echo(_("Error: {e.message}").format(e=e), err=err) # noqa: B306 + continue + if not confirmation_prompt: + return result + while True: + value2 = prompt_func(confirmation_prompt) + is_empty = not value and not value2 + if value2 or is_empty: + break + if value == value2: + return result + echo(_("Error: The two entered values do not match."), err=err) + + +def confirm( + text: str, + default: t.Optional[bool] = False, + abort: bool = False, + prompt_suffix: str = ": ", + show_default: bool = True, + err: bool = False, +) -> bool: + """Prompts for confirmation (yes/no question). + + If the user aborts the input by sending a interrupt signal this + function will catch it and raise a :exc:`Abort` exception. + + :param text: the question to ask. + :param default: The default value to use when no input is given. If + ``None``, repeat until input is given. + :param abort: if this is set to `True` a negative answer aborts the + exception by raising :exc:`Abort`. + :param prompt_suffix: a suffix that should be added to the prompt. + :param show_default: shows or hides the default value in the prompt. + :param err: if set to true the file defaults to ``stderr`` instead of + ``stdout``, the same as with echo. + + .. versionchanged:: 8.0 + Repeat until input is given if ``default`` is ``None``. + + .. versionadded:: 4.0 + Added the ``err`` parameter. + """ + prompt = _build_prompt( + text, + prompt_suffix, + show_default, + "y/n" if default is None else ("Y/n" if default else "y/N"), + ) + + while True: + try: + # Write the prompt separately so that we get nice + # coloring through colorama on Windows + echo(prompt.rstrip(" "), nl=False, err=err) + # Echo a space to stdout to work around an issue where + # readline causes backspace to clear the whole line. + value = visible_prompt_func(" ").lower().strip() + except (KeyboardInterrupt, EOFError): + raise Abort() from None + if value in ("y", "yes"): + rv = True + elif value in ("n", "no"): + rv = False + elif default is not None and value == "": + rv = default + else: + echo(_("Error: invalid input"), err=err) + continue + break + if abort and not rv: + raise Abort() + return rv + + +def echo_via_pager( + text_or_generator: t.Union[t.Iterable[str], t.Callable[[], t.Iterable[str]], str], + color: t.Optional[bool] = None, +) -> None: + """This function takes a text and shows it via an environment specific + pager on stdout. + + .. versionchanged:: 3.0 + Added the `color` flag. + + :param text_or_generator: the text to page, or alternatively, a + generator emitting the text to page. 
+ :param color: controls if the pager supports ANSI colors or not. The + default is autodetection. + """ + color = resolve_color_default(color) + + if inspect.isgeneratorfunction(text_or_generator): + i = t.cast(t.Callable[[], t.Iterable[str]], text_or_generator)() + elif isinstance(text_or_generator, str): + i = [text_or_generator] + else: + i = iter(t.cast(t.Iterable[str], text_or_generator)) + + # convert every element of i to a text type if necessary + text_generator = (el if isinstance(el, str) else str(el) for el in i) + + from ._termui_impl import pager + + return pager(itertools.chain(text_generator, "\n"), color) + + +def progressbar( + iterable: t.Optional[t.Iterable[V]] = None, + length: t.Optional[int] = None, + label: t.Optional[str] = None, + show_eta: bool = True, + show_percent: t.Optional[bool] = None, + show_pos: bool = False, + item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, + fill_char: str = "#", + empty_char: str = "-", + bar_template: str = "%(label)s [%(bar)s] %(info)s", + info_sep: str = " ", + width: int = 36, + file: t.Optional[t.TextIO] = None, + color: t.Optional[bool] = None, + update_min_steps: int = 1, +) -> "ProgressBar[V]": + """This function creates an iterable context manager that can be used + to iterate over something while showing a progress bar. It will + either iterate over the `iterable` or `length` items (that are counted + up). While iteration happens, this function will print a rendered + progress bar to the given `file` (defaults to stdout) and will attempt + to calculate remaining time and more. By default, this progress bar + will not be rendered if the file is not a terminal. + + The context manager creates the progress bar. When the context + manager is entered the progress bar is already created. With every + iteration over the progress bar, the iterable passed to the bar is + advanced and the bar is updated. When the context manager exits, + a newline is printed and the progress bar is finalized on screen. + + Note: The progress bar is currently designed for use cases where the + total progress can be expected to take at least several seconds. + Because of this, the ProgressBar class object won't display + progress that is considered too fast, and progress where the time + between steps is less than a second. + + No printing must happen or the progress bar will be unintentionally + destroyed. + + Example usage:: + + with progressbar(items) as bar: + for item in bar: + do_something_with(item) + + Alternatively, if no iterable is specified, one can manually update the + progress bar through the `update()` method instead of directly + iterating over the progress bar. The update method accepts the number + of steps to increment the bar with:: + + with progressbar(length=chunks.total_bytes) as bar: + for chunk in chunks: + process_chunk(chunk) + bar.update(chunks.bytes) + + The ``update()`` method also takes an optional value specifying the + ``current_item`` at the new position. This is useful when used + together with ``item_show_func`` to customize the output for each + manual step:: + + with click.progressbar( + length=total_size, + label='Unzipping archive', + item_show_func=lambda a: a.filename + ) as bar: + for archive in zip_file: + archive.extract() + bar.update(archive.size, archive) + + :param iterable: an iterable to iterate over. If not provided the length + is required. + :param length: the number of items to iterate over. 
By default the + progressbar will attempt to ask the iterator about its + length, which might or might not work. If an iterable is + also provided this parameter can be used to override the + length. If an iterable is not provided the progress bar + will iterate over a range of that length. + :param label: the label to show next to the progress bar. + :param show_eta: enables or disables the estimated time display. This is + automatically disabled if the length cannot be + determined. + :param show_percent: enables or disables the percentage display. The + default is `True` if the iterable has a length or + `False` if not. + :param show_pos: enables or disables the absolute position display. The + default is `False`. + :param item_show_func: A function called with the current item which + can return a string to show next to the progress bar. If the + function returns ``None`` nothing is shown. The current item can + be ``None``, such as when entering and exiting the bar. + :param fill_char: the character to use to show the filled part of the + progress bar. + :param empty_char: the character to use to show the non-filled part of + the progress bar. + :param bar_template: the format string to use as template for the bar. + The parameters in it are ``label`` for the label, + ``bar`` for the progress bar and ``info`` for the + info section. + :param info_sep: the separator between multiple info items (eta etc.) + :param width: the width of the progress bar in characters, 0 means full + terminal width + :param file: The file to write to. If this is not a terminal then + only the label is printed. + :param color: controls if the terminal supports ANSI colors or not. The + default is autodetection. This is only needed if ANSI + codes are included anywhere in the progress bar output + which is not the case by default. + :param update_min_steps: Render only when this many updates have + completed. This allows tuning for very fast iterators. + + .. versionchanged:: 8.0 + Output is shown even if execution time is less than 0.5 seconds. + + .. versionchanged:: 8.0 + ``item_show_func`` shows the current item, not the previous one. + + .. versionchanged:: 8.0 + Labels are echoed if the output is not a TTY. Reverts a change + in 7.0 that removed all output. + + .. versionadded:: 8.0 + Added the ``update_min_steps`` parameter. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. Added the ``update`` method to + the object. + + .. versionadded:: 2.0 + """ + from ._termui_impl import ProgressBar + + color = resolve_color_default(color) + return ProgressBar( + iterable=iterable, + length=length, + show_eta=show_eta, + show_percent=show_percent, + show_pos=show_pos, + item_show_func=item_show_func, + fill_char=fill_char, + empty_char=empty_char, + bar_template=bar_template, + info_sep=info_sep, + file=file, + label=label, + width=width, + color=color, + update_min_steps=update_min_steps, + ) + + +def clear() -> None: + """Clears the terminal screen. This will have the effect of clearing + the whole visible space of the terminal and moving the cursor to the + top left. This does not do anything if not connected to a terminal. + + .. 
versionadded:: 2.0 + """ + if not isatty(sys.stdout): + return + + # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor + echo("\033[2J\033[1;1H", nl=False) + + +def _interpret_color( + color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0 +) -> str: + if isinstance(color, int): + return f"{38 + offset};5;{color:d}" + + if isinstance(color, (tuple, list)): + r, g, b = color + return f"{38 + offset};2;{r:d};{g:d};{b:d}" + + return str(_ansi_colors[color] + offset) + + +def style( + text: t.Any, + fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, + bold: t.Optional[bool] = None, + dim: t.Optional[bool] = None, + underline: t.Optional[bool] = None, + overline: t.Optional[bool] = None, + italic: t.Optional[bool] = None, + blink: t.Optional[bool] = None, + reverse: t.Optional[bool] = None, + strikethrough: t.Optional[bool] = None, + reset: bool = True, +) -> str: + """Styles a text with ANSI styles and returns the new string. By + default the styling is self contained which means that at the end + of the string a reset code is issued. This can be prevented by + passing ``reset=False``. + + Examples:: + + click.echo(click.style('Hello World!', fg='green')) + click.echo(click.style('ATTENTION!', blink=True)) + click.echo(click.style('Some things', reverse=True, fg='cyan')) + click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) + + Supported color names: + + * ``black`` (might be a gray) + * ``red`` + * ``green`` + * ``yellow`` (might be an orange) + * ``blue`` + * ``magenta`` + * ``cyan`` + * ``white`` (might be light gray) + * ``bright_black`` + * ``bright_red`` + * ``bright_green`` + * ``bright_yellow`` + * ``bright_blue`` + * ``bright_magenta`` + * ``bright_cyan`` + * ``bright_white`` + * ``reset`` (reset the color code only) + + If the terminal supports it, color may also be specified as: + + - An integer in the interval [0, 255]. The terminal must support + 8-bit/256-color mode. + - An RGB tuple of three integers in [0, 255]. The terminal must + support 24-bit/true-color mode. + + See https://en.wikipedia.org/wiki/ANSI_color and + https://gist.github.com/XVilka/8346728 for more information. + + :param text: the string to style with ansi codes. + :param fg: if provided this will become the foreground color. + :param bg: if provided this will become the background color. + :param bold: if provided this will enable or disable bold mode. + :param dim: if provided this will enable or disable dim mode. This is + badly supported. + :param underline: if provided this will enable or disable underline. + :param overline: if provided this will enable or disable overline. + :param italic: if provided this will enable or disable italic. + :param blink: if provided this will enable or disable blinking. + :param reverse: if provided this will enable or disable inverse + rendering (foreground becomes background and the + other way round). + :param strikethrough: if provided this will enable or disable + striking through text. + :param reset: by default a reset-all code is added at the end of the + string which means that styles do not carry over. This + can be disabled to compose styles. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. + + .. versionchanged:: 8.0 + Added support for 256 and RGB color codes. + + .. versionchanged:: 8.0 + Added the ``strikethrough``, ``italic``, and ``overline`` + parameters. + + .. 
versionchanged:: 7.0 + Added support for bright colors. + + .. versionadded:: 2.0 + """ + if not isinstance(text, str): + text = str(text) + + bits = [] + + if fg: + try: + bits.append(f"\033[{_interpret_color(fg)}m") + except KeyError: + raise TypeError(f"Unknown color {fg!r}") from None + + if bg: + try: + bits.append(f"\033[{_interpret_color(bg, 10)}m") + except KeyError: + raise TypeError(f"Unknown color {bg!r}") from None + + if bold is not None: + bits.append(f"\033[{1 if bold else 22}m") + if dim is not None: + bits.append(f"\033[{2 if dim else 22}m") + if underline is not None: + bits.append(f"\033[{4 if underline else 24}m") + if overline is not None: + bits.append(f"\033[{53 if overline else 55}m") + if italic is not None: + bits.append(f"\033[{3 if italic else 23}m") + if blink is not None: + bits.append(f"\033[{5 if blink else 25}m") + if reverse is not None: + bits.append(f"\033[{7 if reverse else 27}m") + if strikethrough is not None: + bits.append(f"\033[{9 if strikethrough else 29}m") + bits.append(text) + if reset: + bits.append(_ansi_reset_all) + return "".join(bits) + + +def unstyle(text: str) -> str: + """Removes ANSI styling information from a string. Usually it's not + necessary to use this function as Click's echo function will + automatically remove styling if necessary. + + .. versionadded:: 2.0 + + :param text: the text to remove style information from. + """ + return strip_ansi(text) + + +def secho( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO[t.AnyStr]] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, + **styles: t.Any, +) -> None: + """This function combines :func:`echo` and :func:`style` into one + call. As such the following two calls are the same:: + + click.secho('Hello World!', fg='green') + click.echo(click.style('Hello World!', fg='green')) + + All keyword arguments are forwarded to the underlying functions + depending on which one they go with. + + Non-string types will be converted to :class:`str`. However, + :class:`bytes` are passed directly to :meth:`echo` without applying + style. If you want to style bytes that represent text, call + :meth:`bytes.decode` first. + + .. versionchanged:: 8.0 + A non-string ``message`` is converted to a string. Bytes are + passed through without style applied. + + .. versionadded:: 2.0 + """ + if message is not None and not isinstance(message, (bytes, bytearray)): + message = style(message, **styles) + + return echo(message, file=file, nl=nl, err=err, color=color) + + +def edit( + text: t.Optional[t.AnyStr] = None, + editor: t.Optional[str] = None, + env: t.Optional[t.Mapping[str, str]] = None, + require_save: bool = True, + extension: str = ".txt", + filename: t.Optional[str] = None, +) -> t.Optional[t.AnyStr]: + r"""Edits the given text in the defined editor. If an editor is given + (should be the full path to the executable but the regular operating + system search path is used for finding the executable) it overrides + the detected editor. Optionally, some environment variables can be + used. If the editor is closed without changes, `None` is returned. In + case a file is edited directly the return value is always `None` and + `require_save` and `extension` are ignored. + + If the editor cannot be opened a :exc:`UsageError` is raised. + + Note for Windows: to simplify cross-platform usage, the newlines are + automatically converted from POSIX to Windows and vice versa. As such, + the message here will have ``\n`` as newline markers. 
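A sketch of `style`/`secho` as documented above, including the integer (256-color) and RGB-tuple color forms; the colors are arbitrary.

import click

click.secho("ok", fg="green", bold=True)
click.secho("warning", fg="yellow", underline=True)
# 256-color index and 24-bit RGB tuple, per the style() docstring:
click.echo(click.style("indexed", fg=117) + " " + click.style("rgb", fg=(138, 43, 226)))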
+ + :param text: the text to edit. + :param editor: optionally the editor to use. Defaults to automatic + detection. + :param env: environment variables to forward to the editor. + :param require_save: if this is true, then not saving in the editor + will make the return value become `None`. + :param extension: the extension to tell the editor about. This defaults + to `.txt` but changing this might change syntax + highlighting. + :param filename: if provided it will edit this file instead of the + provided text contents. It will not use a temporary + file as an indirection in that case. + """ + from ._termui_impl import Editor + + ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) + + if filename is None: + return ed.edit(text) + + ed.edit_file(filename) + return None + + +def launch(url: str, wait: bool = False, locate: bool = False) -> int: + """This function launches the given URL (or filename) in the default + viewer application for this file type. If this is an executable, it + might launch the executable in a new session. The return value is + the exit code of the launched application. Usually, ``0`` indicates + success. + + Examples:: + + click.launch('https://click.palletsprojects.com/') + click.launch('/my/downloaded/file', locate=True) + + .. versionadded:: 2.0 + + :param url: URL or filename of the thing to launch. + :param wait: Wait for the program to exit before returning. This + only works if the launched program blocks. In particular, + ``xdg-open`` on Linux does not block. + :param locate: if this is set to `True` then instead of launching the + application associated with the URL it will attempt to + launch a file manager with the file located. This + might have weird effects if the URL does not point to + the filesystem. + """ + from ._termui_impl import open_url + + return open_url(url, wait=wait, locate=locate) + + +# If this is provided, getchar() calls into this instead. This is used +# for unittesting purposes. +_getchar: t.Optional[t.Callable[[bool], str]] = None + + +def getchar(echo: bool = False) -> str: + """Fetches a single character from the terminal and returns it. This + will always return a unicode character and under certain rare + circumstances this might return more than one character. The + situations which more than one character is returned is when for + whatever reason multiple characters end up in the terminal buffer or + standard input was not actually a terminal. + + Note that this will always read from the terminal, even if something + is piped into the standard input. + + Note for Windows: in rare cases when typing non-ASCII characters, this + function might wait for a second character and then return both at once. + This is because certain Unicode characters look like special-key markers. + + .. versionadded:: 2.0 + + :param echo: if set to `True`, the character read will also show up on + the terminal. The default is to not show it. + """ + global _getchar + + if _getchar is None: + from ._termui_impl import getchar as f + + _getchar = f + + return _getchar(echo) + + +def raw_terminal() -> t.ContextManager[int]: + from ._termui_impl import raw_terminal as f + + return f() + + +def pause(info: t.Optional[str] = None, err: bool = False) -> None: + """This command stops execution and waits for the user to press any + key to continue. This is similar to the Windows batch "pause" + command. If the program is not run through a terminal, this command + will instead do nothing. + + .. versionadded:: 2.0 + + .. 
versionadded:: 4.0 + Added the `err` parameter. + + :param info: The message to print before pausing. Defaults to + ``"Press any key to continue..."``. + :param err: if set to message goes to ``stderr`` instead of + ``stdout``, the same as with echo. + """ + if not isatty(sys.stdin) or not isatty(sys.stdout): + return + + if info is None: + info = _("Press any key to continue...") + + try: + if info: + echo(info, nl=False, err=err) + try: + getchar() + except (KeyboardInterrupt, EOFError): + pass + finally: + if info: + echo(err=err) diff --git a/backend/test/lib/python3.8/site-packages/click/testing.py b/backend/test/lib/python3.8/site-packages/click/testing.py new file mode 100644 index 0000000000000000000000000000000000000000..e0df0d2a657fe19523957b85964b9956e5c78a30 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/testing.py @@ -0,0 +1,479 @@ +import contextlib +import io +import os +import shlex +import shutil +import sys +import tempfile +import typing as t +from types import TracebackType + +from . import formatting +from . import termui +from . import utils +from ._compat import _find_binary_reader + +if t.TYPE_CHECKING: + from .core import BaseCommand + + +class EchoingStdin: + def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: + self._input = input + self._output = output + self._paused = False + + def __getattr__(self, x: str) -> t.Any: + return getattr(self._input, x) + + def _echo(self, rv: bytes) -> bytes: + if not self._paused: + self._output.write(rv) + + return rv + + def read(self, n: int = -1) -> bytes: + return self._echo(self._input.read(n)) + + def read1(self, n: int = -1) -> bytes: + return self._echo(self._input.read1(n)) # type: ignore + + def readline(self, n: int = -1) -> bytes: + return self._echo(self._input.readline(n)) + + def readlines(self) -> t.List[bytes]: + return [self._echo(x) for x in self._input.readlines()] + + def __iter__(self) -> t.Iterator[bytes]: + return iter(self._echo(x) for x in self._input) + + def __repr__(self) -> str: + return repr(self._input) + + +@contextlib.contextmanager +def _pause_echo(stream: t.Optional[EchoingStdin]) -> t.Iterator[None]: + if stream is None: + yield + else: + stream._paused = True + yield + stream._paused = False + + +class _NamedTextIOWrapper(io.TextIOWrapper): + def __init__( + self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any + ) -> None: + super().__init__(buffer, **kwargs) + self._name = name + self._mode = mode + + @property + def name(self) -> str: + return self._name + + @property + def mode(self) -> str: + return self._mode + + +def make_input_stream( + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]], charset: str +) -> t.BinaryIO: + # Is already an input stream. + if hasattr(input, "read"): + rv = _find_binary_reader(t.cast(t.IO[t.Any], input)) + + if rv is not None: + return rv + + raise TypeError("Could not find binary reader for input stream.") + + if input is None: + input = b"" + elif isinstance(input, str): + input = input.encode(charset) + + return io.BytesIO(input) + + +class Result: + """Holds the captured result of an invoked CLI script.""" + + def __init__( + self, + runner: "CliRunner", + stdout_bytes: bytes, + stderr_bytes: t.Optional[bytes], + return_value: t.Any, + exit_code: int, + exception: t.Optional[BaseException], + exc_info: t.Optional[ + t.Tuple[t.Type[BaseException], BaseException, TracebackType] + ] = None, + ): + #: The runner that created the result + self.runner = runner + #: The standard output as bytes. 
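# --- editorial example (not part of the diff) --------------------------
# A tiny confirmation gate built from the terminal primitives above
# (echo() and getchar()). Purely illustrative; getchar() always reads
# from the terminal, so this needs an interactive session.
import click

click.echo("Apply changes? [y/N] ", nl=False)
ch = click.getchar(echo=True)
click.echo()  # move to the next line after the echoed keypress

if ch.lower() != "y":
    raise SystemExit(1)
click.echo("applied")
# ------------------------------------------------------------------------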
+ self.stdout_bytes = stdout_bytes + #: The standard error as bytes, or None if not available + self.stderr_bytes = stderr_bytes + #: The value returned from the invoked command. + #: + #: .. versionadded:: 8.0 + self.return_value = return_value + #: The exit code as integer. + self.exit_code = exit_code + #: The exception that happened if one did. + self.exception = exception + #: The traceback + self.exc_info = exc_info + + @property + def output(self) -> str: + """The (standard) output as unicode string.""" + return self.stdout + + @property + def stdout(self) -> str: + """The standard output as unicode string.""" + return self.stdout_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + @property + def stderr(self) -> str: + """The standard error as unicode string.""" + if self.stderr_bytes is None: + raise ValueError("stderr not separately captured") + return self.stderr_bytes.decode(self.runner.charset, "replace").replace( + "\r\n", "\n" + ) + + def __repr__(self) -> str: + exc_str = repr(self.exception) if self.exception else "okay" + return f"<{type(self).__name__} {exc_str}>" + + +class CliRunner: + """The CLI runner provides functionality to invoke a Click command line + script for unittesting purposes in a isolated environment. This only + works in single-threaded systems without any concurrency as it changes the + global interpreter state. + + :param charset: the character set for the input and output data. + :param env: a dictionary with environment variables for overriding. + :param echo_stdin: if this is set to `True`, then reading from stdin writes + to stdout. This is useful for showing examples in + some circumstances. Note that regular prompts + will automatically echo the input. + :param mix_stderr: if this is set to `False`, then stdout and stderr are + preserved as independent streams. This is useful for + Unix-philosophy apps that have predictable stdout and + noisy stderr, such that each may be measured + independently + """ + + def __init__( + self, + charset: str = "utf-8", + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + echo_stdin: bool = False, + mix_stderr: bool = True, + ) -> None: + self.charset = charset + self.env: t.Mapping[str, t.Optional[str]] = env or {} + self.echo_stdin = echo_stdin + self.mix_stderr = mix_stderr + + def get_default_prog_name(self, cli: "BaseCommand") -> str: + """Given a command object it will return the default program name + for it. The default is the `name` attribute or ``"root"`` if not + set. + """ + return cli.name or "root" + + def make_env( + self, overrides: t.Optional[t.Mapping[str, t.Optional[str]]] = None + ) -> t.Mapping[str, t.Optional[str]]: + """Returns the environment overrides for invoking a script.""" + rv = dict(self.env) + if overrides: + rv.update(overrides) + return rv + + @contextlib.contextmanager + def isolation( + self, + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None, + env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, + color: bool = False, + ) -> t.Iterator[t.Tuple[io.BytesIO, t.Optional[io.BytesIO]]]: + """A context manager that sets up the isolation for invoking of a + command line tool. This sets up stdin with the given input data + and `os.environ` with the overrides from the given dictionary. + This also rebinds some internals in Click to be mocked (like the + prompt functionality). + + This is automatically done in the :meth:`invoke` method. + + :param input: the input stream to put into sys.stdin. 
+ :param env: the environment overrides as dictionary. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + ``stderr`` is opened with ``errors="backslashreplace"`` + instead of the default ``"strict"``. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + """ + bytes_input = make_input_stream(input, self.charset) + echo_input = None + + old_stdin = sys.stdin + old_stdout = sys.stdout + old_stderr = sys.stderr + old_forced_width = formatting.FORCED_WIDTH + formatting.FORCED_WIDTH = 80 + + env = self.make_env(env) + + bytes_output = io.BytesIO() + + if self.echo_stdin: + bytes_input = echo_input = t.cast( + t.BinaryIO, EchoingStdin(bytes_input, bytes_output) + ) + + sys.stdin = text_input = _NamedTextIOWrapper( + bytes_input, encoding=self.charset, name="<stdin>", mode="r" + ) + + if self.echo_stdin: + # Force unbuffered reads, otherwise TextIOWrapper reads a + # large chunk which is echoed early. + text_input._CHUNK_SIZE = 1 # type: ignore + + sys.stdout = _NamedTextIOWrapper( + bytes_output, encoding=self.charset, name="<stdout>", mode="w" + ) + + bytes_error = None + if self.mix_stderr: + sys.stderr = sys.stdout + else: + bytes_error = io.BytesIO() + sys.stderr = _NamedTextIOWrapper( + bytes_error, + encoding=self.charset, + name="<stderr>", + mode="w", + errors="backslashreplace", + ) + + @_pause_echo(echo_input) # type: ignore + def visible_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(prompt or "") + val = text_input.readline().rstrip("\r\n") + sys.stdout.write(f"{val}\n") + sys.stdout.flush() + return val + + @_pause_echo(echo_input) # type: ignore + def hidden_input(prompt: t.Optional[str] = None) -> str: + sys.stdout.write(f"{prompt or ''}\n") + sys.stdout.flush() + return text_input.readline().rstrip("\r\n") + + @_pause_echo(echo_input) # type: ignore + def _getchar(echo: bool) -> str: + char = sys.stdin.read(1) + + if echo: + sys.stdout.write(char) + + sys.stdout.flush() + return char + + default_color = color + + def should_strip_ansi( + stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None + ) -> bool: + if color is None: + return not default_color + return not color + + old_visible_prompt_func = termui.visible_prompt_func + old_hidden_prompt_func = termui.hidden_prompt_func + old__getchar_func = termui._getchar + old_should_strip_ansi = utils.should_strip_ansi # type: ignore + termui.visible_prompt_func = visible_input + termui.hidden_prompt_func = hidden_input + termui._getchar = _getchar + utils.should_strip_ansi = should_strip_ansi # type: ignore + + old_env = {} + try: + for key, value in env.items(): + old_env[key] = os.environ.get(key) + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + yield (bytes_output, bytes_error) + finally: + for key, value in old_env.items(): + if value is None: + try: + del os.environ[key] + except Exception: + pass + else: + os.environ[key] = value + sys.stdout = old_stdout + sys.stderr = old_stderr + sys.stdin = old_stdin + termui.visible_prompt_func = old_visible_prompt_func + termui.hidden_prompt_func = old_hidden_prompt_func + termui._getchar = old__getchar_func + utils.should_strip_ansi = old_should_strip_ansi # type: ignore + formatting.FORCED_WIDTH = old_forced_width + + def invoke( + self, + cli: "BaseCommand", + args: t.Optional[t.Union[str, t.Sequence[str]]] = None, + input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None, + env: 
t.Optional[t.Mapping[str, t.Optional[str]]] = None, + catch_exceptions: bool = True, + color: bool = False, + **extra: t.Any, + ) -> Result: + """Invokes a command in an isolated environment. The arguments are + forwarded directly to the command line script, the `extra` keyword + arguments are passed to the :meth:`~clickpkg.Command.main` function of + the command. + + This returns a :class:`Result` object. + + :param cli: the command to invoke + :param args: the arguments to invoke. It may be given as an iterable + or a string. When given as string it will be interpreted + as a Unix shell command. More details at + :func:`shlex.split`. + :param input: the input data for `sys.stdin`. + :param env: the environment overrides. + :param catch_exceptions: Whether to catch any other exceptions than + ``SystemExit``. + :param extra: the keyword arguments to pass to :meth:`main`. + :param color: whether the output should contain color codes. The + application can still override this explicitly. + + .. versionchanged:: 8.0 + The result object has the ``return_value`` attribute with + the value returned from the invoked command. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionchanged:: 3.0 + Added the ``catch_exceptions`` parameter. + + .. versionchanged:: 3.0 + The result object has the ``exc_info`` attribute with the + traceback if available. + """ + exc_info = None + with self.isolation(input=input, env=env, color=color) as outstreams: + return_value = None + exception: t.Optional[BaseException] = None + exit_code = 0 + + if isinstance(args, str): + args = shlex.split(args) + + try: + prog_name = extra.pop("prog_name") + except KeyError: + prog_name = self.get_default_prog_name(cli) + + try: + return_value = cli.main(args=args or (), prog_name=prog_name, **extra) + except SystemExit as e: + exc_info = sys.exc_info() + e_code = t.cast(t.Optional[t.Union[int, t.Any]], e.code) + + if e_code is None: + e_code = 0 + + if e_code != 0: + exception = e + + if not isinstance(e_code, int): + sys.stdout.write(str(e_code)) + sys.stdout.write("\n") + e_code = 1 + + exit_code = e_code + + except Exception as e: + if not catch_exceptions: + raise + exception = e + exit_code = 1 + exc_info = sys.exc_info() + finally: + sys.stdout.flush() + stdout = outstreams[0].getvalue() + if self.mix_stderr: + stderr = None + else: + stderr = outstreams[1].getvalue() # type: ignore + + return Result( + runner=self, + stdout_bytes=stdout, + stderr_bytes=stderr, + return_value=return_value, + exit_code=exit_code, + exception=exception, + exc_info=exc_info, # type: ignore + ) + + @contextlib.contextmanager + def isolated_filesystem( + self, temp_dir: t.Optional[t.Union[str, "os.PathLike[str]"]] = None + ) -> t.Iterator[str]: + """A context manager that creates a temporary directory and + changes the current working directory to it. This isolates tests + that affect the contents of the CWD to prevent them from + interfering with each other. + + :param temp_dir: Create the temporary directory under this + directory. If given, the created directory is not removed + when exiting. + + .. versionchanged:: 8.0 + Added the ``temp_dir`` parameter. 
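# --- editorial example (not part of the diff) --------------------------
# A minimal test sketch for CliRunner.invoke(); the `hello` command is
# hypothetical. invoke() returns the Result object defined above.
import click
from click.testing import CliRunner

@click.command()
@click.option("--name", default="world")
def hello(name):
    click.echo(f"Hello {name}!")

runner = CliRunner()
result = runner.invoke(hello, ["--name", "Click"])

assert result.exit_code == 0
assert result.output == "Hello Click!\n"
# ------------------------------------------------------------------------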
+ """ + cwd = os.getcwd() + dt = tempfile.mkdtemp(dir=temp_dir) + os.chdir(dt) + + try: + yield dt + finally: + os.chdir(cwd) + + if temp_dir is None: + try: + shutil.rmtree(dt) + except OSError: # noqa: B014 + pass diff --git a/backend/test/lib/python3.8/site-packages/click/types.py b/backend/test/lib/python3.8/site-packages/click/types.py new file mode 100644 index 0000000000000000000000000000000000000000..2b1d1797f2e115e9bc976bcaf7d8e1884a91e91c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/types.py @@ -0,0 +1,1089 @@ +import os +import stat +import sys +import typing as t +from datetime import datetime +from gettext import gettext as _ +from gettext import ngettext + +from ._compat import _get_argv_encoding +from ._compat import open_stream +from .exceptions import BadParameter +from .utils import format_filename +from .utils import LazyFile +from .utils import safecall + +if t.TYPE_CHECKING: + import typing_extensions as te + from .core import Context + from .core import Parameter + from .shell_completion import CompletionItem + + +class ParamType: + """Represents the type of a parameter. Validates and converts values + from the command line or Python into the correct type. + + To implement a custom type, subclass and implement at least the + following: + + - The :attr:`name` class attribute must be set. + - Calling an instance of the type with ``None`` must return + ``None``. This is already implemented by default. + - :meth:`convert` must convert string values to the correct type. + - :meth:`convert` must accept values that are already the correct + type. + - It must be able to convert a value if the ``ctx`` and ``param`` + arguments are ``None``. This can occur when converting prompt + input. + """ + + is_composite: t.ClassVar[bool] = False + arity: t.ClassVar[int] = 1 + + #: the descriptive name of this type + name: str + + #: if a list of this type is expected and the value is pulled from a + #: string environment variable, this is what splits it up. `None` + #: means any whitespace. For all parameters the general rule is that + #: whitespace splits them up. The exception are paths and files which + #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on + #: Windows). + envvar_list_splitter: t.ClassVar[t.Optional[str]] = None + + def to_info_dict(self) -> t.Dict[str, t.Any]: + """Gather information that could be useful for a tool generating + user-facing documentation. + + Use :meth:`click.Context.to_info_dict` to traverse the entire + CLI structure. + + .. versionadded:: 8.0 + """ + # The class name without the "ParamType" suffix. + param_type = type(self).__name__.partition("ParamType")[0] + param_type = param_type.partition("ParameterType")[0] + + # Custom subclasses might not remember to set a name. + if hasattr(self, "name"): + name = self.name + else: + name = param_type + + return {"param_type": param_type, "name": name} + + def __call__( + self, + value: t.Any, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> t.Any: + if value is not None: + return self.convert(value, param, ctx) + + def get_metavar(self, param: "Parameter") -> t.Optional[str]: + """Returns the metavar default for this param if it provides one.""" + + def get_missing_message(self, param: "Parameter") -> t.Optional[str]: + """Optionally might return extra information about a missing + parameter. + + .. 
versionadded:: 2.0 + """ + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + """Convert the value to the correct type. This is not called if + the value is ``None`` (the missing value). + + This must accept string values from the command line, as well as + values that are already the correct type. It may also convert + other compatible types. + + The ``param`` and ``ctx`` arguments may be ``None`` in certain + situations, such as when converting prompt input. + + If the value cannot be converted, call :meth:`fail` with a + descriptive message. + + :param value: The value to convert. + :param param: The parameter that is using this type to convert + its value. May be ``None``. + :param ctx: The current context that arrived at this value. May + be ``None``. + """ + return value + + def split_envvar_value(self, rv: str) -> t.Sequence[str]: + """Given a value from an environment variable this splits it up + into small chunks depending on the defined envvar list splitter. + + If the splitter is set to `None`, which means that whitespace splits, + then leading and trailing whitespace is ignored. Otherwise, leading + and trailing splitters usually lead to empty items being included. + """ + return (rv or "").split(self.envvar_list_splitter) + + def fail( + self, + message: str, + param: t.Optional["Parameter"] = None, + ctx: t.Optional["Context"] = None, + ) -> "t.NoReturn": + """Helper method to fail with an invalid value message.""" + raise BadParameter(message, ctx=ctx, param=param) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a list of + :class:`~click.shell_completion.CompletionItem` objects for the + incomplete value. Most types do not provide completions, but + some do, and this allows custom types to provide custom + completions as well. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. 
versionadded:: 8.0 + """ + return [] + + +class CompositeParamType(ParamType): + is_composite = True + + @property + def arity(self) -> int: # type: ignore + raise NotImplementedError() + + +class FuncParamType(ParamType): + def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: + self.name: str = func.__name__ + self.func = func + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["func"] = self.func + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self.func(value) + except ValueError: + try: + value = str(value) + except UnicodeError: + value = value.decode("utf-8", "replace") + + self.fail(value, param, ctx) + + +class UnprocessedParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + return value + + def __repr__(self) -> str: + return "UNPROCESSED" + + +class StringParamType(ParamType): + name = "text" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, bytes): + enc = _get_argv_encoding() + try: + value = value.decode(enc) + except UnicodeError: + fs_enc = sys.getfilesystemencoding() + if fs_enc != enc: + try: + value = value.decode(fs_enc) + except UnicodeError: + value = value.decode("utf-8", "replace") + else: + value = value.decode("utf-8", "replace") + return value + return str(value) + + def __repr__(self) -> str: + return "STRING" + + +class Choice(ParamType): + """The choice type allows a value to be checked against a fixed set + of supported values. All of these values have to be strings. + + You should only pass a list or tuple of choices. Other iterables + (like generators) may lead to surprising results. + + The resulting value will always be one of the originally passed choices + regardless of ``case_sensitive`` or any ``ctx.token_normalize_func`` + being specified. + + See :ref:`choice-opts` for an example. + + :param case_sensitive: Set to false to make choices case + insensitive. Defaults to true. + """ + + name = "choice" + + def __init__(self, choices: t.Sequence[str], case_sensitive: bool = True) -> None: + self.choices = choices + self.case_sensitive = case_sensitive + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["choices"] = self.choices + info_dict["case_sensitive"] = self.case_sensitive + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + choices_str = "|".join(self.choices) + + # Use curly braces to indicate a required argument. + if param.required and param.param_type_name == "argument": + return f"{{{choices_str}}}" + + # Use square braces to indicate an option or optional argument. 
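# --- editorial example (not part of the diff) --------------------------
# A hypothetical ParamType subclass following the contract documented
# above: set `name`, accept values that are already converted, and report
# bad input through fail() (which raises BadParameter).
import click

class CommaSeparated(click.ParamType):
    name = "comma-separated"

    def convert(self, value, param, ctx):
        if isinstance(value, tuple):
            return value  # already the correct type
        parts = tuple(p.strip() for p in str(value).split(","))
        if not all(parts):
            self.fail(f"{value!r} contains an empty item.", param, ctx)
        return parts

# Usage sketch: @click.option("--tags", type=CommaSeparated())
# ------------------------------------------------------------------------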
+ return f"[{choices_str}]" + + def get_missing_message(self, param: "Parameter") -> str: + return _("Choose from:\n\t{choices}").format(choices=",\n\t".join(self.choices)) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + # Match through normalization and case sensitivity + # first do token_normalize_func, then lowercase + # preserve original `value` to produce an accurate message in + # `self.fail` + normed_value = value + normed_choices = {choice: choice for choice in self.choices} + + if ctx is not None and ctx.token_normalize_func is not None: + normed_value = ctx.token_normalize_func(value) + normed_choices = { + ctx.token_normalize_func(normed_choice): original + for normed_choice, original in normed_choices.items() + } + + if not self.case_sensitive: + normed_value = normed_value.casefold() + normed_choices = { + normed_choice.casefold(): original + for normed_choice, original in normed_choices.items() + } + + if normed_value in normed_choices: + return normed_choices[normed_value] + + choices_str = ", ".join(map(repr, self.choices)) + self.fail( + ngettext( + "{value!r} is not {choice}.", + "{value!r} is not one of {choices}.", + len(self.choices), + ).format(value=value, choice=choices_str, choices=choices_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return f"Choice({list(self.choices)})" + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Complete choices that start with the incomplete value. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + str_choices = map(str, self.choices) + + if self.case_sensitive: + matched = (c for c in str_choices if c.startswith(incomplete)) + else: + incomplete = incomplete.lower() + matched = (c for c in str_choices if c.lower().startswith(incomplete)) + + return [CompletionItem(c) for c in matched] + + +class DateTime(ParamType): + """The DateTime type converts date strings into `datetime` objects. + + The format strings which are checked are configurable, but default to some + common (non-timezone aware) ISO 8601 formats. + + When specifying *DateTime* formats, you should only pass a list or a tuple. + Other iterables, like generators, may lead to surprising results. + + The format strings are processed using ``datetime.strptime``, and this + consequently defines the format strings which are allowed. + + Parsing is tried using each format, in order, and the first format which + parses successfully is used. + + :param formats: A list or tuple of date format strings, in the order in + which they should be tried. Defaults to + ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, + ``'%Y-%m-%d %H:%M:%S'``. 
+ """ + + name = "datetime" + + def __init__(self, formats: t.Optional[t.Sequence[str]] = None): + self.formats: t.Sequence[str] = formats or [ + "%Y-%m-%d", + "%Y-%m-%dT%H:%M:%S", + "%Y-%m-%d %H:%M:%S", + ] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["formats"] = self.formats + return info_dict + + def get_metavar(self, param: "Parameter") -> str: + return f"[{'|'.join(self.formats)}]" + + def _try_to_convert_date(self, value: t.Any, format: str) -> t.Optional[datetime]: + try: + return datetime.strptime(value, format) + except ValueError: + return None + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if isinstance(value, datetime): + return value + + for format in self.formats: + converted = self._try_to_convert_date(value, format) + + if converted is not None: + return converted + + formats_str = ", ".join(map(repr, self.formats)) + self.fail( + ngettext( + "{value!r} does not match the format {format}.", + "{value!r} does not match the formats {formats}.", + len(self.formats), + ).format(value=value, format=formats_str, formats=formats_str), + param, + ctx, + ) + + def __repr__(self) -> str: + return "DateTime" + + +class _NumberParamTypeBase(ParamType): + _number_class: t.ClassVar[t.Type[t.Any]] + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + try: + return self._number_class(value) + except ValueError: + self.fail( + _("{value!r} is not a valid {number_type}.").format( + value=value, number_type=self.name + ), + param, + ctx, + ) + + +class _NumberRangeBase(_NumberParamTypeBase): + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + self.min = min + self.max = max + self.min_open = min_open + self.max_open = max_open + self.clamp = clamp + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + min=self.min, + max=self.max, + min_open=self.min_open, + max_open=self.max_open, + clamp=self.clamp, + ) + return info_dict + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import operator + + rv = super().convert(value, param, ctx) + lt_min: bool = self.min is not None and ( + operator.le if self.min_open else operator.lt + )(rv, self.min) + gt_max: bool = self.max is not None and ( + operator.ge if self.max_open else operator.gt + )(rv, self.max) + + if self.clamp: + if lt_min: + return self._clamp(self.min, 1, self.min_open) # type: ignore + + if gt_max: + return self._clamp(self.max, -1, self.max_open) # type: ignore + + if lt_min or gt_max: + self.fail( + _("{value} is not in the range {range}.").format( + value=rv, range=self._describe_range() + ), + param, + ctx, + ) + + return rv + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + """Find the valid value to clamp to bound in the given + direction. + + :param bound: The boundary value. + :param dir: 1 or -1 indicating the direction to move. + :param open: If true, the range does not include the bound. 
+ """ + raise NotImplementedError + + def _describe_range(self) -> str: + """Describe the range for use in help text.""" + if self.min is None: + op = "<" if self.max_open else "<=" + return f"x{op}{self.max}" + + if self.max is None: + op = ">" if self.min_open else ">=" + return f"x{op}{self.min}" + + lop = "<" if self.min_open else "<=" + rop = "<" if self.max_open else "<=" + return f"{self.min}{lop}x{rop}{self.max}" + + def __repr__(self) -> str: + clamp = " clamped" if self.clamp else "" + return f"<{type(self).__name__} {self._describe_range()}{clamp}>" + + +class IntParamType(_NumberParamTypeBase): + name = "integer" + _number_class = int + + def __repr__(self) -> str: + return "INT" + + +class IntRange(_NumberRangeBase, IntParamType): + """Restrict an :data:`click.INT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "integer range" + + def _clamp( # type: ignore + self, bound: int, dir: "te.Literal[1, -1]", open: bool + ) -> int: + if not open: + return bound + + return bound + dir + + +class FloatParamType(_NumberParamTypeBase): + name = "float" + _number_class = float + + def __repr__(self) -> str: + return "FLOAT" + + +class FloatRange(_NumberRangeBase, FloatParamType): + """Restrict a :data:`click.FLOAT` value to a range of accepted + values. See :ref:`ranges`. + + If ``min`` or ``max`` are not passed, any value is accepted in that + direction. If ``min_open`` or ``max_open`` are enabled, the + corresponding boundary is not included in the range. + + If ``clamp`` is enabled, a value outside the range is clamped to the + boundary instead of failing. This is not supported if either + boundary is marked ``open``. + + .. versionchanged:: 8.0 + Added the ``min_open`` and ``max_open`` parameters. + """ + + name = "float range" + + def __init__( + self, + min: t.Optional[float] = None, + max: t.Optional[float] = None, + min_open: bool = False, + max_open: bool = False, + clamp: bool = False, + ) -> None: + super().__init__( + min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp + ) + + if (min_open or max_open) and clamp: + raise TypeError("Clamping is not supported for open bounds.") + + def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: + if not open: + return bound + + # Could use Python 3.9's math.nextafter here, but clamping an + # open float range doesn't seem to be particularly useful. It's + # left up to the user to write a callback to do it if needed. 
+ raise RuntimeError("Clamping is not supported for open bounds.") + + +class BoolParamType(ParamType): + name = "boolean" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + if value in {False, True}: + return bool(value) + + norm = value.strip().lower() + + if norm in {"1", "true", "t", "yes", "y", "on"}: + return True + + if norm in {"0", "false", "f", "no", "n", "off"}: + return False + + self.fail( + _("{value!r} is not a valid boolean.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "BOOL" + + +class UUIDParameterType(ParamType): + name = "uuid" + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + import uuid + + if isinstance(value, uuid.UUID): + return value + + value = value.strip() + + try: + return uuid.UUID(value) + except ValueError: + self.fail( + _("{value!r} is not a valid UUID.").format(value=value), param, ctx + ) + + def __repr__(self) -> str: + return "UUID" + + +class File(ParamType): + """Declares a parameter to be a file for reading or writing. The file + is automatically closed once the context tears down (after the command + finished working). + + Files can be opened for reading or writing. The special value ``-`` + indicates stdin or stdout depending on the mode. + + By default, the file is opened for reading text data, but it can also be + opened in binary mode or for writing. The encoding parameter can be used + to force a specific encoding. + + The `lazy` flag controls if the file should be opened immediately or upon + first IO. The default is to be non-lazy for standard input and output + streams as well as files opened for reading, `lazy` otherwise. When opening a + file lazily for reading, it is still opened temporarily for validation, but + will not be held open until first IO. lazy is mainly useful when opening + for writing to avoid creating the file until it is needed. + + Starting with Click 2.0, files can also be opened atomically in which + case all writes go into a separate file in the same folder and upon + completion the file will be moved over to the original location. This + is useful if a file regularly read by other users is modified. + + See :ref:`file-args` for more information. 
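# --- editorial example (not part of the diff) --------------------------
# Sketch of the range types above. With clamp=True an out-of-range value
# snaps to the nearest bound; an open bound excludes the boundary itself
# (and, as enforced in FloatRange.__init__, open bounds cannot be
# combined with clamping for floats).
import click

r = click.IntRange(0, 10, clamp=True)
assert r.convert(42, None, None) == 10   # clamped down to max
assert r.convert(-5, None, None) == 0    # clamped up to min

open_r = click.IntRange(0, 10, min_open=True, clamp=True)
assert open_r.convert(-5, None, None) == 1  # open bound: clamps to 0 + 1
# ------------------------------------------------------------------------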
+ """ + + name = "filename" + envvar_list_splitter: t.ClassVar[str] = os.path.pathsep + + def __init__( + self, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: t.Optional[bool] = None, + atomic: bool = False, + ) -> None: + self.mode = mode + self.encoding = encoding + self.errors = errors + self.lazy = lazy + self.atomic = atomic + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update(mode=self.mode, encoding=self.encoding) + return info_dict + + def resolve_lazy_flag(self, value: "t.Union[str, os.PathLike[str]]") -> bool: + if self.lazy is not None: + return self.lazy + if os.fspath(value) == "-": + return False + elif "w" in self.mode: + return True + return False + + def convert( + self, + value: t.Union[str, "os.PathLike[str]", t.IO[t.Any]], + param: t.Optional["Parameter"], + ctx: t.Optional["Context"], + ) -> t.IO[t.Any]: + if _is_file_like(value): + return value + + value = t.cast("t.Union[str, os.PathLike[str]]", value) + + try: + lazy = self.resolve_lazy_flag(value) + + if lazy: + lf = LazyFile( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + if ctx is not None: + ctx.call_on_close(lf.close_intelligently) + + return t.cast(t.IO[t.Any], lf) + + f, should_close = open_stream( + value, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + + # If a context is provided, we automatically close the file + # at the end of the context execution (or flush out). If a + # context does not exist, it's the caller's responsibility to + # properly close the file. This for instance happens when the + # type is used with prompts. + if ctx is not None: + if should_close: + ctx.call_on_close(safecall(f.close)) + else: + ctx.call_on_close(safecall(f.flush)) + + return f + except OSError as e: # noqa: B014 + self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide file path completions. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + return [CompletionItem(incomplete, type="file")] + + +def _is_file_like(value: t.Any) -> "te.TypeGuard[t.IO[t.Any]]": + return hasattr(value, "read") or hasattr(value, "write") + + +class Path(ParamType): + """The ``Path`` type is similar to the :class:`File` type, but + returns the filename instead of an open file. Various checks can be + enabled to validate the type of file and permissions. + + :param exists: The file or directory needs to exist for the value to + be valid. If this is not set to ``True``, and the file does not + exist, then all further checks are silently skipped. + :param file_okay: Allow a file as a value. + :param dir_okay: Allow a directory as a value. + :param readable: if true, a readable check is performed. + :param writable: if true, a writable check is performed. + :param executable: if true, an executable check is performed. + :param resolve_path: Make the value absolute and resolve any + symlinks. A ``~`` is not expanded, as this is supposed to be + done by the shell only. 
+ :param allow_dash: Allow a single dash as a value, which indicates + a standard stream (but does not open it). Use + :func:`~click.open_file` to handle opening this value. + :param path_type: Convert the incoming path value to this type. If + ``None``, keep Python's default, which is ``str``. Useful to + convert to :class:`pathlib.Path`. + + .. versionchanged:: 8.1 + Added the ``executable`` parameter. + + .. versionchanged:: 8.0 + Allow passing ``path_type=pathlib.Path``. + + .. versionchanged:: 6.0 + Added the ``allow_dash`` parameter. + """ + + envvar_list_splitter: t.ClassVar[str] = os.path.pathsep + + def __init__( + self, + exists: bool = False, + file_okay: bool = True, + dir_okay: bool = True, + writable: bool = False, + readable: bool = True, + resolve_path: bool = False, + allow_dash: bool = False, + path_type: t.Optional[t.Type[t.Any]] = None, + executable: bool = False, + ): + self.exists = exists + self.file_okay = file_okay + self.dir_okay = dir_okay + self.readable = readable + self.writable = writable + self.executable = executable + self.resolve_path = resolve_path + self.allow_dash = allow_dash + self.type = path_type + + if self.file_okay and not self.dir_okay: + self.name: str = _("file") + elif self.dir_okay and not self.file_okay: + self.name = _("directory") + else: + self.name = _("path") + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict.update( + exists=self.exists, + file_okay=self.file_okay, + dir_okay=self.dir_okay, + writable=self.writable, + readable=self.readable, + allow_dash=self.allow_dash, + ) + return info_dict + + def coerce_path_result( + self, value: "t.Union[str, os.PathLike[str]]" + ) -> "t.Union[str, bytes, os.PathLike[str]]": + if self.type is not None and not isinstance(value, self.type): + if self.type is str: + return os.fsdecode(value) + elif self.type is bytes: + return os.fsencode(value) + else: + return t.cast("os.PathLike[str]", self.type(value)) + + return value + + def convert( + self, + value: "t.Union[str, os.PathLike[str]]", + param: t.Optional["Parameter"], + ctx: t.Optional["Context"], + ) -> "t.Union[str, bytes, os.PathLike[str]]": + rv = value + + is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") + + if not is_dash: + if self.resolve_path: + # os.path.realpath doesn't resolve symlinks on Windows + # until Python 3.8. Use pathlib for now. 
+ import pathlib + + rv = os.fsdecode(pathlib.Path(rv).resolve()) + + try: + st = os.stat(rv) + except OSError: + if not self.exists: + return self.coerce_path_result(rv) + self.fail( + _("{name} {filename!r} does not exist.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if not self.file_okay and stat.S_ISREG(st.st_mode): + self.fail( + _("{name} {filename!r} is a file.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + if not self.dir_okay and stat.S_ISDIR(st.st_mode): + self.fail( + _("{name} '{filename}' is a directory.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.readable and not os.access(rv, os.R_OK): + self.fail( + _("{name} {filename!r} is not readable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.writable and not os.access(rv, os.W_OK): + self.fail( + _("{name} {filename!r} is not writable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + if self.executable and not os.access(value, os.X_OK): + self.fail( + _("{name} {filename!r} is not executable.").format( + name=self.name.title(), filename=format_filename(value) + ), + param, + ctx, + ) + + return self.coerce_path_result(rv) + + def shell_complete( + self, ctx: "Context", param: "Parameter", incomplete: str + ) -> t.List["CompletionItem"]: + """Return a special completion marker that tells the completion + system to use the shell to provide path completions for only + directories or any paths. + + :param ctx: Invocation context for this command. + :param param: The parameter that is requesting completion. + :param incomplete: Value being completed. May be empty. + + .. versionadded:: 8.0 + """ + from click.shell_completion import CompletionItem + + type = "dir" if self.dir_okay and not self.file_okay else "file" + return [CompletionItem(incomplete, type=type)] + + +class Tuple(CompositeParamType): + """The default behavior of Click is to apply a type on a value directly. + This works well in most cases, except for when `nargs` is set to a fixed + count and different types should be used for different items. In this + case the :class:`Tuple` type can be used. This type can only be used + if `nargs` is set to a fixed number. + + For more information see :ref:`tuple-type`. + + This can be selected by using a Python tuple literal as a type. + + :param types: a list of types that should be used for the tuple items. 
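# --- editorial example (not part of the diff) --------------------------
# Sketch of the Path type above: existence and file/directory checks run
# in convert(), and path_type=pathlib.Path coerces the result. The `show`
# command is hypothetical.
import pathlib
import click

@click.command()
@click.argument(
    "src",
    type=click.Path(exists=True, dir_okay=False, path_type=pathlib.Path),
)
def show(src: pathlib.Path):
    click.echo(src.read_text())
# ------------------------------------------------------------------------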
+ """ + + def __init__(self, types: t.Sequence[t.Union[t.Type[t.Any], ParamType]]) -> None: + self.types: t.Sequence[ParamType] = [convert_type(ty) for ty in types] + + def to_info_dict(self) -> t.Dict[str, t.Any]: + info_dict = super().to_info_dict() + info_dict["types"] = [t.to_info_dict() for t in self.types] + return info_dict + + @property + def name(self) -> str: # type: ignore + return f"<{' '.join(ty.name for ty in self.types)}>" + + @property + def arity(self) -> int: # type: ignore + return len(self.types) + + def convert( + self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] + ) -> t.Any: + len_type = len(self.types) + len_value = len(value) + + if len_value != len_type: + self.fail( + ngettext( + "{len_type} values are required, but {len_value} was given.", + "{len_type} values are required, but {len_value} were given.", + len_value, + ).format(len_type=len_type, len_value=len_value), + param=param, + ctx=ctx, + ) + + return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) + + +def convert_type(ty: t.Optional[t.Any], default: t.Optional[t.Any] = None) -> ParamType: + """Find the most appropriate :class:`ParamType` for the given Python + type. If the type isn't provided, it can be inferred from a default + value. + """ + guessed_type = False + + if ty is None and default is not None: + if isinstance(default, (tuple, list)): + # If the default is empty, ty will remain None and will + # return STRING. + if default: + item = default[0] + + # A tuple of tuples needs to detect the inner types. + # Can't call convert recursively because that would + # incorrectly unwind the tuple to a single type. + if isinstance(item, (tuple, list)): + ty = tuple(map(type, item)) + else: + ty = type(item) + else: + ty = type(default) + + guessed_type = True + + if isinstance(ty, tuple): + return Tuple(ty) + + if isinstance(ty, ParamType): + return ty + + if ty is str or ty is None: + return STRING + + if ty is int: + return INT + + if ty is float: + return FLOAT + + if ty is bool: + return BOOL + + if guessed_type: + return STRING + + if __debug__: + try: + if issubclass(ty, ParamType): + raise AssertionError( + f"Attempted to use an uninstantiated parameter type ({ty})." + ) + except TypeError: + # ty is an instance (correct), so issubclass fails. + pass + + return FuncParamType(ty) + + +#: A dummy parameter type that just does nothing. From a user's +#: perspective this appears to just be the same as `STRING` but +#: internally no string conversion takes place if the input was bytes. +#: This is usually useful when working with file paths as they can +#: appear in bytes and unicode. +#: +#: For path related uses the :class:`Path` type is a better choice but +#: there are situations where an unprocessed type is useful which is why +#: it is is provided. +#: +#: .. versionadded:: 4.0 +UNPROCESSED = UnprocessedParamType() + +#: A unicode string parameter type which is the implicit default. This +#: can also be selected by using ``str`` as type. +STRING = StringParamType() + +#: An integer parameter. This can also be selected by using ``int`` as +#: type. +INT = IntParamType() + +#: A floating point value parameter. This can also be selected by using +#: ``float`` as type. +FLOAT = FloatParamType() + +#: A boolean parameter. This is the default for boolean flags. This can +#: also be selected by using ``bool`` as a type. +BOOL = BoolParamType() + +#: A UUID parameter. 
+UUID = UUIDParameterType() diff --git a/backend/test/lib/python3.8/site-packages/click/utils.py b/backend/test/lib/python3.8/site-packages/click/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..d536434f0bd00cd6fd910c506f5b85a8e485b964 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/click/utils.py @@ -0,0 +1,624 @@ +import os +import re +import sys +import typing as t +from functools import update_wrapper +from types import ModuleType +from types import TracebackType + +from ._compat import _default_text_stderr +from ._compat import _default_text_stdout +from ._compat import _find_binary_writer +from ._compat import auto_wrap_for_ansi +from ._compat import binary_streams +from ._compat import open_stream +from ._compat import should_strip_ansi +from ._compat import strip_ansi +from ._compat import text_streams +from ._compat import WIN +from .globals import resolve_color_default + +if t.TYPE_CHECKING: + import typing_extensions as te + + P = te.ParamSpec("P") + +R = t.TypeVar("R") + + +def _posixify(name: str) -> str: + return "-".join(name.split()).lower() + + +def safecall(func: "t.Callable[P, R]") -> "t.Callable[P, t.Optional[R]]": + """Wraps a function so that it swallows exceptions.""" + + def wrapper(*args: "P.args", **kwargs: "P.kwargs") -> t.Optional[R]: + try: + return func(*args, **kwargs) + except Exception: + pass + return None + + return update_wrapper(wrapper, func) + + +def make_str(value: t.Any) -> str: + """Converts a value into a valid string.""" + if isinstance(value, bytes): + try: + return value.decode(sys.getfilesystemencoding()) + except UnicodeError: + return value.decode("utf-8", "replace") + return str(value) + + +def make_default_short_help(help: str, max_length: int = 45) -> str: + """Returns a condensed version of help string.""" + # Consider only the first paragraph. + paragraph_end = help.find("\n\n") + + if paragraph_end != -1: + help = help[:paragraph_end] + + # Collapse newlines, tabs, and spaces. + words = help.split() + + if not words: + return "" + + # The first paragraph started with a "no rewrap" marker, ignore it. + if words[0] == "\b": + words = words[1:] + + total_length = 0 + last_index = len(words) - 1 + + for i, word in enumerate(words): + total_length += len(word) + (i > 0) + + if total_length > max_length: # too long, truncate + break + + if word[-1] == ".": # sentence end, truncate without "..." + return " ".join(words[: i + 1]) + + if total_length == max_length and i != last_index: + break # not at sentence end, truncate with "..." + else: + return " ".join(words) # no truncation needed + + # Account for the length of the suffix. + total_length += len("...") + + # remove words until the length is short enough + while i > 0: + total_length -= len(words[i]) + (i > 0) + + if total_length <= max_length: + break + + i -= 1 + + return " ".join(words[:i]) + "..." + + +class LazyFile: + """A lazy file works like a regular file but it does not fully open + the file but it does perform some basic checks early to see if the + filename parameter does make sense. This is useful for safely opening + files for writing. 
+ """ + + def __init__( + self, + filename: t.Union[str, "os.PathLike[str]"], + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + atomic: bool = False, + ): + self.name: str = os.fspath(filename) + self.mode = mode + self.encoding = encoding + self.errors = errors + self.atomic = atomic + self._f: t.Optional[t.IO[t.Any]] + self.should_close: bool + + if self.name == "-": + self._f, self.should_close = open_stream(filename, mode, encoding, errors) + else: + if "r" in mode: + # Open and close the file in case we're opening it for + # reading so that we can catch at least some errors in + # some cases early. + open(filename, mode).close() + self._f = None + self.should_close = True + + def __getattr__(self, name: str) -> t.Any: + return getattr(self.open(), name) + + def __repr__(self) -> str: + if self._f is not None: + return repr(self._f) + return f"<unopened file '{format_filename(self.name)}' {self.mode}>" + + def open(self) -> t.IO[t.Any]: + """Opens the file if it's not yet open. This call might fail with + a :exc:`FileError`. Not handling this error will produce an error + that Click shows. + """ + if self._f is not None: + return self._f + try: + rv, self.should_close = open_stream( + self.name, self.mode, self.encoding, self.errors, atomic=self.atomic + ) + except OSError as e: # noqa: E402 + from .exceptions import FileError + + raise FileError(self.name, hint=e.strerror) from e + self._f = rv + return rv + + def close(self) -> None: + """Closes the underlying file, no matter what.""" + if self._f is not None: + self._f.close() + + def close_intelligently(self) -> None: + """This function only closes the file if it was opened by the lazy + file wrapper. For instance this will never close stdin. + """ + if self.should_close: + self.close() + + def __enter__(self) -> "LazyFile": + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + self.close_intelligently() + + def __iter__(self) -> t.Iterator[t.AnyStr]: + self.open() + return iter(self._f) # type: ignore + + +class KeepOpenFile: + def __init__(self, file: t.IO[t.Any]) -> None: + self._file: t.IO[t.Any] = file + + def __getattr__(self, name: str) -> t.Any: + return getattr(self._file, name) + + def __enter__(self) -> "KeepOpenFile": + return self + + def __exit__( + self, + exc_type: t.Optional[t.Type[BaseException]], + exc_value: t.Optional[BaseException], + tb: t.Optional[TracebackType], + ) -> None: + pass + + def __repr__(self) -> str: + return repr(self._file) + + def __iter__(self) -> t.Iterator[t.AnyStr]: + return iter(self._file) + + +def echo( + message: t.Optional[t.Any] = None, + file: t.Optional[t.IO[t.Any]] = None, + nl: bool = True, + err: bool = False, + color: t.Optional[bool] = None, +) -> None: + """Print a message and newline to stdout or a file. This should be + used instead of :func:`print` because it provides better support + for different data, files, and environments. + + Compared to :func:`print`, this does the following: + + - Ensures that the output encoding is not misconfigured on Linux. + - Supports Unicode in the Windows console. + - Supports writing to binary outputs, and supports writing bytes + to text outputs. + - Supports colors and styles on Windows. + - Removes ANSI color and style codes if the output does not look + like an interactive terminal. + - Always flushes the output. + + :param message: The string or bytes to output. 
Other objects are + converted to strings. + :param file: The file to write to. Defaults to ``stdout``. + :param err: Write to ``stderr`` instead of ``stdout``. + :param nl: Print a newline after the message. Enabled by default. + :param color: Force showing or hiding colors and other styles. By + default Click will remove color if the output does not look like + an interactive terminal. + + .. versionchanged:: 6.0 + Support Unicode output on the Windows console. Click does not + modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` + will still not support Unicode. + + .. versionchanged:: 4.0 + Added the ``color`` parameter. + + .. versionadded:: 3.0 + Added the ``err`` parameter. + + .. versionchanged:: 2.0 + Support colors on Windows if colorama is installed. + """ + if file is None: + if err: + file = _default_text_stderr() + else: + file = _default_text_stdout() + + # There are no standard streams attached to write to. For example, + # pythonw on Windows. + if file is None: + return + + # Convert non bytes/text into the native string type. + if message is not None and not isinstance(message, (str, bytes, bytearray)): + out: t.Optional[t.Union[str, bytes]] = str(message) + else: + out = message + + if nl: + out = out or "" + if isinstance(out, str): + out += "\n" + else: + out += b"\n" + + if not out: + file.flush() + return + + # If there is a message and the value looks like bytes, we manually + # need to find the binary stream and write the message in there. + # This is done separately so that most stream types will work as you + # would expect. Eg: you can write to StringIO for other cases. + if isinstance(out, (bytes, bytearray)): + binary_file = _find_binary_writer(file) + + if binary_file is not None: + file.flush() + binary_file.write(out) + binary_file.flush() + return + + # ANSI style code support. For no message or bytes, nothing happens. + # When outputting to a file instead of a terminal, strip codes. + else: + color = resolve_color_default(color) + + if should_strip_ansi(file, color): + out = strip_ansi(out) + elif WIN: + if auto_wrap_for_ansi is not None: + file = auto_wrap_for_ansi(file) # type: ignore + elif not color: + out = strip_ansi(out) + + file.write(out) # type: ignore + file.flush() + + +def get_binary_stream(name: "te.Literal['stdin', 'stdout', 'stderr']") -> t.BinaryIO: + """Returns a system stream for byte processing. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + """ + opener = binary_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener() + + +def get_text_stream( + name: "te.Literal['stdin', 'stdout', 'stderr']", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", +) -> t.TextIO: + """Returns a system stream for text processing. This usually returns + a wrapped stream around a binary stream returned from + :func:`get_binary_stream` but it also can take shortcuts for already + correctly configured streams. + + :param name: the name of the stream to open. Valid names are ``'stdin'``, + ``'stdout'`` and ``'stderr'`` + :param encoding: overrides the detected default encoding. + :param errors: overrides the default error mode. 
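# --- editorial example (not part of the diff) --------------------------
# Sketch of echo()'s handling of bytes and color, per the logic above:
# bytes are routed to the underlying binary stream with no styling, and
# color=False strips ANSI codes even on an interactive terminal.
import click

click.echo(b"\xf0\x9f\x91\x8d", nl=False)                # raw bytes path
click.echo(click.style("red?", fg="red"), color=False)   # codes stripped
click.echo("diagnostics", err=True)                      # goes to stderr
# ------------------------------------------------------------------------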
+ """ + opener = text_streams.get(name) + if opener is None: + raise TypeError(f"Unknown standard stream '{name}'") + return opener(encoding, errors) + + +def open_file( + filename: str, + mode: str = "r", + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + lazy: bool = False, + atomic: bool = False, +) -> t.IO[t.Any]: + """Open a file, with extra behavior to handle ``'-'`` to indicate + a standard stream, lazy open on write, and atomic write. Similar to + the behavior of the :class:`~click.File` param type. + + If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is + wrapped so that using it in a context manager will not close it. + This makes it possible to use the function without accidentally + closing a standard stream: + + .. code-block:: python + + with open_file(filename) as f: + ... + + :param filename: The name of the file to open, or ``'-'`` for + ``stdin``/``stdout``. + :param mode: The mode in which to open the file. + :param encoding: The encoding to decode or encode a file opened in + text mode. + :param errors: The error handling mode. + :param lazy: Wait to open the file until it is accessed. For read + mode, the file is temporarily opened to raise access errors + early, then closed until it is read again. + :param atomic: Write to a temporary file and replace the given file + on close. + + .. versionadded:: 3.0 + """ + if lazy: + return t.cast( + t.IO[t.Any], LazyFile(filename, mode, encoding, errors, atomic=atomic) + ) + + f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) + + if not should_close: + f = t.cast(t.IO[t.Any], KeepOpenFile(f)) + + return f + + +def format_filename( + filename: "t.Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]", + shorten: bool = False, +) -> str: + """Format a filename as a string for display. Ensures the filename can be + displayed by replacing any invalid bytes or surrogate escapes in the name + with the replacement character ``�``. + + Invalid bytes or surrogate escapes will raise an error when written to a + stream with ``errors="strict". This will typically happen with ``stdout`` + when the locale is something like ``en_GB.UTF-8``. + + Many scenarios *are* safe to write surrogates though, due to PEP 538 and + PEP 540, including: + + - Writing to ``stderr``, which uses ``errors="backslashreplace"``. + - The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens + stdout and stderr with ``errors="surrogateescape"``. + - None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``. + - Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``. + Python opens stdout and stderr with ``errors="surrogateescape"``. + + :param filename: formats a filename for UI display. This will also convert + the filename into unicode without failing. + :param shorten: this optionally shortens the filename to strip of the + path that leads up to it. + """ + if shorten: + filename = os.path.basename(filename) + else: + filename = os.fspath(filename) + + if isinstance(filename, bytes): + filename = filename.decode(sys.getfilesystemencoding(), "replace") + else: + filename = filename.encode("utf-8", "surrogateescape").decode( + "utf-8", "replace" + ) + + return filename + + +def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str: + r"""Returns the config folder for the application. The default behavior + is to return whatever is most appropriate for the operating system. 
+ + To give you an idea, for an app called ``"Foo Bar"``, something like + the following folders could be returned: + + Mac OS X: + ``~/Library/Application Support/Foo Bar`` + Mac OS X (POSIX): + ``~/.foo-bar`` + Unix: + ``~/.config/foo-bar`` + Unix (POSIX): + ``~/.foo-bar`` + Windows (roaming): + ``C:\Users\<user>\AppData\Roaming\Foo Bar`` + Windows (not roaming): + ``C:\Users\<user>\AppData\Local\Foo Bar`` + + .. versionadded:: 2.0 + + :param app_name: the application name. This should be properly capitalized + and can contain whitespace. + :param roaming: controls if the folder should be roaming or not on Windows. + Has no effect otherwise. + :param force_posix: if this is set to `True` then on any POSIX system the + folder will be stored in the home folder with a leading + dot instead of the XDG config home or darwin's + application support folder. + """ + if WIN: + key = "APPDATA" if roaming else "LOCALAPPDATA" + folder = os.environ.get(key) + if folder is None: + folder = os.path.expanduser("~") + return os.path.join(folder, app_name) + if force_posix: + return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}")) + if sys.platform == "darwin": + return os.path.join( + os.path.expanduser("~/Library/Application Support"), app_name + ) + return os.path.join( + os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")), + _posixify(app_name), + ) + + +class PacifyFlushWrapper: + """This wrapper is used to catch and suppress BrokenPipeErrors resulting + from ``.flush()`` being called on broken pipe during the shutdown/final-GC + of the Python interpreter. Notably ``.flush()`` is always called on + ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any + other cleanup code, and the case where the underlying file is not a broken + pipe, all calls and attributes are proxied. + """ + + def __init__(self, wrapped: t.IO[t.Any]) -> None: + self.wrapped = wrapped + + def flush(self) -> None: + try: + self.wrapped.flush() + except OSError as e: + import errno + + if e.errno != errno.EPIPE: + raise + + def __getattr__(self, attr: str) -> t.Any: + return getattr(self.wrapped, attr) + + +def _detect_program_name( + path: t.Optional[str] = None, _main: t.Optional[ModuleType] = None +) -> str: + """Determine the command used to run the program, for use in help + text. If a file or entry point was executed, the file name is + returned. If ``python -m`` was used to execute a module or package, + ``python -m name`` is returned. + + This doesn't try to be too precise, the goal is to give a concise + name for help text. Files are only shown as their name without the + path. ``python`` is only shown for modules, and the full path to + ``sys.executable`` is not shown. + + :param path: The Python file being executed. Python puts this in + ``sys.argv[0]``, which is used by default. + :param _main: The ``__main__`` module. This should only be passed + during internal testing. + + .. versionadded:: 8.0 + Based on command args detection in the Werkzeug reloader. + + :meta private: + """ + if _main is None: + _main = sys.modules["__main__"] + + if not path: + path = sys.argv[0] + + # The value of __package__ indicates how Python was called. It may + # not exist if a setuptools script is installed as an egg. It may be + # set incorrectly for entry points created with pip on Windows. + # It is set to "" inside a Shiv or PEX zipapp. 
+ if getattr(_main, "__package__", None) in {None, ""} or ( + os.name == "nt" + and _main.__package__ == "" + and not os.path.exists(path) + and os.path.exists(f"{path}.exe") + ): + # Executed a file, like "python app.py". + return os.path.basename(path) + + # Executed a module, like "python -m example". + # Rewritten by Python from "-m script" to "/path/to/script.py". + # Need to look at main module to determine how it was executed. + py_module = t.cast(str, _main.__package__) + name = os.path.splitext(os.path.basename(path))[0] + + # A submodule like "example.cli". + if name != "__main__": + py_module = f"{py_module}.{name}" + + return f"python -m {py_module.lstrip('.')}" + + +def _expand_args( + args: t.Iterable[str], + *, + user: bool = True, + env: bool = True, + glob_recursive: bool = True, +) -> t.List[str]: + """Simulate Unix shell expansion with Python functions. + + See :func:`glob.glob`, :func:`os.path.expanduser`, and + :func:`os.path.expandvars`. + + This is intended for use on Windows, where the shell does not do any + expansion. It may not exactly match what a Unix shell would do. + + :param args: List of command line arguments to expand. + :param user: Expand user home directory. + :param env: Expand environment variables. + :param glob_recursive: ``**`` matches directories recursively. + + .. versionchanged:: 8.1 + Invalid glob patterns are treated as empty expansions rather + than raising an error. + + .. versionadded:: 8.0 + + :meta private: + """ + from glob import glob + + out = [] + + for arg in args: + if user: + arg = os.path.expanduser(arg) + + if env: + arg = os.path.expandvars(arg) + + try: + matches = glob(arg, recursive=glob_recursive) + except re.error: + matches = [] + + if not matches: + out.append(arg) + else: + out.extend(matches) + + return out diff --git a/backend/test/lib/python3.8/site-packages/dns/__init__.py b/backend/test/lib/python3.8/site-packages/dns/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a4249b9e7207639a0932109e5d38e7db0b732fca --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/__init__.py @@ -0,0 +1,70 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009, 2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""dnspython DNS toolkit""" + +__all__ = [ + "asyncbackend", + "asyncquery", + "asyncresolver", + "dnssec", + "dnssecalgs", + "dnssectypes", + "e164", + "edns", + "entropy", + "exception", + "flags", + "immutable", + "inet", + "ipv4", + "ipv6", + "message", + "name", + "namedict", + "node", + "opcode", + "query", + "quic", + "rcode", + "rdata", + "rdataclass", + "rdataset", + "rdatatype", + "renderer", + "resolver", + "reversename", + "rrset", + "serial", + "set", + "tokenizer", + "transaction", + "tsig", + "tsigkeyring", + "ttl", + "rdtypes", + "update", + "version", + "versioned", + "wire", + "xfr", + "zone", + "zonetypes", + "zonefile", +] + +from dns.version import version as __version__ # noqa diff --git a/backend/test/lib/python3.8/site-packages/dns/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2b6e5cfbc39daed5b7b1f6e04f700882eb470bf0 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/__pycache__/_asyncbackend.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/__pycache__/_asyncbackend.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4545a47631bd3deecc61a3da4943051e39a90ce6 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/__pycache__/_asyncbackend.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/__pycache__/_asyncio_backend.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/__pycache__/_asyncio_backend.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..33b901dc3c1971d71d2d322a4ae9fa5cb5563e35 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/__pycache__/_asyncio_backend.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/__pycache__/_ddr.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/__pycache__/_ddr.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..38387676f303d1907ab070c8c1dfd9f106e46852 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/__pycache__/_ddr.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/__pycache__/_immutable_ctx.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/__pycache__/_immutable_ctx.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..accf9b2880316865b18db01a8d6636a33a14c2b8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/__pycache__/_immutable_ctx.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/__pycache__/_trio_backend.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/__pycache__/_trio_backend.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3db573d9c719da60733ed9d16ff74ffbfd503c7a Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/__pycache__/_trio_backend.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/__pycache__/asyncbackend.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/__pycache__/asyncbackend.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6a560934adcceb1776550a7157a8b69e42b5e7a8 Binary files /dev/null and 
b/backend/test/lib/python3.8/site-packages/dns/__pycache__/asyncbackend.cpython-38.pyc differ
[Binary diffs for the remaining compiled dns/__pycache__/*.pyc caches omitted.]
diff --git a/backend/test/lib/python3.8/site-packages/dns/_asyncbackend.py b/backend/test/lib/python3.8/site-packages/dns/_asyncbackend.py
new file mode 100644
index 0000000000000000000000000000000000000000..49f14fed682f6088fc506ce19978fbe62da1fafe
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/dns/_asyncbackend.py
@@ -0,0 +1,99 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# This is a nullcontext for both sync and async. 3.7 has a nullcontext,
+# but it is only for sync use.
+
+
+class NullContext:
+    def __init__(self, enter_result=None):
+        self.enter_result = enter_result
+
+    def __enter__(self):
+        return self.enter_result
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        pass
+
+    async def __aenter__(self):
+        return self.enter_result
+
+    async def __aexit__(self, exc_type, exc_value, traceback):
+        pass
+
+
+# These are declared here so backends can import them without creating
+# circular dependencies with dns.asyncbackend.
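+#
+# Sketch of the pattern NullContext supports in dns.asyncquery: wrap a
+# caller-supplied socket so "async with" does not close it, while freshly
+# created sockets still get closed on exit:
+#
+#     if sock:
+#         cm = NullContext(sock)                # caller's socket: not closed
+#     else:
+#         cm = await backend.make_socket(...)   # fresh socket: closed on exit
+#     async with cm as s:
+#         ...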
+ + +class Socket: # pragma: no cover + async def close(self): + pass + + async def getpeername(self): + raise NotImplementedError + + async def getsockname(self): + raise NotImplementedError + + async def getpeercert(self, timeout): + raise NotImplementedError + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_value, traceback): + await self.close() + + +class DatagramSocket(Socket): # pragma: no cover + def __init__(self, family: int): + self.family = family + + async def sendto(self, what, destination, timeout): + raise NotImplementedError + + async def recvfrom(self, size, timeout): + raise NotImplementedError + + +class StreamSocket(Socket): # pragma: no cover + async def sendall(self, what, timeout): + raise NotImplementedError + + async def recv(self, size, timeout): + raise NotImplementedError + + +class NullTransport: + async def connect_tcp(self, host, port, timeout, local_address): + raise NotImplementedError + + +class Backend: # pragma: no cover + def name(self): + return "unknown" + + async def make_socket( + self, + af, + socktype, + proto=0, + source=None, + destination=None, + timeout=None, + ssl_context=None, + server_hostname=None, + ): + raise NotImplementedError + + def datagram_connection_required(self): + return False + + async def sleep(self, interval): + raise NotImplementedError + + def get_transport_class(self): + raise NotImplementedError + + async def wait_for(self, awaitable, timeout): + raise NotImplementedError diff --git a/backend/test/lib/python3.8/site-packages/dns/_asyncio_backend.py b/backend/test/lib/python3.8/site-packages/dns/_asyncio_backend.py new file mode 100644 index 0000000000000000000000000000000000000000..2631228ecdc95684f1d30980780f3300bf81de9b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/_asyncio_backend.py @@ -0,0 +1,275 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""asyncio library query support""" + +import asyncio +import socket +import sys + +import dns._asyncbackend +import dns.exception + +_is_win32 = sys.platform == "win32" + + +def _get_running_loop(): + try: + return asyncio.get_running_loop() + except AttributeError: # pragma: no cover + return asyncio.get_event_loop() + + +class _DatagramProtocol: + def __init__(self): + self.transport = None + self.recvfrom = None + + def connection_made(self, transport): + self.transport = transport + + def datagram_received(self, data, addr): + if self.recvfrom and not self.recvfrom.done(): + self.recvfrom.set_result((data, addr)) + + def error_received(self, exc): # pragma: no cover + if self.recvfrom and not self.recvfrom.done(): + self.recvfrom.set_exception(exc) + + def connection_lost(self, exc): + if self.recvfrom and not self.recvfrom.done(): + if exc is None: + # EOF we triggered. Is there a better way to do this? 
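+                # (Raising and immediately catching gives the EOFError a
+                # traceback before it is passed to set_exception below.)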
+ try: + raise EOFError + except EOFError as e: + self.recvfrom.set_exception(e) + else: + self.recvfrom.set_exception(exc) + + def close(self): + self.transport.close() + + +async def _maybe_wait_for(awaitable, timeout): + if timeout is not None: + try: + return await asyncio.wait_for(awaitable, timeout) + except asyncio.TimeoutError: + raise dns.exception.Timeout(timeout=timeout) + else: + return await awaitable + + +class DatagramSocket(dns._asyncbackend.DatagramSocket): + def __init__(self, family, transport, protocol): + super().__init__(family) + self.transport = transport + self.protocol = protocol + + async def sendto(self, what, destination, timeout): # pragma: no cover + # no timeout for asyncio sendto + self.transport.sendto(what, destination) + return len(what) + + async def recvfrom(self, size, timeout): + # ignore size as there's no way I know to tell protocol about it + done = _get_running_loop().create_future() + try: + assert self.protocol.recvfrom is None + self.protocol.recvfrom = done + await _maybe_wait_for(done, timeout) + return done.result() + finally: + self.protocol.recvfrom = None + + async def close(self): + self.protocol.close() + + async def getpeername(self): + return self.transport.get_extra_info("peername") + + async def getsockname(self): + return self.transport.get_extra_info("sockname") + + async def getpeercert(self, timeout): + raise NotImplementedError + + +class StreamSocket(dns._asyncbackend.StreamSocket): + def __init__(self, af, reader, writer): + self.family = af + self.reader = reader + self.writer = writer + + async def sendall(self, what, timeout): + self.writer.write(what) + return await _maybe_wait_for(self.writer.drain(), timeout) + + async def recv(self, size, timeout): + return await _maybe_wait_for(self.reader.read(size), timeout) + + async def close(self): + self.writer.close() + + async def getpeername(self): + return self.writer.get_extra_info("peername") + + async def getsockname(self): + return self.writer.get_extra_info("sockname") + + async def getpeercert(self, timeout): + return self.writer.get_extra_info("peercert") + + +try: + import anyio + import httpcore + import httpcore._backends.anyio + import httpx + + _CoreAsyncNetworkBackend = httpcore.AsyncNetworkBackend + _CoreAnyIOStream = httpcore._backends.anyio.AnyIOStream + + from dns.query import _compute_times, _expiration_for_this_attempt, _remaining + + class _NetworkBackend(_CoreAsyncNetworkBackend): + def __init__(self, resolver, local_port, bootstrap_address, family): + super().__init__() + self._local_port = local_port + self._resolver = resolver + self._bootstrap_address = bootstrap_address + self._family = family + if local_port != 0: + raise NotImplementedError( + "the asyncio transport for HTTPX cannot set the local port" + ) + + async def connect_tcp( + self, host, port, timeout, local_address, socket_options=None + ): # pylint: disable=signature-differs + addresses = [] + _, expiration = _compute_times(timeout) + if dns.inet.is_address(host): + addresses.append(host) + elif self._bootstrap_address is not None: + addresses.append(self._bootstrap_address) + else: + timeout = _remaining(expiration) + family = self._family + if local_address: + family = dns.inet.af_for_address(local_address) + answers = await self._resolver.resolve_name( + host, family=family, lifetime=timeout + ) + addresses = answers.addresses() + for address in addresses: + try: + attempt_expiration = _expiration_for_this_attempt(2.0, expiration) + timeout = _remaining(attempt_expiration) + with 
anyio.fail_after(timeout): + stream = await anyio.connect_tcp( + remote_host=address, + remote_port=port, + local_host=local_address, + ) + return _CoreAnyIOStream(stream) + except Exception: + pass + raise httpcore.ConnectError + + async def connect_unix_socket( + self, path, timeout, socket_options=None + ): # pylint: disable=signature-differs + raise NotImplementedError + + async def sleep(self, seconds): # pylint: disable=signature-differs + await anyio.sleep(seconds) + + class _HTTPTransport(httpx.AsyncHTTPTransport): + def __init__( + self, + *args, + local_port=0, + bootstrap_address=None, + resolver=None, + family=socket.AF_UNSPEC, + **kwargs, + ): + if resolver is None: + # pylint: disable=import-outside-toplevel,redefined-outer-name + import dns.asyncresolver + + resolver = dns.asyncresolver.Resolver() + super().__init__(*args, **kwargs) + self._pool._network_backend = _NetworkBackend( + resolver, local_port, bootstrap_address, family + ) + +except ImportError: + _HTTPTransport = dns._asyncbackend.NullTransport # type: ignore + + +class Backend(dns._asyncbackend.Backend): + def name(self): + return "asyncio" + + async def make_socket( + self, + af, + socktype, + proto=0, + source=None, + destination=None, + timeout=None, + ssl_context=None, + server_hostname=None, + ): + if destination is None and socktype == socket.SOCK_DGRAM and _is_win32: + raise NotImplementedError( + "destinationless datagram sockets " + "are not supported by asyncio " + "on Windows" + ) + loop = _get_running_loop() + if socktype == socket.SOCK_DGRAM: + transport, protocol = await loop.create_datagram_endpoint( + _DatagramProtocol, + source, + family=af, + proto=proto, + remote_addr=destination, + ) + return DatagramSocket(af, transport, protocol) + elif socktype == socket.SOCK_STREAM: + if destination is None: + # This shouldn't happen, but we check to make code analysis software + # happier. + raise ValueError("destination required for stream sockets") + (r, w) = await _maybe_wait_for( + asyncio.open_connection( + destination[0], + destination[1], + ssl=ssl_context, + family=af, + proto=proto, + local_addr=source, + server_hostname=server_hostname, + ), + timeout, + ) + return StreamSocket(af, r, w) + raise NotImplementedError( + "unsupported socket " + f"type {socktype}" + ) # pragma: no cover + + async def sleep(self, interval): + await asyncio.sleep(interval) + + def datagram_connection_required(self): + return _is_win32 + + def get_transport_class(self): + return _HTTPTransport + + async def wait_for(self, awaitable, timeout): + return await _maybe_wait_for(awaitable, timeout) diff --git a/backend/test/lib/python3.8/site-packages/dns/_ddr.py b/backend/test/lib/python3.8/site-packages/dns/_ddr.py new file mode 100644 index 0000000000000000000000000000000000000000..bf5c11eb6d98168766c5df3b2201e298388fa49e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/_ddr.py @@ -0,0 +1,154 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license +# +# Support for Discovery of Designated Resolvers + +import socket +import time +from urllib.parse import urlparse + +import dns.asyncbackend +import dns.inet +import dns.name +import dns.nameserver +import dns.query +import dns.rdtypes.svcbbase + +# The special name of the local resolver when using DDR +_local_resolver_name = dns.name.from_text("_dns.resolver.arpa") + + +# +# Processing is split up into I/O independent and I/O dependent parts to +# make supporting sync and async versions easy. 
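+#
+# Rough flow (a sketch): the stub resolver queries _dns.resolver.arpa/SVCB
+# over classic DNS, _extract_nameservers_from_svcb() turns the answer into
+# _SVCBInfo objects, and ddr_tls_check_sync()/ddr_tls_check_async() verify
+# that the bootstrap address appears in the designated resolver's TLS
+# certificate before its encrypted nameservers are adopted.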
+# + + +class _SVCBInfo: + def __init__(self, bootstrap_address, port, hostname, nameservers): + self.bootstrap_address = bootstrap_address + self.port = port + self.hostname = hostname + self.nameservers = nameservers + + def ddr_check_certificate(self, cert): + """Verify that the _SVCBInfo's address is in the cert's subjectAltName (SAN)""" + for name, value in cert["subjectAltName"]: + if name == "IP Address" and value == self.bootstrap_address: + return True + return False + + def make_tls_context(self): + ssl = dns.query.ssl + ctx = ssl.create_default_context() + ctx.minimum_version = ssl.TLSVersion.TLSv1_2 + return ctx + + def ddr_tls_check_sync(self, lifetime): + ctx = self.make_tls_context() + expiration = time.time() + lifetime + with socket.create_connection( + (self.bootstrap_address, self.port), lifetime + ) as s: + with ctx.wrap_socket(s, server_hostname=self.hostname) as ts: + ts.settimeout(dns.query._remaining(expiration)) + ts.do_handshake() + cert = ts.getpeercert() + return self.ddr_check_certificate(cert) + + async def ddr_tls_check_async(self, lifetime, backend=None): + if backend is None: + backend = dns.asyncbackend.get_default_backend() + ctx = self.make_tls_context() + expiration = time.time() + lifetime + async with await backend.make_socket( + dns.inet.af_for_address(self.bootstrap_address), + socket.SOCK_STREAM, + 0, + None, + (self.bootstrap_address, self.port), + lifetime, + ctx, + self.hostname, + ) as ts: + cert = await ts.getpeercert(dns.query._remaining(expiration)) + return self.ddr_check_certificate(cert) + + +def _extract_nameservers_from_svcb(answer): + bootstrap_address = answer.nameserver + if not dns.inet.is_address(bootstrap_address): + return [] + infos = [] + for rr in answer.rrset.processing_order(): + nameservers = [] + param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.ALPN) + if param is None: + continue + alpns = set(param.ids) + host = rr.target.to_text(omit_final_dot=True) + port = None + param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.PORT) + if param is not None: + port = param.port + # For now we ignore address hints and address resolution and always use the + # bootstrap address + if b"h2" in alpns: + param = rr.params.get(dns.rdtypes.svcbbase.ParamKey.DOHPATH) + if param is None or not param.value.endswith(b"{?dns}"): + continue + path = param.value[:-6].decode() + if not path.startswith("/"): + path = "/" + path + if port is None: + port = 443 + url = f"https://{host}:{port}{path}" + # check the URL + try: + urlparse(url) + nameservers.append(dns.nameserver.DoHNameserver(url, bootstrap_address)) + except Exception: + # continue processing other ALPN types + pass + if b"dot" in alpns: + if port is None: + port = 853 + nameservers.append( + dns.nameserver.DoTNameserver(bootstrap_address, port, host) + ) + if b"doq" in alpns: + if port is None: + port = 853 + nameservers.append( + dns.nameserver.DoQNameserver(bootstrap_address, port, True, host) + ) + if len(nameservers) > 0: + infos.append(_SVCBInfo(bootstrap_address, port, host, nameservers)) + return infos + + +def _get_nameservers_sync(answer, lifetime): + """Return a list of TLS-validated resolver nameservers extracted from an SVCB + answer.""" + nameservers = [] + infos = _extract_nameservers_from_svcb(answer) + for info in infos: + try: + if info.ddr_tls_check_sync(lifetime): + nameservers.extend(info.nameservers) + except Exception: + pass + return nameservers + + +async def _get_nameservers_async(answer, lifetime): + """Return a list of TLS-validated resolver nameservers 
extracted from an SVCB + answer.""" + nameservers = [] + infos = _extract_nameservers_from_svcb(answer) + for info in infos: + try: + if await info.ddr_tls_check_async(lifetime): + nameservers.extend(info.nameservers) + except Exception: + pass + return nameservers diff --git a/backend/test/lib/python3.8/site-packages/dns/_immutable_ctx.py b/backend/test/lib/python3.8/site-packages/dns/_immutable_ctx.py new file mode 100644 index 0000000000000000000000000000000000000000..ae7a33bf3a5f92252a5191b23086fd62e431e785 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/_immutable_ctx.py @@ -0,0 +1,76 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# This implementation of the immutable decorator requires python >= +# 3.7, and is significantly more storage efficient when making classes +# with slots immutable. It's also faster. + +import contextvars +import inspect + +_in__init__ = contextvars.ContextVar("_immutable_in__init__", default=False) + + +class _Immutable: + """Immutable mixin class""" + + # We set slots to the empty list to say "we don't have any attributes". + # We do this so that if we're mixed in with a class with __slots__, we + # don't cause a __dict__ to be added which would waste space. + + __slots__ = () + + def __setattr__(self, name, value): + if _in__init__.get() is not self: + raise TypeError("object doesn't support attribute assignment") + else: + super().__setattr__(name, value) + + def __delattr__(self, name): + if _in__init__.get() is not self: + raise TypeError("object doesn't support attribute assignment") + else: + super().__delattr__(name) + + +def _immutable_init(f): + def nf(*args, **kwargs): + previous = _in__init__.set(args[0]) + try: + # call the actual __init__ + f(*args, **kwargs) + finally: + _in__init__.reset(previous) + + nf.__signature__ = inspect.signature(f) + return nf + + +def immutable(cls): + if _Immutable in cls.__mro__: + # Some ancestor already has the mixin, so just make sure we keep + # following the __init__ protocol. + cls.__init__ = _immutable_init(cls.__init__) + if hasattr(cls, "__setstate__"): + cls.__setstate__ = _immutable_init(cls.__setstate__) + ncls = cls + else: + # Mixin the Immutable class and follow the __init__ protocol. + class ncls(_Immutable, cls): + # We have to do the __slots__ declaration here too! 
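+            # (Otherwise instances of ncls would grow a __dict__, defeating
+            # the space savings of the slotted base class.)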
+ __slots__ = () + + @_immutable_init + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + if hasattr(cls, "__setstate__"): + + @_immutable_init + def __setstate__(self, *args, **kwargs): + super().__setstate__(*args, **kwargs) + + # make ncls have the same name and module as cls + ncls.__name__ = cls.__name__ + ncls.__qualname__ = cls.__qualname__ + ncls.__module__ = cls.__module__ + return ncls diff --git a/backend/test/lib/python3.8/site-packages/dns/_trio_backend.py b/backend/test/lib/python3.8/site-packages/dns/_trio_backend.py new file mode 100644 index 0000000000000000000000000000000000000000..4d9fb820445a6b46ba3cdb23e0311d70c6fdc026 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/_trio_backend.py @@ -0,0 +1,246 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""trio async I/O library query support""" + +import socket + +import trio +import trio.socket # type: ignore + +import dns._asyncbackend +import dns.exception +import dns.inet + + +def _maybe_timeout(timeout): + if timeout is not None: + return trio.move_on_after(timeout) + else: + return dns._asyncbackend.NullContext() + + +# for brevity +_lltuple = dns.inet.low_level_address_tuple + +# pylint: disable=redefined-outer-name + + +class DatagramSocket(dns._asyncbackend.DatagramSocket): + def __init__(self, socket): + super().__init__(socket.family) + self.socket = socket + + async def sendto(self, what, destination, timeout): + with _maybe_timeout(timeout): + return await self.socket.sendto(what, destination) + raise dns.exception.Timeout( + timeout=timeout + ) # pragma: no cover lgtm[py/unreachable-statement] + + async def recvfrom(self, size, timeout): + with _maybe_timeout(timeout): + return await self.socket.recvfrom(size) + raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] + + async def close(self): + self.socket.close() + + async def getpeername(self): + return self.socket.getpeername() + + async def getsockname(self): + return self.socket.getsockname() + + async def getpeercert(self, timeout): + raise NotImplementedError + + +class StreamSocket(dns._asyncbackend.StreamSocket): + def __init__(self, family, stream, tls=False): + self.family = family + self.stream = stream + self.tls = tls + + async def sendall(self, what, timeout): + with _maybe_timeout(timeout): + return await self.stream.send_all(what) + raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] + + async def recv(self, size, timeout): + with _maybe_timeout(timeout): + return await self.stream.receive_some(size) + raise dns.exception.Timeout(timeout=timeout) # lgtm[py/unreachable-statement] + + async def close(self): + await self.stream.aclose() + + async def getpeername(self): + if self.tls: + return self.stream.transport_stream.socket.getpeername() + else: + return self.stream.socket.getpeername() + + async def getsockname(self): + if self.tls: + return self.stream.transport_stream.socket.getsockname() + else: + return self.stream.socket.getsockname() + + async def getpeercert(self, timeout): + if self.tls: + with _maybe_timeout(timeout): + await self.stream.do_handshake() + return self.stream.getpeercert() + else: + raise NotImplementedError + + +try: + import httpcore + import httpcore._backends.trio + import httpx + + _CoreAsyncNetworkBackend = httpcore.AsyncNetworkBackend + _CoreTrioStream = httpcore._backends.trio.TrioStream + + from dns.query import _compute_times, _expiration_for_this_attempt, _remaining + + class 
_NetworkBackend(_CoreAsyncNetworkBackend): + def __init__(self, resolver, local_port, bootstrap_address, family): + super().__init__() + self._local_port = local_port + self._resolver = resolver + self._bootstrap_address = bootstrap_address + self._family = family + + async def connect_tcp( + self, host, port, timeout, local_address, socket_options=None + ): # pylint: disable=signature-differs + addresses = [] + _, expiration = _compute_times(timeout) + if dns.inet.is_address(host): + addresses.append(host) + elif self._bootstrap_address is not None: + addresses.append(self._bootstrap_address) + else: + timeout = _remaining(expiration) + family = self._family + if local_address: + family = dns.inet.af_for_address(local_address) + answers = await self._resolver.resolve_name( + host, family=family, lifetime=timeout + ) + addresses = answers.addresses() + for address in addresses: + try: + af = dns.inet.af_for_address(address) + if local_address is not None or self._local_port != 0: + source = (local_address, self._local_port) + else: + source = None + destination = (address, port) + attempt_expiration = _expiration_for_this_attempt(2.0, expiration) + timeout = _remaining(attempt_expiration) + sock = await Backend().make_socket( + af, socket.SOCK_STREAM, 0, source, destination, timeout + ) + return _CoreTrioStream(sock.stream) + except Exception: + continue + raise httpcore.ConnectError + + async def connect_unix_socket( + self, path, timeout, socket_options=None + ): # pylint: disable=signature-differs + raise NotImplementedError + + async def sleep(self, seconds): # pylint: disable=signature-differs + await trio.sleep(seconds) + + class _HTTPTransport(httpx.AsyncHTTPTransport): + def __init__( + self, + *args, + local_port=0, + bootstrap_address=None, + resolver=None, + family=socket.AF_UNSPEC, + **kwargs, + ): + if resolver is None: + # pylint: disable=import-outside-toplevel,redefined-outer-name + import dns.asyncresolver + + resolver = dns.asyncresolver.Resolver() + super().__init__(*args, **kwargs) + self._pool._network_backend = _NetworkBackend( + resolver, local_port, bootstrap_address, family + ) + +except ImportError: + _HTTPTransport = dns._asyncbackend.NullTransport # type: ignore + + +class Backend(dns._asyncbackend.Backend): + def name(self): + return "trio" + + async def make_socket( + self, + af, + socktype, + proto=0, + source=None, + destination=None, + timeout=None, + ssl_context=None, + server_hostname=None, + ): + s = trio.socket.socket(af, socktype, proto) + stream = None + try: + if source: + await s.bind(_lltuple(source, af)) + if socktype == socket.SOCK_STREAM: + connected = False + with _maybe_timeout(timeout): + await s.connect(_lltuple(destination, af)) + connected = True + if not connected: + raise dns.exception.Timeout( + timeout=timeout + ) # lgtm[py/unreachable-statement] + except Exception: # pragma: no cover + s.close() + raise + if socktype == socket.SOCK_DGRAM: + return DatagramSocket(s) + elif socktype == socket.SOCK_STREAM: + stream = trio.SocketStream(s) + tls = False + if ssl_context: + tls = True + try: + stream = trio.SSLStream( + stream, ssl_context, server_hostname=server_hostname + ) + except Exception: # pragma: no cover + await stream.aclose() + raise + return StreamSocket(af, stream, tls) + raise NotImplementedError( + "unsupported socket " + f"type {socktype}" + ) # pragma: no cover + + async def sleep(self, interval): + await trio.sleep(interval) + + def get_transport_class(self): + return _HTTPTransport + + async def wait_for(self, awaitable, 
timeout):
+        with _maybe_timeout(timeout):
+            return await awaitable
+        raise dns.exception.Timeout(
+            timeout=timeout
+        )  # pragma: no cover lgtm[py/unreachable-statement]
diff --git a/backend/test/lib/python3.8/site-packages/dns/asyncbackend.py b/backend/test/lib/python3.8/site-packages/dns/asyncbackend.py
new file mode 100644
index 0000000000000000000000000000000000000000..07d50e1ee92ad6d987e20f8cb3cd405e02385c02
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/dns/asyncbackend.py
@@ -0,0 +1,101 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+from typing import Dict
+
+import dns.exception
+
+# pylint: disable=unused-import
+from dns._asyncbackend import (  # noqa: F401 lgtm[py/unused-import]
+    Backend,
+    DatagramSocket,
+    Socket,
+    StreamSocket,
+)
+
+# pylint: enable=unused-import
+
+_default_backend = None
+
+_backends: Dict[str, Backend] = {}
+
+# Allow sniffio import to be disabled for testing purposes
+_no_sniffio = False
+
+
+class AsyncLibraryNotFoundError(dns.exception.DNSException):
+    pass
+
+
+def get_backend(name: str) -> Backend:
+    """Get the specified asynchronous backend.
+
+    *name*, a ``str``, the name of the backend. Currently the "trio"
+    and "asyncio" backends are available.
+
+    Raises NotImplementedError if an unknown backend name is specified.
+    """
+    # pylint: disable=import-outside-toplevel,redefined-outer-name
+    backend = _backends.get(name)
+    if backend:
+        return backend
+    if name == "trio":
+        import dns._trio_backend
+
+        backend = dns._trio_backend.Backend()
+    elif name == "asyncio":
+        import dns._asyncio_backend
+
+        backend = dns._asyncio_backend.Backend()
+    else:
+        raise NotImplementedError(f"unimplemented async backend {name}")
+    _backends[name] = backend
+    return backend
+
+
+def sniff() -> str:
+    """Attempt to determine the in-use asynchronous I/O library by using
+    the ``sniffio`` module if it is available.
+
+    Returns the name of the library, or raises AsyncLibraryNotFoundError
+    if the library cannot be determined.
+    """
+    # pylint: disable=import-outside-toplevel
+    try:
+        if _no_sniffio:
+            raise ImportError
+        import sniffio
+
+        try:
+            return sniffio.current_async_library()
+        except sniffio.AsyncLibraryNotFoundError:
+            raise AsyncLibraryNotFoundError("sniffio cannot determine async library")
+    except ImportError:
+        import asyncio
+
+        try:
+            asyncio.get_running_loop()
+            return "asyncio"
+        except RuntimeError:
+            raise AsyncLibraryNotFoundError("no async library detected")
+
+
+def get_default_backend() -> Backend:
+    """Get the default backend, initializing it if necessary."""
+    if _default_backend:
+        return _default_backend
+
+    return set_default_backend(sniff())
+
+
+def set_default_backend(name: str) -> Backend:
+    """Set the default backend.
+
+    It's not normally necessary to call this method, as
+    ``get_default_backend()`` will initialize the backend
+    appropriately in many cases. If ``sniffio`` is not installed, or
+    in testing situations, this function allows the backend to be set
+    explicitly.
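+
+    For example, to force the asyncio backend (a sketch)::
+
+        import dns.asyncbackend
+
+        dns.asyncbackend.set_default_backend("asyncio")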
+ """ + global _default_backend + _default_backend = get_backend(name) + return _default_backend diff --git a/backend/test/lib/python3.8/site-packages/dns/asyncquery.py b/backend/test/lib/python3.8/site-packages/dns/asyncquery.py new file mode 100644 index 0000000000000000000000000000000000000000..ecf9c1a5faace190cc7bbc900ef685aa06c9db24 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/asyncquery.py @@ -0,0 +1,758 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Talk to a DNS server.""" + +import base64 +import contextlib +import socket +import struct +import time +from typing import Any, Dict, Optional, Tuple, Union + +import dns.asyncbackend +import dns.exception +import dns.inet +import dns.message +import dns.name +import dns.quic +import dns.rcode +import dns.rdataclass +import dns.rdatatype +import dns.transaction +from dns._asyncbackend import NullContext +from dns.query import ( + BadResponse, + NoDOH, + NoDOQ, + UDPMode, + _compute_times, + _have_http2, + _matches_destination, + _remaining, + have_doh, + ssl, +) + +if have_doh: + import httpx + +# for brevity +_lltuple = dns.inet.low_level_address_tuple + + +def _source_tuple(af, address, port): + # Make a high level source tuple, or return None if address and port + # are both None + if address or port: + if address is None: + if af == socket.AF_INET: + address = "0.0.0.0" + elif af == socket.AF_INET6: + address = "::" + else: + raise NotImplementedError(f"unknown address family {af}") + return (address, port) + else: + return None + + +def _timeout(expiration, now=None): + if expiration is not None: + if not now: + now = time.time() + return max(expiration - now, 0) + else: + return None + + +async def send_udp( + sock: dns.asyncbackend.DatagramSocket, + what: Union[dns.message.Message, bytes], + destination: Any, + expiration: Optional[float] = None, +) -> Tuple[int, float]: + """Send a DNS message to the specified UDP socket. + + *sock*, a ``dns.asyncbackend.DatagramSocket``. + + *what*, a ``bytes`` or ``dns.message.Message``, the message to send. + + *destination*, a destination tuple appropriate for the address family + of the socket, specifying where to send the query. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. The expiration value is meaningless for the asyncio backend, as + asyncio's transport sendto() never blocks. + + Returns an ``(int, float)`` tuple of bytes sent and the sent time. 
+ """ + + if isinstance(what, dns.message.Message): + what = what.to_wire() + sent_time = time.time() + n = await sock.sendto(what, destination, _timeout(expiration, sent_time)) + return (n, sent_time) + + +async def receive_udp( + sock: dns.asyncbackend.DatagramSocket, + destination: Optional[Any] = None, + expiration: Optional[float] = None, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, + request_mac: Optional[bytes] = b"", + ignore_trailing: bool = False, + raise_on_truncation: bool = False, +) -> Any: + """Read a DNS message from a UDP socket. + + *sock*, a ``dns.asyncbackend.DatagramSocket``. + + See :py:func:`dns.query.receive_udp()` for the documentation of the other + parameters, and exceptions. + + Returns a ``(dns.message.Message, float, tuple)`` tuple of the received message, the + received time, and the address where the message arrived from. + """ + + wire = b"" + while 1: + (wire, from_address) = await sock.recvfrom(65535, _timeout(expiration)) + if _matches_destination( + sock.family, from_address, destination, ignore_unexpected + ): + break + received_time = time.time() + r = dns.message.from_wire( + wire, + keyring=keyring, + request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + raise_on_truncation=raise_on_truncation, + ) + return (r, received_time, from_address) + + +async def udp( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 53, + source: Optional[str] = None, + source_port: int = 0, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + raise_on_truncation: bool = False, + sock: Optional[dns.asyncbackend.DatagramSocket] = None, + backend: Optional[dns.asyncbackend.Backend] = None, +) -> dns.message.Message: + """Return the response obtained after sending a query via UDP. + + *sock*, a ``dns.asyncbackend.DatagramSocket``, or ``None``, + the socket to use for the query. If ``None``, the default, a + socket is created. Note that if a socket is provided, the + *source*, *source_port*, and *backend* are ignored. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.udp()` for the documentation of the other + parameters, exceptions, and return type of this method. 
+ """ + wire = q.to_wire() + (begin_time, expiration) = _compute_times(timeout) + af = dns.inet.af_for_address(where) + destination = _lltuple((where, port), af) + if sock: + cm: contextlib.AbstractAsyncContextManager = NullContext(sock) + else: + if not backend: + backend = dns.asyncbackend.get_default_backend() + stuple = _source_tuple(af, source, source_port) + if backend.datagram_connection_required(): + dtuple = (where, port) + else: + dtuple = None + cm = await backend.make_socket(af, socket.SOCK_DGRAM, 0, stuple, dtuple) + async with cm as s: + await send_udp(s, wire, destination, expiration) + (r, received_time, _) = await receive_udp( + s, + destination, + expiration, + ignore_unexpected, + one_rr_per_rrset, + q.keyring, + q.mac, + ignore_trailing, + raise_on_truncation, + ) + r.time = received_time - begin_time + if not q.is_response(r): + raise BadResponse + return r + + +async def udp_with_fallback( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 53, + source: Optional[str] = None, + source_port: int = 0, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + udp_sock: Optional[dns.asyncbackend.DatagramSocket] = None, + tcp_sock: Optional[dns.asyncbackend.StreamSocket] = None, + backend: Optional[dns.asyncbackend.Backend] = None, +) -> Tuple[dns.message.Message, bool]: + """Return the response to the query, trying UDP first and falling back + to TCP if UDP results in a truncated response. + + *udp_sock*, a ``dns.asyncbackend.DatagramSocket``, or ``None``, + the socket to use for the UDP query. If ``None``, the default, a + socket is created. Note that if a socket is provided the *source*, + *source_port*, and *backend* are ignored for the UDP query. + + *tcp_sock*, a ``dns.asyncbackend.StreamSocket``, or ``None``, the + socket to use for the TCP query. If ``None``, the default, a + socket is created. Note that if a socket is provided *where*, + *source*, *source_port*, and *backend* are ignored for the TCP query. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.udp_with_fallback()` for the documentation + of the other parameters, exceptions, and return type of this + method. + """ + try: + response = await udp( + q, + where, + timeout, + port, + source, + source_port, + ignore_unexpected, + one_rr_per_rrset, + ignore_trailing, + True, + udp_sock, + backend, + ) + return (response, False) + except dns.message.Truncated: + response = await tcp( + q, + where, + timeout, + port, + source, + source_port, + one_rr_per_rrset, + ignore_trailing, + tcp_sock, + backend, + ) + return (response, True) + + +async def send_tcp( + sock: dns.asyncbackend.StreamSocket, + what: Union[dns.message.Message, bytes], + expiration: Optional[float] = None, +) -> Tuple[int, float]: + """Send a DNS message to the specified TCP socket. + + *sock*, a ``dns.asyncbackend.StreamSocket``. + + See :py:func:`dns.query.send_tcp()` for the documentation of the other + parameters, exceptions, and return type of this method. 
+ """ + + if isinstance(what, dns.message.Message): + wire = what.to_wire() + else: + wire = what + l = len(wire) + # copying the wire into tcpmsg is inefficient, but lets us + # avoid writev() or doing a short write that would get pushed + # onto the net + tcpmsg = struct.pack("!H", l) + wire + sent_time = time.time() + await sock.sendall(tcpmsg, _timeout(expiration, sent_time)) + return (len(tcpmsg), sent_time) + + +async def _read_exactly(sock, count, expiration): + """Read the specified number of bytes from stream. Keep trying until we + either get the desired amount, or we hit EOF. + """ + s = b"" + while count > 0: + n = await sock.recv(count, _timeout(expiration)) + if n == b"": + raise EOFError + count = count - len(n) + s = s + n + return s + + +async def receive_tcp( + sock: dns.asyncbackend.StreamSocket, + expiration: Optional[float] = None, + one_rr_per_rrset: bool = False, + keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, + request_mac: Optional[bytes] = b"", + ignore_trailing: bool = False, +) -> Tuple[dns.message.Message, float]: + """Read a DNS message from a TCP socket. + + *sock*, a ``dns.asyncbackend.StreamSocket``. + + See :py:func:`dns.query.receive_tcp()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + + ldata = await _read_exactly(sock, 2, expiration) + (l,) = struct.unpack("!H", ldata) + wire = await _read_exactly(sock, l, expiration) + received_time = time.time() + r = dns.message.from_wire( + wire, + keyring=keyring, + request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + return (r, received_time) + + +async def tcp( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 53, + source: Optional[str] = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + sock: Optional[dns.asyncbackend.StreamSocket] = None, + backend: Optional[dns.asyncbackend.Backend] = None, +) -> dns.message.Message: + """Return the response obtained after sending a query via TCP. + + *sock*, a ``dns.asyncbacket.StreamSocket``, or ``None``, the + socket to use for the query. If ``None``, the default, a socket + is created. Note that if a socket is provided + *where*, *port*, *source*, *source_port*, and *backend* are ignored. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.tcp()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + + wire = q.to_wire() + (begin_time, expiration) = _compute_times(timeout) + if sock: + # Verify that the socket is connected, as if it's not connected, + # it's not writable, and the polling in send_tcp() will time out or + # hang forever. + await sock.getpeername() + cm: contextlib.AbstractAsyncContextManager = NullContext(sock) + else: + # These are simple (address, port) pairs, not family-dependent tuples + # you pass to low-level socket code. 
+ af = dns.inet.af_for_address(where) + stuple = _source_tuple(af, source, source_port) + dtuple = (where, port) + if not backend: + backend = dns.asyncbackend.get_default_backend() + cm = await backend.make_socket( + af, socket.SOCK_STREAM, 0, stuple, dtuple, timeout + ) + async with cm as s: + await send_tcp(s, wire, expiration) + (r, received_time) = await receive_tcp( + s, expiration, one_rr_per_rrset, q.keyring, q.mac, ignore_trailing + ) + r.time = received_time - begin_time + if not q.is_response(r): + raise BadResponse + return r + + +async def tls( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 853, + source: Optional[str] = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + sock: Optional[dns.asyncbackend.StreamSocket] = None, + backend: Optional[dns.asyncbackend.Backend] = None, + ssl_context: Optional[ssl.SSLContext] = None, + server_hostname: Optional[str] = None, +) -> dns.message.Message: + """Return the response obtained after sending a query via TLS. + + *sock*, an ``asyncbackend.StreamSocket``, or ``None``, the socket + to use for the query. If ``None``, the default, a socket is + created. Note that if a socket is provided, it must be a + connected SSL stream socket, and *where*, *port*, + *source*, *source_port*, *backend*, *ssl_context*, and *server_hostname* + are ignored. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.tls()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + (begin_time, expiration) = _compute_times(timeout) + if sock: + cm: contextlib.AbstractAsyncContextManager = NullContext(sock) + else: + if ssl_context is None: + # See the comment about ssl.create_default_context() in query.py + ssl_context = ssl.create_default_context() # lgtm[py/insecure-protocol] + ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2 + if server_hostname is None: + ssl_context.check_hostname = False + af = dns.inet.af_for_address(where) + stuple = _source_tuple(af, source, source_port) + dtuple = (where, port) + if not backend: + backend = dns.asyncbackend.get_default_backend() + cm = await backend.make_socket( + af, + socket.SOCK_STREAM, + 0, + stuple, + dtuple, + timeout, + ssl_context, + server_hostname, + ) + async with cm as s: + timeout = _timeout(expiration) + response = await tcp( + q, + where, + timeout, + port, + source, + source_port, + one_rr_per_rrset, + ignore_trailing, + s, + backend, + ) + end_time = time.time() + response.time = end_time - begin_time + return response + + +async def https( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 443, + source: Optional[str] = None, + source_port: int = 0, # pylint: disable=W0613 + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + client: Optional["httpx.AsyncClient"] = None, + path: str = "/dns-query", + post: bool = True, + verify: Union[bool, str] = True, + bootstrap_address: Optional[str] = None, + resolver: Optional["dns.asyncresolver.Resolver"] = None, + family: Optional[int] = socket.AF_UNSPEC, +) -> dns.message.Message: + """Return the response obtained after sending a query via DNS-over-HTTPS. + + *client*, a ``httpx.AsyncClient``. If provided, the client to use for + the query. 
+ + Unlike the other dnspython async functions, a backend cannot be provided + in this function because httpx always auto-detects the async backend. + + See :py:func:`dns.query.https()` for the documentation of the other + parameters, exceptions, and return type of this method. + """ + + if not have_doh: + raise NoDOH # pragma: no cover + if client and not isinstance(client, httpx.AsyncClient): + raise ValueError("session parameter must be an httpx.AsyncClient") + + wire = q.to_wire() + try: + af = dns.inet.af_for_address(where) + except ValueError: + af = None + transport = None + headers = {"accept": "application/dns-message"} + if af is not None and dns.inet.is_address(where): + if af == socket.AF_INET: + url = "https://{}:{}{}".format(where, port, path) + elif af == socket.AF_INET6: + url = "https://[{}]:{}{}".format(where, port, path) + else: + url = where + + backend = dns.asyncbackend.get_default_backend() + + if source is None: + local_address = None + local_port = 0 + else: + local_address = source + local_port = source_port + transport = backend.get_transport_class()( + local_address=local_address, + http1=True, + http2=_have_http2, + verify=verify, + local_port=local_port, + bootstrap_address=bootstrap_address, + resolver=resolver, + family=family, + ) + + if client: + cm: contextlib.AbstractAsyncContextManager = NullContext(client) + else: + cm = httpx.AsyncClient( + http1=True, http2=_have_http2, verify=verify, transport=transport + ) + + async with cm as the_client: + # see https://tools.ietf.org/html/rfc8484#section-4.1.1 for DoH + # GET and POST examples + if post: + headers.update( + { + "content-type": "application/dns-message", + "content-length": str(len(wire)), + } + ) + response = await backend.wait_for( + the_client.post(url, headers=headers, content=wire), timeout + ) + else: + wire = base64.urlsafe_b64encode(wire).rstrip(b"=") + twire = wire.decode() # httpx does a repr() if we give it bytes + response = await backend.wait_for( + the_client.get(url, headers=headers, params={"dns": twire}), timeout + ) + + # see https://tools.ietf.org/html/rfc8484#section-4.2.1 for info about DoH + # status codes + if response.status_code < 200 or response.status_code > 299: + raise ValueError( + "{} responded with status code {}" + "\nResponse body: {!r}".format( + where, response.status_code, response.content + ) + ) + r = dns.message.from_wire( + response.content, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = response.elapsed.total_seconds() + if not q.is_response(r): + raise BadResponse + return r + + +async def inbound_xfr( + where: str, + txn_manager: dns.transaction.TransactionManager, + query: Optional[dns.message.Message] = None, + port: int = 53, + timeout: Optional[float] = None, + lifetime: Optional[float] = None, + source: Optional[str] = None, + source_port: int = 0, + udp_mode: UDPMode = UDPMode.NEVER, + backend: Optional[dns.asyncbackend.Backend] = None, +) -> None: + """Conduct an inbound transfer and apply it via a transaction from the + txn_manager. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.inbound_xfr()` for the documentation of + the other parameters, exceptions, and return type of this method. 
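+ + For example, transferring a zone into a ``dns.zone.Zone`` (which is a ``dns.transaction.TransactionManager``; the primary server address is a placeholder): + + zone = dns.zone.Zone("example.") + await dns.asyncquery.inbound_xfr("192.0.2.1", zone)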
+ """ + if query is None: + (query, serial) = dns.xfr.make_query(txn_manager) + else: + serial = dns.xfr.extract_serial_from_query(query) + rdtype = query.question[0].rdtype + is_ixfr = rdtype == dns.rdatatype.IXFR + origin = txn_manager.from_wire_origin() + wire = query.to_wire() + af = dns.inet.af_for_address(where) + stuple = _source_tuple(af, source, source_port) + dtuple = (where, port) + (_, expiration) = _compute_times(lifetime) + retry = True + while retry: + retry = False + if is_ixfr and udp_mode != UDPMode.NEVER: + sock_type = socket.SOCK_DGRAM + is_udp = True + else: + sock_type = socket.SOCK_STREAM + is_udp = False + if not backend: + backend = dns.asyncbackend.get_default_backend() + s = await backend.make_socket( + af, sock_type, 0, stuple, dtuple, _timeout(expiration) + ) + async with s: + if is_udp: + await s.sendto(wire, dtuple, _timeout(expiration)) + else: + tcpmsg = struct.pack("!H", len(wire)) + wire + await s.sendall(tcpmsg, expiration) + with dns.xfr.Inbound(txn_manager, rdtype, serial, is_udp) as inbound: + done = False + tsig_ctx = None + while not done: + (_, mexpiration) = _compute_times(timeout) + if mexpiration is None or ( + expiration is not None and mexpiration > expiration + ): + mexpiration = expiration + if is_udp: + destination = _lltuple((where, port), af) + while True: + timeout = _timeout(mexpiration) + (rwire, from_address) = await s.recvfrom(65535, timeout) + if _matches_destination( + af, from_address, destination, True + ): + break + else: + ldata = await _read_exactly(s, 2, mexpiration) + (l,) = struct.unpack("!H", ldata) + rwire = await _read_exactly(s, l, mexpiration) + is_ixfr = rdtype == dns.rdatatype.IXFR + r = dns.message.from_wire( + rwire, + keyring=query.keyring, + request_mac=query.mac, + xfr=True, + origin=origin, + tsig_ctx=tsig_ctx, + multi=(not is_udp), + one_rr_per_rrset=is_ixfr, + ) + try: + done = inbound.process_message(r) + except dns.xfr.UseTCP: + assert is_udp # should not happen if we used TCP! + if udp_mode == UDPMode.ONLY: + raise + done = True + retry = True + udp_mode = UDPMode.NEVER + continue + tsig_ctx = r.tsig_ctx + if not retry and query.keyring and not r.had_tsig: + raise dns.exception.FormError("missing TSIG") + + +async def quic( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 853, + source: Optional[str] = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + connection: Optional[dns.quic.AsyncQuicConnection] = None, + verify: Union[bool, str] = True, + backend: Optional[dns.asyncbackend.Backend] = None, + server_hostname: Optional[str] = None, +) -> dns.message.Message: + """Return the response obtained after sending an asynchronous query via + DNS-over-QUIC. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.query.quic()` for the documentation of the other + parameters, exceptions, and return type of this method. 
+ """ + + if not dns.quic.have_quic: + raise NoDOQ("DNS-over-QUIC is not available.") # pragma: no cover + + q.id = 0 + wire = q.to_wire() + the_connection: dns.quic.AsyncQuicConnection + if connection: + cfactory = dns.quic.null_factory + mfactory = dns.quic.null_factory + the_connection = connection + else: + (cfactory, mfactory) = dns.quic.factories_for_backend(backend) + + async with cfactory() as context: + async with mfactory( + context, verify_mode=verify, server_name=server_hostname + ) as the_manager: + if not connection: + the_connection = the_manager.connect(where, port, source, source_port) + (start, expiration) = _compute_times(timeout) + stream = await the_connection.make_stream(timeout) + async with stream: + await stream.send(wire, True) + wire = await stream.receive(_remaining(expiration)) + finish = time.time() + r = dns.message.from_wire( + wire, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = max(finish - start, 0.0) + if not q.is_response(r): + raise BadResponse + return r diff --git a/backend/test/lib/python3.8/site-packages/dns/asyncresolver.py b/backend/test/lib/python3.8/site-packages/dns/asyncresolver.py new file mode 100644 index 0000000000000000000000000000000000000000..8f5e062a9ee5c1bf19acf363da7344b8d393e32a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/asyncresolver.py @@ -0,0 +1,475 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Asynchronous DNS stub resolver.""" + +import socket +import time +from typing import Any, Dict, List, Optional, Union + +import dns._ddr +import dns.asyncbackend +import dns.asyncquery +import dns.exception +import dns.name +import dns.query +import dns.rdataclass +import dns.rdatatype +import dns.resolver # lgtm[py/import-and-import-from] + +# import some resolver symbols for brevity +from dns.resolver import NXDOMAIN, NoAnswer, NoRootSOA, NotAbsolute + +# for indentation purposes below +_udp = dns.asyncquery.udp +_tcp = dns.asyncquery.tcp + + +class Resolver(dns.resolver.BaseResolver): + """Asynchronous DNS stub resolver.""" + + async def resolve( + self, + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + tcp: bool = False, + source: Optional[str] = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: Optional[float] = None, + search: Optional[bool] = None, + backend: Optional[dns.asyncbackend.Backend] = None, + ) -> dns.resolver.Answer: + """Query nameservers asynchronously to find the answer to the question. + + *backend*, a ``dns.asyncbackend.Backend``, or ``None``. 
If ``None``, + the default, then dnspython will use the default backend. + + See :py:func:`dns.resolver.Resolver.resolve()` for the + documentation of the other parameters, exceptions, and return + type of this method. + """ + + resolution = dns.resolver._Resolution( + self, qname, rdtype, rdclass, tcp, raise_on_no_answer, search + ) + if not backend: + backend = dns.asyncbackend.get_default_backend() + start = time.time() + while True: + (request, answer) = resolution.next_request() + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + # cache hit! + return answer + assert request is not None # needed for type checking + done = False + while not done: + (nameserver, tcp, backoff) = resolution.next_nameserver() + if backoff: + await backend.sleep(backoff) + timeout = self._compute_timeout(start, lifetime, resolution.errors) + try: + response = await nameserver.async_query( + request, + timeout=timeout, + source=source, + source_port=source_port, + max_size=tcp, + backend=backend, + ) + except Exception as ex: + (_, done) = resolution.query_result(None, ex) + continue + (answer, done) = resolution.query_result(response, None) + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + return answer + + async def resolve_address( + self, ipaddr: str, *args: Any, **kwargs: Any + ) -> dns.resolver.Answer: + """Use an asynchronous resolver to run a reverse query for PTR + records. + + This utilizes the resolve() method to perform a PTR lookup on the + specified IP address. + + *ipaddr*, a ``str``, the IPv4 or IPv6 address you want to get + the PTR record for. + + All other arguments that can be passed to the resolve() function + except for rdtype and rdclass are also supported by this + function. + + """ + # We make a modified kwargs for type checking happiness, as otherwise + # we get a legit warning about possibly having rdtype and rdclass + # in the kwargs more than once. + modified_kwargs: Dict[str, Any] = {} + modified_kwargs.update(kwargs) + modified_kwargs["rdtype"] = dns.rdatatype.PTR + modified_kwargs["rdclass"] = dns.rdataclass.IN + return await self.resolve( + dns.reversename.from_address(ipaddr), *args, **modified_kwargs + ) + + async def resolve_name( + self, + name: Union[dns.name.Name, str], + family: int = socket.AF_UNSPEC, + **kwargs: Any, + ) -> dns.resolver.HostAnswers: + """Use an asynchronous resolver to query for address records. + + This utilizes the resolve() method to perform A and/or AAAA lookups on + the specified name. + + *name*, a ``dns.name.Name`` or ``str``, the name to resolve. + + *family*, an ``int``, the address family. If socket.AF_UNSPEC + (the default), both A and AAAA records will be retrieved. + + All other arguments that can be passed to the resolve() function + except for rdtype and rdclass are also supported by this + function. + """ + # We make a modified kwargs for type checking happiness, as otherwise + # we get a legit warning about possibly having rdtype and rdclass + # in the kwargs more than once.
+ modified_kwargs: Dict[str, Any] = {} + modified_kwargs.update(kwargs) + modified_kwargs.pop("rdtype", None) + modified_kwargs["rdclass"] = dns.rdataclass.IN + + if family == socket.AF_INET: + v4 = await self.resolve(name, dns.rdatatype.A, **modified_kwargs) + return dns.resolver.HostAnswers.make(v4=v4) + elif family == socket.AF_INET6: + v6 = await self.resolve(name, dns.rdatatype.AAAA, **modified_kwargs) + return dns.resolver.HostAnswers.make(v6=v6) + elif family != socket.AF_UNSPEC: + raise NotImplementedError(f"unknown address family {family}") + + raise_on_no_answer = modified_kwargs.pop("raise_on_no_answer", True) + lifetime = modified_kwargs.pop("lifetime", None) + start = time.time() + v6 = await self.resolve( + name, + dns.rdatatype.AAAA, + raise_on_no_answer=False, + lifetime=self._compute_timeout(start, lifetime), + **modified_kwargs, + ) + # Note that setting name ensures we query the same name + # for A as we did for AAAA. (This is just in case search lists + # are active by default in the resolver configuration and + # we might be talking to a server that says NXDOMAIN when it + # wants to say NOERROR no data.) + name = v6.qname + v4 = await self.resolve( + name, + dns.rdatatype.A, + raise_on_no_answer=False, + lifetime=self._compute_timeout(start, lifetime), + **modified_kwargs, + ) + answers = dns.resolver.HostAnswers.make( + v6=v6, v4=v4, add_empty=not raise_on_no_answer + ) + if not answers: + raise NoAnswer(response=v6.response) + return answers + + # pylint: disable=redefined-outer-name + + async def canonical_name(self, name: Union[dns.name.Name, str]) -> dns.name.Name: + """Determine the canonical name of *name*. + + The canonical name is the name the resolver uses for queries + after all CNAME and DNAME renamings have been applied. + + *name*, a ``dns.name.Name`` or ``str``, the query name. + + This method can raise any exception that ``resolve()`` can + raise, other than ``dns.resolver.NoAnswer`` and + ``dns.resolver.NXDOMAIN``. + + Returns a ``dns.name.Name``. + """ + try: + answer = await self.resolve(name, raise_on_no_answer=False) + canonical_name = answer.canonical_name + except dns.resolver.NXDOMAIN as e: + canonical_name = e.canonical_name + return canonical_name + + async def try_ddr(self, lifetime: float = 5.0) -> None: + """Try to update the resolver's nameservers using Discovery of Designated + Resolvers (DDR). If successful, the resolver will subsequently use + DNS-over-HTTPS or DNS-over-TLS for future queries. + + *lifetime*, a float, is the maximum time to spend attempting DDR. The default + is 5 seconds. + + If the SVCB query is successful and results in a non-empty list of nameservers, + then the resolver's nameservers are set to the returned servers in priority + order. + + The current implementation does not use any address hints from the SVCB record, + nor does it resolve addresses for the SVCB target name; rather, it assumes that + the bootstrap nameserver will always be one of the addresses and uses it. + A future revision to the code may offer fuller support. The code verifies that + the bootstrap nameserver is in the Subject Alternative Name field of the + TLS certificate.
+ """ + try: + expiration = time.time() + lifetime + answer = await self.resolve( + dns._ddr._local_resolver_name, "svcb", lifetime=lifetime + ) + timeout = dns.query._remaining(expiration) + nameservers = await dns._ddr._get_nameservers_async(answer, timeout) + if len(nameservers) > 0: + self.nameservers = nameservers + except Exception: + pass + + +default_resolver = None + + +def get_default_resolver() -> Resolver: + """Get the default asynchronous resolver, initializing it if necessary.""" + if default_resolver is None: + reset_default_resolver() + assert default_resolver is not None + return default_resolver + + +def reset_default_resolver() -> None: + """Re-initialize default asynchronous resolver. + + Note that the resolver configuration (i.e. /etc/resolv.conf on UNIX + systems) will be re-read immediately. + """ + + global default_resolver + default_resolver = Resolver() + + +async def resolve( + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + tcp: bool = False, + source: Optional[str] = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: Optional[float] = None, + search: Optional[bool] = None, + backend: Optional[dns.asyncbackend.Backend] = None, +) -> dns.resolver.Answer: + """Query nameservers asynchronously to find the answer to the question. + + This is a convenience function that uses the default resolver + object to make the query. + + See :py:func:`dns.asyncresolver.Resolver.resolve` for more + information on the parameters. + """ + + return await get_default_resolver().resolve( + qname, + rdtype, + rdclass, + tcp, + source, + raise_on_no_answer, + source_port, + lifetime, + search, + backend, + ) + + +async def resolve_address( + ipaddr: str, *args: Any, **kwargs: Any +) -> dns.resolver.Answer: + """Use a resolver to run a reverse query for PTR records. + + See :py:func:`dns.asyncresolver.Resolver.resolve_address` for more + information on the parameters. + """ + + return await get_default_resolver().resolve_address(ipaddr, *args, **kwargs) + + +async def resolve_name( + name: Union[dns.name.Name, str], family: int = socket.AF_UNSPEC, **kwargs: Any +) -> dns.resolver.HostAnswers: + """Use a resolver to asynchronously query for address records. + + See :py:func:`dns.asyncresolver.Resolver.resolve_name` for more + information on the parameters. + """ + + return await get_default_resolver().resolve_name(name, family, **kwargs) + + +async def canonical_name(name: Union[dns.name.Name, str]) -> dns.name.Name: + """Determine the canonical name of *name*. + + See :py:func:`dns.resolver.Resolver.canonical_name` for more + information on the parameters and possible exceptions. + """ + + return await get_default_resolver().canonical_name(name) + + +async def try_ddr(timeout: float = 5.0) -> None: + """Try to update the default resolver's nameservers using Discovery of Designated + Resolvers (DDR). If successful, the resolver will subsequently use + DNS-over-HTTPS or DNS-over-TLS for future queries. + + See :py:func:`dns.resolver.Resolver.try_ddr` for more information. 
+ """ + return await get_default_resolver().try_ddr(timeout) + + +async def zone_for_name( + name: Union[dns.name.Name, str], + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + tcp: bool = False, + resolver: Optional[Resolver] = None, + backend: Optional[dns.asyncbackend.Backend] = None, +) -> dns.name.Name: + """Find the name of the zone which contains the specified name. + + See :py:func:`dns.resolver.Resolver.zone_for_name` for more + information on the parameters and possible exceptions. + """ + + if isinstance(name, str): + name = dns.name.from_text(name, dns.name.root) + if resolver is None: + resolver = get_default_resolver() + if not name.is_absolute(): + raise NotAbsolute(name) + while True: + try: + answer = await resolver.resolve( + name, dns.rdatatype.SOA, rdclass, tcp, backend=backend + ) + assert answer.rrset is not None + if answer.rrset.name == name: + return name + # otherwise we were CNAMEd or DNAMEd and need to look higher + except (NXDOMAIN, NoAnswer): + pass + try: + name = name.parent() + except dns.name.NoParent: # pragma: no cover + raise NoRootSOA + + +async def make_resolver_at( + where: Union[dns.name.Name, str], + port: int = 53, + family: int = socket.AF_UNSPEC, + resolver: Optional[Resolver] = None, +) -> Resolver: + """Make a stub resolver using the specified destination as the full resolver. + + *where*, a ``dns.name.Name`` or ``str`` the domain name or IP address of the + full resolver. + + *port*, an ``int``, the port to use. If not specified, the default is 53. + + *family*, an ``int``, the address family to use. This parameter is used if + *where* is not an address. The default is ``socket.AF_UNSPEC`` in which case + the first address returned by ``resolve_name()`` will be used, otherwise the + first address of the specified family will be used. + + *resolver*, a ``dns.asyncresolver.Resolver`` or ``None``, the resolver to use for + resolution of hostnames. If not specified, the default resolver will be used. + + Returns a ``dns.resolver.Resolver`` or raises an exception. + """ + if resolver is None: + resolver = get_default_resolver() + nameservers: List[Union[str, dns.nameserver.Nameserver]] = [] + if isinstance(where, str) and dns.inet.is_address(where): + nameservers.append(dns.nameserver.Do53Nameserver(where, port)) + else: + answers = await resolver.resolve_name(where, family) + for address in answers.addresses(): + nameservers.append(dns.nameserver.Do53Nameserver(address, port)) + res = dns.asyncresolver.Resolver(configure=False) + res.nameservers = nameservers + return res + + +async def resolve_at( + where: Union[dns.name.Name, str], + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + tcp: bool = False, + source: Optional[str] = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: Optional[float] = None, + search: Optional[bool] = None, + backend: Optional[dns.asyncbackend.Backend] = None, + port: int = 53, + family: int = socket.AF_UNSPEC, + resolver: Optional[Resolver] = None, +) -> dns.resolver.Answer: + """Query nameservers to find the answer to the question. + + This is a convenience function that calls ``dns.asyncresolver.make_resolver_at()`` + to make a resolver, and then uses it to resolve the query. 
+ + See ``dns.asyncresolver.Resolver.resolve`` for more information on the resolution + parameters, and ``dns.asyncresolver.make_resolver_at`` for information about the + resolver parameters *where*, *port*, *family*, and *resolver*. + + If making more than one query, it is more efficient to call + ``dns.asyncresolver.make_resolver_at()`` and then use that resolver for the queries + instead of calling ``resolve_at()`` multiple times. + """ + res = await make_resolver_at(where, port, family, resolver) + return await res.resolve( + qname, + rdtype, + rdclass, + tcp, + source, + raise_on_no_answer, + source_port, + lifetime, + search, + backend, + ) diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssec.py b/backend/test/lib/python3.8/site-packages/dns/dnssec.py new file mode 100644 index 0000000000000000000000000000000000000000..2949f61977db17937e0a1895bb7a446f3d6a0af2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/dnssec.py @@ -0,0 +1,1222 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Common DNSSEC-related functions and constants.""" + + +import base64 +import contextlib +import functools +import hashlib +import struct +import time +from datetime import datetime +from typing import Callable, Dict, List, Optional, Set, Tuple, Union, cast + +import dns.exception +import dns.name +import dns.node +import dns.rdata +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.rrset +import dns.transaction +import dns.zone +from dns.dnssectypes import Algorithm, DSDigest, NSEC3Hash +from dns.exception import ( # pylint: disable=W0611 + AlgorithmKeyMismatch, + DeniedByPolicy, + UnsupportedAlgorithm, + ValidationFailure, +) +from dns.rdtypes.ANY.CDNSKEY import CDNSKEY +from dns.rdtypes.ANY.CDS import CDS +from dns.rdtypes.ANY.DNSKEY import DNSKEY +from dns.rdtypes.ANY.DS import DS +from dns.rdtypes.ANY.NSEC import NSEC, Bitmap +from dns.rdtypes.ANY.NSEC3PARAM import NSEC3PARAM +from dns.rdtypes.ANY.RRSIG import RRSIG, sigtime_to_posixtime +from dns.rdtypes.dnskeybase import Flag + +PublicKey = Union[ + "GenericPublicKey", + "rsa.RSAPublicKey", + "ec.EllipticCurvePublicKey", + "ed25519.Ed25519PublicKey", + "ed448.Ed448PublicKey", +] + +PrivateKey = Union[ + "GenericPrivateKey", + "rsa.RSAPrivateKey", + "ec.EllipticCurvePrivateKey", + "ed25519.Ed25519PrivateKey", + "ed448.Ed448PrivateKey", +] + +RRsetSigner = Callable[[dns.transaction.Transaction, dns.rrset.RRset], None] + + +def algorithm_from_text(text: str) -> Algorithm: + """Convert text into a DNSSEC algorithm value. + + *text*, a ``str``, the text to convert into an algorithm value. + + Returns an ``int``.
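+ + For example: + + dns.dnssec.algorithm_from_text("RSASHA256") == Algorithm.RSASHA256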
+ """ + + return Algorithm.from_text(text) + + +def algorithm_to_text(value: Union[Algorithm, int]) -> str: + """Convert a DNSSEC algorithm value to text + + *value*, a ``dns.dnssec.Algorithm``. + + Returns a ``str``, the name of a DNSSEC algorithm. + """ + + return Algorithm.to_text(value) + + +def to_timestamp(value: Union[datetime, str, float, int]) -> int: + """Convert various format to a timestamp""" + if isinstance(value, datetime): + return int(value.timestamp()) + elif isinstance(value, str): + return sigtime_to_posixtime(value) + elif isinstance(value, float): + return int(value) + elif isinstance(value, int): + return value + else: + raise TypeError("Unsupported timestamp type") + + +def key_id(key: Union[DNSKEY, CDNSKEY]) -> int: + """Return the key id (a 16-bit number) for the specified key. + + *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` + + Returns an ``int`` between 0 and 65535 + """ + + rdata = key.to_wire() + if key.algorithm == Algorithm.RSAMD5: + return (rdata[-3] << 8) + rdata[-2] + else: + total = 0 + for i in range(len(rdata) // 2): + total += (rdata[2 * i] << 8) + rdata[2 * i + 1] + if len(rdata) % 2 != 0: + total += rdata[len(rdata) - 1] << 8 + total += (total >> 16) & 0xFFFF + return total & 0xFFFF + + +class Policy: + def __init__(self): + pass + + def ok_to_sign(self, _: DNSKEY) -> bool: # pragma: no cover + return False + + def ok_to_validate(self, _: DNSKEY) -> bool: # pragma: no cover + return False + + def ok_to_create_ds(self, _: DSDigest) -> bool: # pragma: no cover + return False + + def ok_to_validate_ds(self, _: DSDigest) -> bool: # pragma: no cover + return False + + +class SimpleDeny(Policy): + def __init__(self, deny_sign, deny_validate, deny_create_ds, deny_validate_ds): + super().__init__() + self._deny_sign = deny_sign + self._deny_validate = deny_validate + self._deny_create_ds = deny_create_ds + self._deny_validate_ds = deny_validate_ds + + def ok_to_sign(self, key: DNSKEY) -> bool: + return key.algorithm not in self._deny_sign + + def ok_to_validate(self, key: DNSKEY) -> bool: + return key.algorithm not in self._deny_validate + + def ok_to_create_ds(self, algorithm: DSDigest) -> bool: + return algorithm not in self._deny_create_ds + + def ok_to_validate_ds(self, algorithm: DSDigest) -> bool: + return algorithm not in self._deny_validate_ds + + +rfc_8624_policy = SimpleDeny( + {Algorithm.RSAMD5, Algorithm.DSA, Algorithm.DSANSEC3SHA1, Algorithm.ECCGOST}, + {Algorithm.RSAMD5, Algorithm.DSA, Algorithm.DSANSEC3SHA1}, + {DSDigest.NULL, DSDigest.SHA1, DSDigest.GOST}, + {DSDigest.NULL}, +) + +allow_all_policy = SimpleDeny(set(), set(), set(), set()) + + +default_policy = rfc_8624_policy + + +def make_ds( + name: Union[dns.name.Name, str], + key: dns.rdata.Rdata, + algorithm: Union[DSDigest, str], + origin: Optional[dns.name.Name] = None, + policy: Optional[Policy] = None, + validating: bool = False, +) -> DS: + """Create a DS record for a DNSSEC key. + + *name*, a ``dns.name.Name`` or ``str``, the owner name of the DS record. + + *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` or ``dns.rdtypes.ANY.DNSKEY.CDNSKEY``, + the key the DS is about. + + *algorithm*, a ``str`` or ``int`` specifying the hash algorithm. + The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case + does not matter for these strings. + + *origin*, a ``dns.name.Name`` or ``None``. If *key* is a relative name, + then it will be made absolute using the specified origin. + + *policy*, a ``dns.dnssec.Policy`` or ``None``. 
If ``None``, the default policy, + ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. + + *validating*, a ``bool``. If ``True``, then policy is checked in + validating mode, i.e. "Is it ok to validate using this digest algorithm?". + Otherwise the policy is checked in creating mode, i.e. "Is it ok to create a DS with + this digest algorithm?". + + Raises ``UnsupportedAlgorithm`` if the algorithm is unknown. + + Raises ``DeniedByPolicy`` if the algorithm is denied by policy. + + Returns a ``dns.rdtypes.ANY.DS.DS`` + """ + + if policy is None: + policy = default_policy + try: + if isinstance(algorithm, str): + algorithm = DSDigest[algorithm.upper()] + except Exception: + raise UnsupportedAlgorithm('unsupported algorithm "%s"' % algorithm) + if validating: + check = policy.ok_to_validate_ds + else: + check = policy.ok_to_create_ds + if not check(algorithm): + raise DeniedByPolicy + if not isinstance(key, (DNSKEY, CDNSKEY)): + raise ValueError("key is not a DNSKEY/CDNSKEY") + if algorithm == DSDigest.SHA1: + dshash = hashlib.sha1() + elif algorithm == DSDigest.SHA256: + dshash = hashlib.sha256() + elif algorithm == DSDigest.SHA384: + dshash = hashlib.sha384() + else: + raise UnsupportedAlgorithm('unsupported algorithm "%s"' % algorithm) + + if isinstance(name, str): + name = dns.name.from_text(name, origin) + wire = name.canonicalize().to_wire() + assert wire is not None + dshash.update(wire) + dshash.update(key.to_wire(origin=origin)) + digest = dshash.digest() + + dsrdata = struct.pack("!HBB", key_id(key), key.algorithm, algorithm) + digest + ds = dns.rdata.from_wire( + dns.rdataclass.IN, dns.rdatatype.DS, dsrdata, 0, len(dsrdata) + ) + return cast(DS, ds) + + +def make_cds( + name: Union[dns.name.Name, str], + key: dns.rdata.Rdata, + algorithm: Union[DSDigest, str], + origin: Optional[dns.name.Name] = None, +) -> CDS: + """Create a CDS record for a DNSSEC key. + + *name*, a ``dns.name.Name`` or ``str``, the owner name of the DS record. + + *key*, a ``dns.rdtypes.ANY.DNSKEY.DNSKEY`` or ``dns.rdtypes.ANY.DNSKEY.CDNSKEY``, + the key the DS is about. + + *algorithm*, a ``str`` or ``int`` specifying the hash algorithm. + The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case + does not matter for these strings. + + *origin*, a ``dns.name.Name`` or ``None``. If *key* is a relative name, + then it will be made absolute using the specified origin. + + Raises ``UnsupportedAlgorithm`` if the algorithm is unknown. 
+ + Returns a ``dns.rdtypes.ANY.DS.CDS`` + """ + + ds = make_ds(name, key, algorithm, origin) + return CDS( + rdclass=ds.rdclass, + rdtype=dns.rdatatype.CDS, + key_tag=ds.key_tag, + algorithm=ds.algorithm, + digest_type=ds.digest_type, + digest=ds.digest, + ) + + +def _find_candidate_keys( + keys: Dict[dns.name.Name, Union[dns.rdataset.Rdataset, dns.node.Node]], rrsig: RRSIG +) -> Optional[List[DNSKEY]]: + value = keys.get(rrsig.signer) + if isinstance(value, dns.node.Node): + rdataset = value.get_rdataset(dns.rdataclass.IN, dns.rdatatype.DNSKEY) + else: + rdataset = value + if rdataset is None: + return None + return [ + cast(DNSKEY, rd) + for rd in rdataset + if rd.algorithm == rrsig.algorithm + and key_id(rd) == rrsig.key_tag + and (rd.flags & Flag.ZONE) == Flag.ZONE # RFC 4034 2.1.1 + and rd.protocol == 3 # RFC 4034 2.1.2 + ] + + +def _get_rrname_rdataset( + rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], +) -> Tuple[dns.name.Name, dns.rdataset.Rdataset]: + if isinstance(rrset, tuple): + return rrset[0], rrset[1] + else: + return rrset.name, rrset + + +def _validate_signature(sig: bytes, data: bytes, key: DNSKEY) -> None: + public_cls = get_algorithm_cls_from_dnskey(key).public_cls + try: + public_key = public_cls.from_dnskey(key) + except ValueError: + raise ValidationFailure("invalid public key") + public_key.verify(sig, data) + + +def _validate_rrsig( + rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], + rrsig: RRSIG, + keys: Dict[dns.name.Name, Union[dns.node.Node, dns.rdataset.Rdataset]], + origin: Optional[dns.name.Name] = None, + now: Optional[float] = None, + policy: Optional[Policy] = None, +) -> None: + """Validate an RRset against a single signature rdata, throwing an + exception if validation is not successful. + + *rrset*, the RRset to validate. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *rrsig*, a ``dns.rdata.Rdata``, the signature to validate. + + *keys*, the key dictionary, used to find the DNSKEY associated + with a given name. The dictionary is keyed by a + ``dns.name.Name``, and has ``dns.node.Node`` or + ``dns.rdataset.Rdataset`` values. + + *origin*, a ``dns.name.Name`` or ``None``, the origin to use for relative + names. + + *now*, a ``float`` or ``None``, the time, in seconds since the epoch, to + use as the current time when validating. If ``None``, the actual current + time is used. + + *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, + ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. + + Raises ``ValidationFailure`` if the signature is expired, not yet valid, + the public key is invalid, the algorithm is unknown, the verification + fails, etc. + + Raises ``UnsupportedAlgorithm`` if the algorithm is recognized by + dnspython but not implemented. 
+ """ + + if policy is None: + policy = default_policy + + candidate_keys = _find_candidate_keys(keys, rrsig) + if candidate_keys is None: + raise ValidationFailure("unknown key") + + if now is None: + now = time.time() + if rrsig.expiration < now: + raise ValidationFailure("expired") + if rrsig.inception > now: + raise ValidationFailure("not yet valid") + + data = _make_rrsig_signature_data(rrset, rrsig, origin) + + for candidate_key in candidate_keys: + if not policy.ok_to_validate(candidate_key): + continue + try: + _validate_signature(rrsig.signature, data, candidate_key) + return + except (InvalidSignature, ValidationFailure): + # this happens on an individual validation failure + continue + # nothing verified -- raise failure: + raise ValidationFailure("verify failure") + + +def _validate( + rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], + rrsigset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], + keys: Dict[dns.name.Name, Union[dns.node.Node, dns.rdataset.Rdataset]], + origin: Optional[dns.name.Name] = None, + now: Optional[float] = None, + policy: Optional[Policy] = None, +) -> None: + """Validate an RRset against a signature RRset, throwing an exception + if none of the signatures validate. + + *rrset*, the RRset to validate. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *rrsigset*, the signature RRset. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *keys*, the key dictionary, used to find the DNSKEY associated + with a given name. The dictionary is keyed by a + ``dns.name.Name``, and has ``dns.node.Node`` or + ``dns.rdataset.Rdataset`` values. + + *origin*, a ``dns.name.Name``, the origin to use for relative names; + defaults to None. + + *now*, an ``int`` or ``None``, the time, in seconds since the epoch, to + use as the current time when validating. If ``None``, the actual current + time is used. + + *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, + ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. + + Raises ``ValidationFailure`` if the signature is expired, not yet valid, + the public key is invalid, the algorithm is unknown, the verification + fails, etc. 
+ """ + + if policy is None: + policy = default_policy + + if isinstance(origin, str): + origin = dns.name.from_text(origin, dns.name.root) + + if isinstance(rrset, tuple): + rrname = rrset[0] + else: + rrname = rrset.name + + if isinstance(rrsigset, tuple): + rrsigname = rrsigset[0] + rrsigrdataset = rrsigset[1] + else: + rrsigname = rrsigset.name + rrsigrdataset = rrsigset + + rrname = rrname.choose_relativity(origin) + rrsigname = rrsigname.choose_relativity(origin) + if rrname != rrsigname: + raise ValidationFailure("owner names do not match") + + for rrsig in rrsigrdataset: + if not isinstance(rrsig, RRSIG): + raise ValidationFailure("expected an RRSIG") + try: + _validate_rrsig(rrset, rrsig, keys, origin, now, policy) + return + except (ValidationFailure, UnsupportedAlgorithm): + pass + raise ValidationFailure("no RRSIGs validated") + + +def _sign( + rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], + private_key: PrivateKey, + signer: dns.name.Name, + dnskey: DNSKEY, + inception: Optional[Union[datetime, str, int, float]] = None, + expiration: Optional[Union[datetime, str, int, float]] = None, + lifetime: Optional[int] = None, + verify: bool = False, + policy: Optional[Policy] = None, + origin: Optional[dns.name.Name] = None, +) -> RRSIG: + """Sign RRset using private key. + + *rrset*, the RRset to validate. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *private_key*, the private key to use for signing, a + ``cryptography.hazmat.primitives.asymmetric`` private key class applicable + for DNSSEC. + + *signer*, a ``dns.name.Name``, the Signer's name. + + *dnskey*, a ``DNSKEY`` matching ``private_key``. + + *inception*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the + signature inception time. If ``None``, the current time is used. If a ``str``, the + format is "YYYYMMDDHHMMSS" or alternatively the number of seconds since the UNIX + epoch in text form; this is the same the RRSIG rdata's text form. + Values of type `int` or `float` are interpreted as seconds since the UNIX epoch. + + *expiration*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature + expiration time. If ``None``, the expiration time will be the inception time plus + the value of the *lifetime* parameter. See the description of *inception* above + for how the various parameter types are interpreted. + + *lifetime*, an ``int`` or ``None``, the signature lifetime in seconds. This + parameter is only meaningful if *expiration* is ``None``. + + *verify*, a ``bool``. If set to ``True``, the signer will verify signatures + after they are created; the default is ``False``. + + *policy*, a ``dns.dnssec.Policy`` or ``None``. If ``None``, the default policy, + ``dns.dnssec.default_policy`` is used; this policy defaults to that of RFC 8624. + + *origin*, a ``dns.name.Name`` or ``None``. If ``None``, the default, then all + names in the rrset (including its owner name) must be absolute; otherwise the + specified origin will be used to make names absolute when signing. + + Raises ``DeniedByPolicy`` if the signature is denied by policy. 
+ """ + + if policy is None: + policy = default_policy + if not policy.ok_to_sign(dnskey): + raise DeniedByPolicy + + if isinstance(rrset, tuple): + rdclass = rrset[1].rdclass + rdtype = rrset[1].rdtype + rrname = rrset[0] + original_ttl = rrset[1].ttl + else: + rdclass = rrset.rdclass + rdtype = rrset.rdtype + rrname = rrset.name + original_ttl = rrset.ttl + + if inception is not None: + rrsig_inception = to_timestamp(inception) + else: + rrsig_inception = int(time.time()) + + if expiration is not None: + rrsig_expiration = to_timestamp(expiration) + elif lifetime is not None: + rrsig_expiration = rrsig_inception + lifetime + else: + raise ValueError("expiration or lifetime must be specified") + + # Derelativize now because we need a correct labels length for the + # rrsig_template. + if origin is not None: + rrname = rrname.derelativize(origin) + labels = len(rrname) - 1 + + # Adjust labels appropriately for wildcards. + if rrname.is_wild(): + labels -= 1 + + rrsig_template = RRSIG( + rdclass=rdclass, + rdtype=dns.rdatatype.RRSIG, + type_covered=rdtype, + algorithm=dnskey.algorithm, + labels=labels, + original_ttl=original_ttl, + expiration=rrsig_expiration, + inception=rrsig_inception, + key_tag=key_id(dnskey), + signer=signer, + signature=b"", + ) + + data = dns.dnssec._make_rrsig_signature_data(rrset, rrsig_template, origin) + + if isinstance(private_key, GenericPrivateKey): + signing_key = private_key + else: + try: + private_cls = get_algorithm_cls_from_dnskey(dnskey) + signing_key = private_cls(key=private_key) + except UnsupportedAlgorithm: + raise TypeError("Unsupported key algorithm") + + signature = signing_key.sign(data, verify) + + return cast(RRSIG, rrsig_template.replace(signature=signature)) + + +def _make_rrsig_signature_data( + rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], + rrsig: RRSIG, + origin: Optional[dns.name.Name] = None, +) -> bytes: + """Create signature rdata. + + *rrset*, the RRset to sign/validate. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *rrsig*, a ``dns.rdata.Rdata``, the signature to validate, or the + signature template used when signing. + + *origin*, a ``dns.name.Name`` or ``None``, the origin to use for relative + names. + + Raises ``UnsupportedAlgorithm`` if the algorithm is recognized by + dnspython but not implemented. + """ + + if isinstance(origin, str): + origin = dns.name.from_text(origin, dns.name.root) + + signer = rrsig.signer + if not signer.is_absolute(): + if origin is None: + raise ValidationFailure("relative RR name without an origin specified") + signer = signer.derelativize(origin) + + # For convenience, allow the rrset to be specified as a (name, + # rdataset) tuple as well as a proper rrset + rrname, rdataset = _get_rrname_rdataset(rrset) + + data = b"" + data += rrsig.to_wire(origin=signer)[:18] + data += rrsig.signer.to_digestable(signer) + + # Derelativize the name before considering labels. 
+ if not rrname.is_absolute(): + if origin is None: + raise ValidationFailure("relative RR name without an origin specified") + rrname = rrname.derelativize(origin) + + name_len = len(rrname) + if rrname.is_wild() and rrsig.labels != name_len - 2: + raise ValidationFailure("wild owner name has wrong label length") + if name_len - 1 < rrsig.labels: + raise ValidationFailure("owner name longer than RRSIG labels") + elif rrsig.labels < name_len - 1: + suffix = rrname.split(rrsig.labels + 1)[1] + rrname = dns.name.from_text("*", suffix) + rrnamebuf = rrname.to_digestable() + rrfixed = struct.pack("!HHI", rdataset.rdtype, rdataset.rdclass, rrsig.original_ttl) + rdatas = [rdata.to_digestable(origin) for rdata in rdataset] + for rdata in sorted(rdatas): + data += rrnamebuf + data += rrfixed + rrlen = struct.pack("!H", len(rdata)) + data += rrlen + data += rdata + + return data + + +def _make_dnskey( + public_key: PublicKey, + algorithm: Union[int, str], + flags: int = Flag.ZONE, + protocol: int = 3, +) -> DNSKEY: + """Convert a public key to DNSKEY Rdata + + *public_key*, a ``PublicKey`` (``GenericPublicKey`` or + ``cryptography.hazmat.primitives.asymmetric``) to convert. + + *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm. + + *flags*: DNSKEY flags field as an integer. + + *protocol*: DNSKEY protocol field as an integer. + + Raises ``ValueError`` if the specified key algorithm parameters are not + supported, ``TypeError`` if the key type is unsupported, + ``UnsupportedAlgorithm`` if the algorithm is unknown and + ``AlgorithmKeyMismatch`` if the algorithm does not match the key type. + + Return DNSKEY ``Rdata``. + """ + + algorithm = Algorithm.make(algorithm) + + if isinstance(public_key, GenericPublicKey): + return public_key.to_dnskey(flags=flags, protocol=protocol) + else: + public_cls = get_algorithm_cls(algorithm).public_cls + return public_cls(key=public_key).to_dnskey(flags=flags, protocol=protocol) + + +def _make_cdnskey( + public_key: PublicKey, + algorithm: Union[int, str], + flags: int = Flag.ZONE, + protocol: int = 3, +) -> CDNSKEY: + """Convert a public key to CDNSKEY Rdata + + *public_key*, the public key to convert, a + ``cryptography.hazmat.primitives.asymmetric`` public key class applicable + for DNSSEC. + + *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm. + + *flags*: DNSKEY flags field as an integer. + + *protocol*: DNSKEY protocol field as an integer. + + Raises ``ValueError`` if the specified key algorithm parameters are not + supported, ``TypeError`` if the key type is unsupported, + ``UnsupportedAlgorithm`` if the algorithm is unknown and + ``AlgorithmKeyMismatch`` if the algorithm does not match the key type. + + Return CDNSKEY ``Rdata``. + """ + + dnskey = _make_dnskey(public_key, algorithm, flags, protocol) + + return CDNSKEY( + rdclass=dnskey.rdclass, + rdtype=dns.rdatatype.CDNSKEY, + flags=dnskey.flags, + protocol=dnskey.protocol, + algorithm=dnskey.algorithm, + key=dnskey.key, + ) + + +def nsec3_hash( + domain: Union[dns.name.Name, str], + salt: Optional[Union[str, bytes]], + iterations: int, + algorithm: Union[int, str], +) -> str: + """ + Calculate the NSEC3 hash, according to + https://tools.ietf.org/html/rfc5155#section-5 + + *domain*, a ``dns.name.Name`` or ``str``, the name to hash. + + *salt*, a ``str``, ``bytes``, or ``None``, the hash salt. If a + string, it is decoded as a hex string. + + *iterations*, an ``int``, the number of iterations. + + *algorithm*, a ``str`` or ``int``, the hash algorithm.
+ The only defined algorithm is SHA1. + + Returns a ``str``, the encoded NSEC3 hash. + """ + + b32_conversion = str.maketrans( + "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567", "0123456789ABCDEFGHIJKLMNOPQRSTUV" + ) + + try: + if isinstance(algorithm, str): + algorithm = NSEC3Hash[algorithm.upper()] + except Exception: + raise ValueError("Wrong hash algorithm (only SHA1 is supported)") + + if algorithm != NSEC3Hash.SHA1: + raise ValueError("Wrong hash algorithm (only SHA1 is supported)") + + if salt is None: + salt_encoded = b"" + elif isinstance(salt, str): + if len(salt) % 2 == 0: + salt_encoded = bytes.fromhex(salt) + else: + raise ValueError("Invalid salt length") + else: + salt_encoded = salt + + if not isinstance(domain, dns.name.Name): + domain = dns.name.from_text(domain) + domain_encoded = domain.canonicalize().to_wire() + assert domain_encoded is not None + + digest = hashlib.sha1(domain_encoded + salt_encoded).digest() + for _ in range(iterations): + digest = hashlib.sha1(digest + salt_encoded).digest() + + output = base64.b32encode(digest).decode("utf-8") + output = output.translate(b32_conversion) + + return output + + +def make_ds_rdataset( + rrset: Union[dns.rrset.RRset, Tuple[dns.name.Name, dns.rdataset.Rdataset]], + algorithms: Set[Union[DSDigest, str]], + origin: Optional[dns.name.Name] = None, +) -> dns.rdataset.Rdataset: + """Create a DS record from DNSKEY/CDNSKEY/CDS. + + *rrset*, the RRset to create DS Rdataset for. This can be a + ``dns.rrset.RRset`` or a (``dns.name.Name``, ``dns.rdataset.Rdataset``) + tuple. + + *algorithms*, a set of ``str`` or ``int`` specifying the hash algorithms. + The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case + does not matter for these strings. If the RRset is a CDS, only rdata + whose digest type is in *algorithms* are accepted. + + *origin*, a ``dns.name.Name`` or ``None``. If the owner name is relative, + it will be made absolute using the specified origin. + + Raises ``UnsupportedAlgorithm`` if any of the algorithms are unknown and + ``ValueError`` if the given RRset is not usable. + + Returns a ``dns.rdataset.Rdataset`` + """ + + rrname, rdataset = _get_rrname_rdataset(rrset) + + if rdataset.rdtype not in ( + dns.rdatatype.DNSKEY, + dns.rdatatype.CDNSKEY, + dns.rdatatype.CDS, + ): + raise ValueError("rrset not a DNSKEY/CDNSKEY/CDS") + + _algorithms = set() + for algorithm in algorithms: + try: + if isinstance(algorithm, str): + algorithm = DSDigest[algorithm.upper()] + except Exception: + raise UnsupportedAlgorithm('unsupported algorithm "%s"' % algorithm) + _algorithms.add(algorithm) + + if rdataset.rdtype == dns.rdatatype.CDS: + res = [] + for rdata in cds_rdataset_to_ds_rdataset(rdataset): + if rdata.digest_type in _algorithms: + res.append(rdata) + if len(res) == 0: + raise ValueError("no acceptable CDS rdata found") + return dns.rdataset.from_rdata_list(rdataset.ttl, res) + + res = [] + for algorithm in _algorithms: + res.extend(dnskey_rdataset_to_cds_rdataset(rrname, rdataset, algorithm, origin)) + return dns.rdataset.from_rdata_list(rdataset.ttl, res) + + +def cds_rdataset_to_ds_rdataset( + rdataset: dns.rdataset.Rdataset, +) -> dns.rdataset.Rdataset: + """Create a DS rdataset from a CDS rdataset. + + *rdataset*, a ``dns.rdataset.Rdataset``, to create DS Rdataset for. + + Raises ``ValueError`` if the rdataset is not CDS.
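# --- editorial example (not part of the vendored patch) ---------------------
# nsec3_hash() above can be sanity-checked against the RFC 5155 Appendix A
# test vectors; with dnspython installed, this should hold (the output uses
# uppercase base32hex):
import dns.dnssec

h = dns.dnssec.nsec3_hash("example", salt="aabbccdd", iterations=12, algorithm="SHA1")
assert h == "0P9MHAVEQVM6T7VBL5LOP2U3T2RP3TOM"
# --- end editorial example ---------------------------------------------------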
+ + Returns a ``dns.rdataset.Rdataset`` + """ + + if rdataset.rdtype != dns.rdatatype.CDS: + raise ValueError("rdataset not a CDS") + res = [] + for rdata in rdataset: + res.append( + CDS( + rdclass=rdata.rdclass, + rdtype=dns.rdatatype.DS, + key_tag=rdata.key_tag, + algorithm=rdata.algorithm, + digest_type=rdata.digest_type, + digest=rdata.digest, + ) + ) + return dns.rdataset.from_rdata_list(rdataset.ttl, res) + + +def dnskey_rdataset_to_cds_rdataset( + name: Union[dns.name.Name, str], + rdataset: dns.rdataset.Rdataset, + algorithm: Union[DSDigest, str], + origin: Optional[dns.name.Name] = None, +) -> dns.rdataset.Rdataset: + """Create a CDS record from DNSKEY/CDNSKEY. + + *name*, a ``dns.name.Name`` or ``str``, the owner name of the CDS record. + + *rdataset*, a ``dns.rdataset.Rdataset``, to create CDS Rdataset for. + + *algorithm*, a ``str`` or ``int`` specifying the hash algorithm. + The currently supported hashes are "SHA1", "SHA256", and "SHA384". Case + does not matter for these strings. + + *origin*, a ``dns.name.Name`` or ``None``. If *name* is a relative name, + then it will be made absolute using the specified origin. + + Raises ``UnsupportedAlgorithm`` if the algorithm is unknown or + ``ValueError`` if the rdataset is not DNSKEY/CDNSKEY. + + Returns a ``dns.rdataset.Rdataset`` + """ + + if rdataset.rdtype not in (dns.rdatatype.DNSKEY, dns.rdatatype.CDNSKEY): + raise ValueError("rdataset not a DNSKEY/CDNSKEY") + res = [] + for rdata in rdataset: + res.append(make_cds(name, rdata, algorithm, origin)) + return dns.rdataset.from_rdata_list(rdataset.ttl, res) + + +def dnskey_rdataset_to_cdnskey_rdataset( + rdataset: dns.rdataset.Rdataset, +) -> dns.rdataset.Rdataset: + """Create a CDNSKEY record from DNSKEY. + + *rdataset*, a ``dns.rdataset.Rdataset``, to create CDNSKEY Rdataset for.
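# --- editorial example (not part of the vendored patch) ---------------------
# A sketch of deriving DS-style rdata from a DNSKEY RRset with
# make_ds_rdataset() above; example_dnskey is the hypothetical DNSKEY rdata
# built in the earlier signing sketch.
import dns.dnssec
import dns.rrset

dnskey_rrset = dns.rrset.from_rdata("example.", 3600, example_dnskey)
ds_rdataset = dns.dnssec.make_ds_rdataset(dnskey_rrset, {"SHA256"})
# --- end editorial example ---------------------------------------------------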
+ + Returns a ``dns.rdataset.Rdataset`` + """ + + if rdataset.rdtype != dns.rdatatype.DNSKEY: + raise ValueError("rdataset not a DNSKEY") + res = [] + for rdata in rdataset: + res.append( + CDNSKEY( + rdclass=rdataset.rdclass, + rdtype=dns.rdatatype.CDNSKEY, + flags=rdata.flags, + protocol=rdata.protocol, + algorithm=rdata.algorithm, + key=rdata.key, + ) + ) + return dns.rdataset.from_rdata_list(rdataset.ttl, res) + + +def default_rrset_signer( + txn: dns.transaction.Transaction, + rrset: dns.rrset.RRset, + signer: dns.name.Name, + ksks: List[Tuple[PrivateKey, DNSKEY]], + zsks: List[Tuple[PrivateKey, DNSKEY]], + inception: Optional[Union[datetime, str, int, float]] = None, + expiration: Optional[Union[datetime, str, int, float]] = None, + lifetime: Optional[int] = None, + policy: Optional[Policy] = None, + origin: Optional[dns.name.Name] = None, +) -> None: + """Default RRset signer""" + + if rrset.rdtype in set( + [ + dns.rdatatype.RdataType.DNSKEY, + dns.rdatatype.RdataType.CDS, + dns.rdatatype.RdataType.CDNSKEY, + ] + ): + keys = ksks + else: + keys = zsks + + for private_key, dnskey in keys: + rrsig = dns.dnssec.sign( + rrset=rrset, + private_key=private_key, + dnskey=dnskey, + inception=inception, + expiration=expiration, + lifetime=lifetime, + signer=signer, + policy=policy, + origin=origin, + ) + txn.add(rrset.name, rrset.ttl, rrsig) + + +def sign_zone( + zone: dns.zone.Zone, + txn: Optional[dns.transaction.Transaction] = None, + keys: Optional[List[Tuple[PrivateKey, DNSKEY]]] = None, + add_dnskey: bool = True, + dnskey_ttl: Optional[int] = None, + inception: Optional[Union[datetime, str, int, float]] = None, + expiration: Optional[Union[datetime, str, int, float]] = None, + lifetime: Optional[int] = None, + nsec3: Optional[NSEC3PARAM] = None, + rrset_signer: Optional[RRsetSigner] = None, + policy: Optional[Policy] = None, +) -> None: + """Sign zone. + + *zone*, a ``dns.zone.Zone``, the zone to sign. + + *txn*, a ``dns.transaction.Transaction``, an optional transaction to use for + signing. + + *keys*, a list of (``PrivateKey``, ``DNSKEY``) tuples, to use for signing. KSK/ZSK + roles are assigned automatically if the SEP flag is used, otherwise all RRsets are + signed by all keys. + + *add_dnskey*, a ``bool``. If ``True``, the default, all specified DNSKEYs are + automatically added to the zone on signing. + + *dnskey_ttl*, an ``int``, specifies the TTL for DNSKEY RRs. If not specified, the + TTL of the existing DNSKEY RRset is used, or failing that, the TTL of the SOA + RRset. + + *inception*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature + inception time. If ``None``, the current time is used. If a ``str``, the format is + "YYYYMMDDHHMMSS" or alternatively the number of seconds since the UNIX epoch in text + form; this is the same as the RRSIG rdata's text form. Values of type ``int`` or + ``float`` are interpreted as seconds since the UNIX epoch. + + *expiration*, a ``datetime``, ``str``, ``int``, ``float`` or ``None``, the signature + expiration time. If ``None``, the expiration time will be the inception time plus + the value of the *lifetime* parameter. See the description of *inception* above for + how the various parameter types are interpreted. + + *lifetime*, an ``int`` or ``None``, the signature lifetime in seconds. This + parameter is only meaningful if *expiration* is ``None``. + + *nsec3*, a ``NSEC3PARAM`` Rdata, configures signing using NSEC3. Not yet + implemented. + + *rrset_signer*, a ``Callable``, an optional function for signing RRsets.
The + function requires two arguments: transaction and RRset. If not specified, + ``dns.dnssec.default_rrset_signer`` will be used. + + Returns ``None``. + """ + + ksks = [] + zsks = [] + + # if we have both KSKs and ZSKs, split by SEP flag. if not, sign all + # records with all keys + if keys: + for key in keys: + if key[1].flags & Flag.SEP: + ksks.append(key) + else: + zsks.append(key) + if not ksks: + ksks = keys + if not zsks: + zsks = keys + else: + keys = [] + + if txn: + cm: contextlib.AbstractContextManager = contextlib.nullcontext(txn) + else: + cm = zone.writer() + + with cm as _txn: + if add_dnskey: + if dnskey_ttl is None: + dnskey = _txn.get(zone.origin, dns.rdatatype.DNSKEY) + if dnskey: + dnskey_ttl = dnskey.ttl + else: + soa = _txn.get(zone.origin, dns.rdatatype.SOA) + dnskey_ttl = soa.ttl + for _, dnskey in keys: + _txn.add(zone.origin, dnskey_ttl, dnskey) + + if nsec3: + raise NotImplementedError("Signing with NSEC3 not yet implemented") + else: + _rrset_signer = rrset_signer or functools.partial( + default_rrset_signer, + signer=zone.origin, + ksks=ksks, + zsks=zsks, + inception=inception, + expiration=expiration, + lifetime=lifetime, + policy=policy, + origin=zone.origin, + ) + return _sign_zone_nsec(zone, _txn, _rrset_signer) + + +def _sign_zone_nsec( + zone: dns.zone.Zone, + txn: dns.transaction.Transaction, + rrset_signer: Optional[RRsetSigner] = None, +) -> None: + """NSEC zone signer""" + + def _txn_add_nsec( + txn: dns.transaction.Transaction, + name: dns.name.Name, + next_secure: Optional[dns.name.Name], + rdclass: dns.rdataclass.RdataClass, + ttl: int, + rrset_signer: Optional[RRsetSigner] = None, + ) -> None: + """NSEC zone signer helper""" + mandatory_types = set( + [dns.rdatatype.RdataType.RRSIG, dns.rdatatype.RdataType.NSEC] + ) + node = txn.get_node(name) + if node and next_secure: + types = ( + set([rdataset.rdtype for rdataset in node.rdatasets]) | mandatory_types + ) + windows = Bitmap.from_rdtypes(list(types)) + rrset = dns.rrset.from_rdata( + name, + ttl, + NSEC( + rdclass=rdclass, + rdtype=dns.rdatatype.RdataType.NSEC, + next=next_secure, + windows=windows, + ), + ) + txn.add(rrset) + if rrset_signer: + rrset_signer(txn, rrset) + + rrsig_ttl = zone.get_soa().minimum + delegation = None + last_secure = None + + for name in sorted(txn.iterate_names()): + if delegation and name.is_subdomain(delegation): + # names below delegations are not secure + continue + elif txn.get(name, dns.rdatatype.NS) and name != zone.origin: + # inside delegation + delegation = name + else: + # outside delegation + delegation = None + + if rrset_signer: + node = txn.get_node(name) + if node: + for rdataset in node.rdatasets: + if rdataset.rdtype == dns.rdatatype.RRSIG: + # do not sign RRSIGs + continue + elif delegation and rdataset.rdtype != dns.rdatatype.DS: + # do not sign delegations except DS records + continue + else: + rrset = dns.rrset.from_rdata(name, rdataset.ttl, *rdataset) + rrset_signer(txn, rrset) + + # We need "is not None" as the empty name is False because its length is 0.
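# --- editorial example (not part of the vendored patch) ---------------------
# A sketch of signing a whole zone with sign_zone() above. zone_text is a
# hypothetical zone file string containing at least SOA and NS records; the
# key pair is the one generated in the earlier signing sketch.
import dns.dnssec
import dns.zone

zone = dns.zone.from_text(zone_text, origin="example.", relativize=False)
dns.dnssec.sign_zone(zone, keys=[(example_private_key, example_dnskey)], lifetime=86400)
# --- end editorial example ---------------------------------------------------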
+ if last_secure is not None: + _txn_add_nsec(txn, last_secure, name, zone.rdclass, rrsig_ttl, rrset_signer) + last_secure = name + + if last_secure: + _txn_add_nsec( + txn, last_secure, zone.origin, zone.rdclass, rrsig_ttl, rrset_signer + ) + + +def _need_pyca(*args, **kwargs): + raise ImportError( + "DNSSEC validation requires python cryptography" + ) # pragma: no cover + + +try: + from cryptography.exceptions import InvalidSignature + from cryptography.hazmat.primitives.asymmetric import dsa # pylint: disable=W0611 + from cryptography.hazmat.primitives.asymmetric import ec # pylint: disable=W0611 + from cryptography.hazmat.primitives.asymmetric import ed448 # pylint: disable=W0611 + from cryptography.hazmat.primitives.asymmetric import rsa # pylint: disable=W0611 + from cryptography.hazmat.primitives.asymmetric import ( # pylint: disable=W0611 + ed25519, + ) + + from dns.dnssecalgs import ( # pylint: disable=C0412 + get_algorithm_cls, + get_algorithm_cls_from_dnskey, + ) + from dns.dnssecalgs.base import GenericPrivateKey, GenericPublicKey +except ImportError: # pragma: no cover + validate = _need_pyca + validate_rrsig = _need_pyca + sign = _need_pyca + make_dnskey = _need_pyca + make_cdnskey = _need_pyca + _have_pyca = False +else: + validate = _validate # type: ignore + validate_rrsig = _validate_rrsig # type: ignore + sign = _sign + make_dnskey = _make_dnskey + make_cdnskey = _make_cdnskey + _have_pyca = True + +### BEGIN generated Algorithm constants + +RSAMD5 = Algorithm.RSAMD5 +DH = Algorithm.DH +DSA = Algorithm.DSA +ECC = Algorithm.ECC +RSASHA1 = Algorithm.RSASHA1 +DSANSEC3SHA1 = Algorithm.DSANSEC3SHA1 +RSASHA1NSEC3SHA1 = Algorithm.RSASHA1NSEC3SHA1 +RSASHA256 = Algorithm.RSASHA256 +RSASHA512 = Algorithm.RSASHA512 +ECCGOST = Algorithm.ECCGOST +ECDSAP256SHA256 = Algorithm.ECDSAP256SHA256 +ECDSAP384SHA384 = Algorithm.ECDSAP384SHA384 +ED25519 = Algorithm.ED25519 +ED448 = Algorithm.ED448 +INDIRECT = Algorithm.INDIRECT +PRIVATEDNS = Algorithm.PRIVATEDNS +PRIVATEOID = Algorithm.PRIVATEOID + +### END generated Algorithm constants diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__init__.py b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d1ffd51907548778953ee656f85473a8774a840a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__init__.py @@ -0,0 +1,121 @@ +from typing import Dict, Optional, Tuple, Type, Union + +import dns.name + +try: + from dns.dnssecalgs.base import GenericPrivateKey + from dns.dnssecalgs.dsa import PrivateDSA, PrivateDSANSEC3SHA1 + from dns.dnssecalgs.ecdsa import PrivateECDSAP256SHA256, PrivateECDSAP384SHA384 + from dns.dnssecalgs.eddsa import PrivateED448, PrivateED25519 + from dns.dnssecalgs.rsa import ( + PrivateRSAMD5, + PrivateRSASHA1, + PrivateRSASHA1NSEC3SHA1, + PrivateRSASHA256, + PrivateRSASHA512, + ) + + _have_cryptography = True +except ImportError: + _have_cryptography = False + +from dns.dnssectypes import Algorithm +from dns.exception import UnsupportedAlgorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + +AlgorithmPrefix = Optional[Union[bytes, dns.name.Name]] + +algorithms: Dict[Tuple[Algorithm, AlgorithmPrefix], Type[GenericPrivateKey]] = {} +if _have_cryptography: + algorithms.update( + { + (Algorithm.RSAMD5, None): PrivateRSAMD5, + (Algorithm.DSA, None): PrivateDSA, + (Algorithm.RSASHA1, None): PrivateRSASHA1, + (Algorithm.DSANSEC3SHA1, None): PrivateDSANSEC3SHA1, + (Algorithm.RSASHA1NSEC3SHA1, None): 
PrivateRSASHA1NSEC3SHA1, + (Algorithm.RSASHA256, None): PrivateRSASHA256, + (Algorithm.RSASHA512, None): PrivateRSASHA512, + (Algorithm.ECDSAP256SHA256, None): PrivateECDSAP256SHA256, + (Algorithm.ECDSAP384SHA384, None): PrivateECDSAP384SHA384, + (Algorithm.ED25519, None): PrivateED25519, + (Algorithm.ED448, None): PrivateED448, + } + ) + + +def get_algorithm_cls( + algorithm: Union[int, str], prefix: AlgorithmPrefix = None +) -> Type[GenericPrivateKey]: + """Get Private Key class from Algorithm. + + *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm. + + Raises ``UnsupportedAlgorithm`` if the algorithm is unknown. + + Returns a ``dns.dnssecalgs.GenericPrivateKey`` + """ + algorithm = Algorithm.make(algorithm) + cls = algorithms.get((algorithm, prefix)) + if cls: + return cls + raise UnsupportedAlgorithm( + 'algorithm "%s" not supported by dnspython' % Algorithm.to_text(algorithm) + ) + + +def get_algorithm_cls_from_dnskey(dnskey: DNSKEY) -> Type[GenericPrivateKey]: + """Get Private Key class from DNSKEY. + + *dnskey*, a ``DNSKEY`` to get Algorithm class for. + + Raises ``UnsupportedAlgorithm`` if the algorithm is unknown. + + Returns a ``dns.dnssecalgs.GenericPrivateKey`` + """ + prefix: AlgorithmPrefix = None + if dnskey.algorithm == Algorithm.PRIVATEDNS: + prefix, _ = dns.name.from_wire(dnskey.key, 0) + elif dnskey.algorithm == Algorithm.PRIVATEOID: + length = int(dnskey.key[0]) + prefix = dnskey.key[0 : length + 1] + return get_algorithm_cls(dnskey.algorithm, prefix) + + +def register_algorithm_cls( + algorithm: Union[int, str], + algorithm_cls: Type[GenericPrivateKey], + name: Optional[Union[dns.name.Name, str]] = None, + oid: Optional[bytes] = None, +) -> None: + """Register Algorithm Private Key class. + + *algorithm*, a ``str`` or ``int`` specifying the DNSKEY algorithm. + + *algorithm_cls*: A ``GenericPrivateKey`` class. + + *name*, an optional ``dns.name.Name`` or ``str``, for PRIVATEDNS algorithms. + + *oid*: an optional BER-encoded ``bytes`` for PRIVATEOID algorithms. + + Raises ``ValueError`` if a name or oid is specified incorrectly.
+ """ + if not issubclass(algorithm_cls, GenericPrivateKey): + raise TypeError("Invalid algorithm class") + algorithm = Algorithm.make(algorithm) + prefix: AlgorithmPrefix = None + if algorithm == Algorithm.PRIVATEDNS: + if name is None: + raise ValueError("Name required for PRIVATEDNS algorithms") + if isinstance(name, str): + name = dns.name.from_text(name) + prefix = name + elif algorithm == Algorithm.PRIVATEOID: + if oid is None: + raise ValueError("OID required for PRIVATEOID algorithms") + prefix = bytes([len(oid)]) + oid + elif name: + raise ValueError("Name only supported for PRIVATEDNS algorithm") + elif oid: + raise ValueError("OID only supported for PRIVATEOID algorithm") + algorithms[(algorithm, prefix)] = algorithm_cls diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..135d363d381e941fd6be9acbb5d4a842189c826e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/base.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/base.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1f463e4931844b007cfa93e3ab5796bd9901ed1d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/base.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/cryptography.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/cryptography.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0e5cb16804de243ba0090a96d8499c82538849a4 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/cryptography.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/dsa.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/dsa.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..48532df0f81712b8d04f3d1eb90cb3232a9e7362 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/dsa.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/ecdsa.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/ecdsa.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..81482db0ee81442147ffc466b4ddf47d0879def1 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/ecdsa.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/eddsa.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/eddsa.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..55d1fea857657b0ff33bb6265b11d4c43f76c52d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/eddsa.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/rsa.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/rsa.cpython-38.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..8c4ca71074df3f59b4b924e577163f5719134187 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/__pycache__/rsa.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/base.py b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/base.py new file mode 100644 index 0000000000000000000000000000000000000000..e990575a30ca19a9679ff2e3125f039d4c245b3e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/base.py @@ -0,0 +1,84 @@ +from abc import ABC, abstractmethod # pylint: disable=no-name-in-module +from typing import Any, Optional, Type + +import dns.rdataclass +import dns.rdatatype +from dns.dnssectypes import Algorithm +from dns.exception import AlgorithmKeyMismatch +from dns.rdtypes.ANY.DNSKEY import DNSKEY +from dns.rdtypes.dnskeybase import Flag + + +class GenericPublicKey(ABC): + algorithm: Algorithm + + @abstractmethod + def __init__(self, key: Any) -> None: + pass + + @abstractmethod + def verify(self, signature: bytes, data: bytes) -> None: + """Verify signed DNSSEC data""" + + @abstractmethod + def encode_key_bytes(self) -> bytes: + """Encode key as bytes for DNSKEY""" + + @classmethod + def _ensure_algorithm_key_combination(cls, key: DNSKEY) -> None: + if key.algorithm != cls.algorithm: + raise AlgorithmKeyMismatch + + def to_dnskey(self, flags: int = Flag.ZONE, protocol: int = 3) -> DNSKEY: + """Return public key as DNSKEY""" + return DNSKEY( + rdclass=dns.rdataclass.IN, + rdtype=dns.rdatatype.DNSKEY, + flags=flags, + protocol=protocol, + algorithm=self.algorithm, + key=self.encode_key_bytes(), + ) + + @classmethod + @abstractmethod + def from_dnskey(cls, key: DNSKEY) -> "GenericPublicKey": + """Create public key from DNSKEY""" + + @classmethod + @abstractmethod + def from_pem(cls, public_pem: bytes) -> "GenericPublicKey": + """Create public key from PEM-encoded SubjectPublicKeyInfo as specified + in RFC 5280""" + + @abstractmethod + def to_pem(self) -> bytes: + """Return public-key as PEM-encoded SubjectPublicKeyInfo as specified + in RFC 5280""" + + +class GenericPrivateKey(ABC): + public_cls: Type[GenericPublicKey] + + @abstractmethod + def __init__(self, key: Any) -> None: + pass + + @abstractmethod + def sign(self, data: bytes, verify: bool = False) -> bytes: + """Sign DNSSEC data""" + + @abstractmethod + def public_key(self) -> "GenericPublicKey": + """Return public key instance""" + + @classmethod + @abstractmethod + def from_pem( + cls, private_pem: bytes, password: Optional[bytes] = None + ) -> "GenericPrivateKey": + """Create private key from PEM-encoded PKCS#8""" + + @abstractmethod + def to_pem(self, password: Optional[bytes] = None) -> bytes: + """Return private key as PEM-encoded PKCS#8""" diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/cryptography.py b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/cryptography.py new file mode 100644 index 0000000000000000000000000000000000000000..5a31a8123db92080e9976795b2350dacbdc65abb --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/cryptography.py @@ -0,0 +1,68 @@ +from typing import Any, Optional, Type + +from cryptography.hazmat.primitives import serialization + +from dns.dnssecalgs.base import GenericPrivateKey, GenericPublicKey +from dns.exception import AlgorithmKeyMismatch + + +class CryptographyPublicKey(GenericPublicKey): + key: Any = None + key_cls: Any = None + + def __init__(self, key: Any) -> None: # pylint: 
disable=super-init-not-called + if self.key_cls is None: + raise TypeError("Undefined private key class") + if not isinstance( # pylint: disable=isinstance-second-argument-not-valid-type + key, self.key_cls + ): + raise AlgorithmKeyMismatch + self.key = key + + @classmethod + def from_pem(cls, public_pem: bytes) -> "GenericPublicKey": + key = serialization.load_pem_public_key(public_pem) + return cls(key=key) + + def to_pem(self) -> bytes: + return self.key.public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + + +class CryptographyPrivateKey(GenericPrivateKey): + key: Any = None + key_cls: Any = None + public_cls: Type[CryptographyPublicKey] + + def __init__(self, key: Any) -> None: # pylint: disable=super-init-not-called + if self.key_cls is None: + raise TypeError("Undefined private key class") + if not isinstance( # pylint: disable=isinstance-second-argument-not-valid-type + key, self.key_cls + ): + raise AlgorithmKeyMismatch + self.key = key + + def public_key(self) -> "CryptographyPublicKey": + return self.public_cls(key=self.key.public_key()) + + @classmethod + def from_pem( + cls, private_pem: bytes, password: Optional[bytes] = None + ) -> "GenericPrivateKey": + key = serialization.load_pem_private_key(private_pem, password=password) + return cls(key=key) + + def to_pem(self, password: Optional[bytes] = None) -> bytes: + encryption_algorithm: serialization.KeySerializationEncryption + if password: + encryption_algorithm = serialization.BestAvailableEncryption(password) + else: + encryption_algorithm = serialization.NoEncryption() + return self.key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=encryption_algorithm, + ) diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/dsa.py b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/dsa.py new file mode 100644 index 0000000000000000000000000000000000000000..0fe4690d39ec9f26caf1221146cf5309676e0173 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/dsa.py @@ -0,0 +1,101 @@ +import struct + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import dsa, utils + +from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey +from dns.dnssectypes import Algorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + + +class PublicDSA(CryptographyPublicKey): + key: dsa.DSAPublicKey + key_cls = dsa.DSAPublicKey + algorithm = Algorithm.DSA + chosen_hash = hashes.SHA1() + + def verify(self, signature: bytes, data: bytes) -> None: + sig_r = signature[1:21] + sig_s = signature[21:] + sig = utils.encode_dss_signature( + int.from_bytes(sig_r, "big"), int.from_bytes(sig_s, "big") + ) + self.key.verify(sig, data, self.chosen_hash) + + def encode_key_bytes(self) -> bytes: + """Encode a public key per RFC 2536, section 2.""" + pn = self.key.public_numbers() + dsa_t = (self.key.key_size // 8 - 64) // 8 + if dsa_t > 8: + raise ValueError("unsupported DSA key size") + octets = 64 + dsa_t * 8 + res = struct.pack("!B", dsa_t) + res += pn.parameter_numbers.q.to_bytes(20, "big") + res += pn.parameter_numbers.p.to_bytes(octets, "big") + res += pn.parameter_numbers.g.to_bytes(octets, "big") + res += pn.y.to_bytes(octets, "big") + return res + + @classmethod + def from_dnskey(cls, key: DNSKEY) -> "PublicDSA": + cls._ensure_algorithm_key_combination(key) + keyptr 
= key.key + (t,) = struct.unpack("!B", keyptr[0:1]) + keyptr = keyptr[1:] + octets = 64 + t * 8 + dsa_q = keyptr[0:20] + keyptr = keyptr[20:] + dsa_p = keyptr[0:octets] + keyptr = keyptr[octets:] + dsa_g = keyptr[0:octets] + keyptr = keyptr[octets:] + dsa_y = keyptr[0:octets] + return cls( + key=dsa.DSAPublicNumbers( # type: ignore + int.from_bytes(dsa_y, "big"), + dsa.DSAParameterNumbers( + int.from_bytes(dsa_p, "big"), + int.from_bytes(dsa_q, "big"), + int.from_bytes(dsa_g, "big"), + ), + ).public_key(default_backend()), + ) + + +class PrivateDSA(CryptographyPrivateKey): + key: dsa.DSAPrivateKey + key_cls = dsa.DSAPrivateKey + public_cls = PublicDSA + + def sign(self, data: bytes, verify: bool = False) -> bytes: + """Sign using a private key per RFC 2536, section 3.""" + public_dsa_key = self.key.public_key() + if public_dsa_key.key_size > 1024: + raise ValueError("DSA key size overflow") + der_signature = self.key.sign(data, self.public_cls.chosen_hash) + dsa_r, dsa_s = utils.decode_dss_signature(der_signature) + dsa_t = (public_dsa_key.key_size // 8 - 64) // 8 + octets = 20 + signature = ( + struct.pack("!B", dsa_t) + + int.to_bytes(dsa_r, length=octets, byteorder="big") + + int.to_bytes(dsa_s, length=octets, byteorder="big") + ) + if verify: + self.public_key().verify(signature, data) + return signature + + @classmethod + def generate(cls, key_size: int) -> "PrivateDSA": + return cls( + key=dsa.generate_private_key(key_size=key_size), + ) + + +class PublicDSANSEC3SHA1(PublicDSA): + algorithm = Algorithm.DSANSEC3SHA1 + + +class PrivateDSANSEC3SHA1(PrivateDSA): + public_cls = PublicDSANSEC3SHA1 diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/ecdsa.py b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/ecdsa.py new file mode 100644 index 0000000000000000000000000000000000000000..a31d79f2b8ee461bc6ae736c13372b2013abd3c6 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/ecdsa.py @@ -0,0 +1,89 @@ +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ec, utils + +from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey +from dns.dnssectypes import Algorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + + +class PublicECDSA(CryptographyPublicKey): + key: ec.EllipticCurvePublicKey + key_cls = ec.EllipticCurvePublicKey + algorithm: Algorithm + chosen_hash: hashes.HashAlgorithm + curve: ec.EllipticCurve + octets: int + + def verify(self, signature: bytes, data: bytes) -> None: + sig_r = signature[0 : self.octets] + sig_s = signature[self.octets :] + sig = utils.encode_dss_signature( + int.from_bytes(sig_r, "big"), int.from_bytes(sig_s, "big") + ) + self.key.verify(sig, data, ec.ECDSA(self.chosen_hash)) + + def encode_key_bytes(self) -> bytes: + """Encode a public key per RFC 6605, section 4.""" + pn = self.key.public_numbers() + return pn.x.to_bytes(self.octets, "big") + pn.y.to_bytes(self.octets, "big") + + @classmethod + def from_dnskey(cls, key: DNSKEY) -> "PublicECDSA": + cls._ensure_algorithm_key_combination(key) + ecdsa_x = key.key[0 : cls.octets] + ecdsa_y = key.key[cls.octets : cls.octets * 2] + return cls( + key=ec.EllipticCurvePublicNumbers( + curve=cls.curve, + x=int.from_bytes(ecdsa_x, "big"), + y=int.from_bytes(ecdsa_y, "big"), + ).public_key(default_backend()), + ) + + +class PrivateECDSA(CryptographyPrivateKey): + key: ec.EllipticCurvePrivateKey + key_cls = ec.EllipticCurvePrivateKey + 
public_cls = PublicECDSA + + def sign(self, data: bytes, verify: bool = False) -> bytes: + """Sign using a private key per RFC 6605, section 4.""" + der_signature = self.key.sign(data, ec.ECDSA(self.public_cls.chosen_hash)) + dsa_r, dsa_s = utils.decode_dss_signature(der_signature) + signature = int.to_bytes( + dsa_r, length=self.public_cls.octets, byteorder="big" + ) + int.to_bytes(dsa_s, length=self.public_cls.octets, byteorder="big") + if verify: + self.public_key().verify(signature, data) + return signature + + @classmethod + def generate(cls) -> "PrivateECDSA": + return cls( + key=ec.generate_private_key( + curve=cls.public_cls.curve, backend=default_backend() + ), + ) + + +class PublicECDSAP256SHA256(PublicECDSA): + algorithm = Algorithm.ECDSAP256SHA256 + chosen_hash = hashes.SHA256() + curve = ec.SECP256R1() + octets = 32 + + +class PrivateECDSAP256SHA256(PrivateECDSA): + public_cls = PublicECDSAP256SHA256 + + +class PublicECDSAP384SHA384(PublicECDSA): + algorithm = Algorithm.ECDSAP384SHA384 + chosen_hash = hashes.SHA384() + curve = ec.SECP384R1() + octets = 48 + + +class PrivateECDSAP384SHA384(PrivateECDSA): + public_cls = PublicECDSAP384SHA384 diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/eddsa.py b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/eddsa.py new file mode 100644 index 0000000000000000000000000000000000000000..705053423998b0df1944f42617b0ed85655d94e1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/eddsa.py @@ -0,0 +1,65 @@ +from typing import Type + +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import ed448, ed25519 + +from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey +from dns.dnssectypes import Algorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + + +class PublicEDDSA(CryptographyPublicKey): + def verify(self, signature: bytes, data: bytes) -> None: + self.key.verify(signature, data) + + def encode_key_bytes(self) -> bytes: + """Encode a public key per RFC 8080, section 3.""" + return self.key.public_bytes( + encoding=serialization.Encoding.Raw, format=serialization.PublicFormat.Raw + ) + + @classmethod + def from_dnskey(cls, key: DNSKEY) -> "PublicEDDSA": + cls._ensure_algorithm_key_combination(key) + return cls( + key=cls.key_cls.from_public_bytes(key.key), + ) + + +class PrivateEDDSA(CryptographyPrivateKey): + public_cls: Type[PublicEDDSA] + + def sign(self, data: bytes, verify: bool = False) -> bytes: + """Sign using a private key per RFC 8080, section 4.""" + signature = self.key.sign(data) + if verify: + self.public_key().verify(signature, data) + return signature + + @classmethod + def generate(cls) -> "PrivateEDDSA": + return cls(key=cls.key_cls.generate()) + + +class PublicED25519(PublicEDDSA): + key: ed25519.Ed25519PublicKey + key_cls = ed25519.Ed25519PublicKey + algorithm = Algorithm.ED25519 + + +class PrivateED25519(PrivateEDDSA): + key: ed25519.Ed25519PrivateKey + key_cls = ed25519.Ed25519PrivateKey + public_cls = PublicED25519 + + +class PublicED448(PublicEDDSA): + key: ed448.Ed448PublicKey + key_cls = ed448.Ed448PublicKey + algorithm = Algorithm.ED448 + + +class PrivateED448(PrivateEDDSA): + key: ed448.Ed448PrivateKey + key_cls = ed448.Ed448PrivateKey + public_cls = PublicED448 diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/rsa.py b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/rsa.py new file mode 100644 index 
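# --- editorial example (not part of the vendored patch) ---------------------
# A sketch of the ECDSA classes above: generate a P-256 key, wrap the public
# half as DNSKEY rdata (algorithm 13), and export the private half.
from dns.dnssecalgs.ecdsa import PrivateECDSAP256SHA256

p256_key = PrivateECDSAP256SHA256.generate()
p256_dnskey = p256_key.public_key().to_dnskey()  # flags defaults to ZONE
p256_pem = p256_key.to_pem()                     # unencrypted PKCS#8 PEM
# --- end editorial example ---------------------------------------------------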
0000000000000000000000000000000000000000..e95dcf1ddc45ad7c2731b258f5edd3abd34e5248 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/dnssecalgs/rsa.py @@ -0,0 +1,119 @@ +import math +import struct + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import padding, rsa + +from dns.dnssecalgs.cryptography import CryptographyPrivateKey, CryptographyPublicKey +from dns.dnssectypes import Algorithm +from dns.rdtypes.ANY.DNSKEY import DNSKEY + + +class PublicRSA(CryptographyPublicKey): + key: rsa.RSAPublicKey + key_cls = rsa.RSAPublicKey + algorithm: Algorithm + chosen_hash: hashes.HashAlgorithm + + def verify(self, signature: bytes, data: bytes) -> None: + self.key.verify(signature, data, padding.PKCS1v15(), self.chosen_hash) + + def encode_key_bytes(self) -> bytes: + """Encode a public key per RFC 3110, section 2.""" + pn = self.key.public_numbers() + _exp_len = math.ceil(int.bit_length(pn.e) / 8) + exp = int.to_bytes(pn.e, length=_exp_len, byteorder="big") + if _exp_len > 255: + exp_header = b"\0" + struct.pack("!H", _exp_len) + else: + exp_header = struct.pack("!B", _exp_len) + if pn.n.bit_length() < 512 or pn.n.bit_length() > 4096: + raise ValueError("unsupported RSA key length") + return exp_header + exp + pn.n.to_bytes((pn.n.bit_length() + 7) // 8, "big") + + @classmethod + def from_dnskey(cls, key: DNSKEY) -> "PublicRSA": + cls._ensure_algorithm_key_combination(key) + keyptr = key.key + (bytes_,) = struct.unpack("!B", keyptr[0:1]) + keyptr = keyptr[1:] + if bytes_ == 0: + (bytes_,) = struct.unpack("!H", keyptr[0:2]) + keyptr = keyptr[2:] + rsa_e = keyptr[0:bytes_] + rsa_n = keyptr[bytes_:] + return cls( + key=rsa.RSAPublicNumbers( + int.from_bytes(rsa_e, "big"), int.from_bytes(rsa_n, "big") + ).public_key(default_backend()) + ) + + +class PrivateRSA(CryptographyPrivateKey): + key: rsa.RSAPrivateKey + key_cls = rsa.RSAPrivateKey + public_cls = PublicRSA + default_public_exponent = 65537 + + def sign(self, data: bytes, verify: bool = False) -> bytes: + """Sign using a private key per RFC 3110, section 3.""" + signature = self.key.sign(data, padding.PKCS1v15(), self.public_cls.chosen_hash) + if verify: + self.public_key().verify(signature, data) + return signature + + @classmethod + def generate(cls, key_size: int) -> "PrivateRSA": + return cls( + key=rsa.generate_private_key( + public_exponent=cls.default_public_exponent, + key_size=key_size, + backend=default_backend(), + ) + ) + + +class PublicRSAMD5(PublicRSA): + algorithm = Algorithm.RSAMD5 + chosen_hash = hashes.MD5() + + +class PrivateRSAMD5(PrivateRSA): + public_cls = PublicRSAMD5 + + +class PublicRSASHA1(PublicRSA): + algorithm = Algorithm.RSASHA1 + chosen_hash = hashes.SHA1() + + +class PrivateRSASHA1(PrivateRSA): + public_cls = PublicRSASHA1 + + +class PublicRSASHA1NSEC3SHA1(PublicRSA): + algorithm = Algorithm.RSASHA1NSEC3SHA1 + chosen_hash = hashes.SHA1() + + +class PrivateRSASHA1NSEC3SHA1(PrivateRSA): + public_cls = PublicRSASHA1NSEC3SHA1 + + +class PublicRSASHA256(PublicRSA): + algorithm = Algorithm.RSASHA256 + chosen_hash = hashes.SHA256() + + +class PrivateRSASHA256(PrivateRSA): + public_cls = PublicRSASHA256 + + +class PublicRSASHA512(PublicRSA): + algorithm = Algorithm.RSASHA512 + chosen_hash = hashes.SHA512() + + +class PrivateRSASHA512(PrivateRSA): + public_cls = PublicRSASHA512 diff --git a/backend/test/lib/python3.8/site-packages/dns/dnssectypes.py 
b/backend/test/lib/python3.8/site-packages/dns/dnssectypes.py new file mode 100644 index 0000000000000000000000000000000000000000..02131e0adaeb85eb49351f4953c854023315fab9 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/dnssectypes.py @@ -0,0 +1,71 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Common DNSSEC-related types.""" + +# This is a separate file to avoid import circularity between dns.dnssec and +# the implementations of the DS and DNSKEY types. + +import dns.enum + + +class Algorithm(dns.enum.IntEnum): + RSAMD5 = 1 + DH = 2 + DSA = 3 + ECC = 4 + RSASHA1 = 5 + DSANSEC3SHA1 = 6 + RSASHA1NSEC3SHA1 = 7 + RSASHA256 = 8 + RSASHA512 = 10 + ECCGOST = 12 + ECDSAP256SHA256 = 13 + ECDSAP384SHA384 = 14 + ED25519 = 15 + ED448 = 16 + INDIRECT = 252 + PRIVATEDNS = 253 + PRIVATEOID = 254 + + @classmethod + def _maximum(cls): + return 255 + + +class DSDigest(dns.enum.IntEnum): + """DNSSEC Delegation Signer Digest Algorithm""" + + NULL = 0 + SHA1 = 1 + SHA256 = 2 + GOST = 3 + SHA384 = 4 + + @classmethod + def _maximum(cls): + return 255 + + +class NSEC3Hash(dns.enum.IntEnum): + """NSEC3 hash algorithm""" + + SHA1 = 1 + + @classmethod + def _maximum(cls): + return 255 diff --git a/backend/test/lib/python3.8/site-packages/dns/e164.py b/backend/test/lib/python3.8/site-packages/dns/e164.py new file mode 100644 index 0000000000000000000000000000000000000000..453736d40806838131569785f5eb2c65b8a2c310 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/e164.py @@ -0,0 +1,116 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS E.164 helpers.""" + +from typing import Iterable, Optional, Union + +import dns.exception +import dns.name +import dns.resolver + +#: The public E.164 domain. 
+public_enum_domain = dns.name.from_text("e164.arpa.") + + +def from_e164( + text: str, origin: Optional[dns.name.Name] = public_enum_domain +) -> dns.name.Name: + """Convert an E.164 number in textual form into a Name object whose + value is the ENUM domain name for that number. + + Non-digits in the text are ignored, i.e. "16505551212", + "+1.650.555.1212" and "1 (650) 555-1212" are all the same. + + *text*, a ``str``, is an E.164 number in textual form. + + *origin*, a ``dns.name.Name``, the domain in which the number + should be constructed. The default is ``e164.arpa.``. + + Returns a ``dns.name.Name``. + """ + + parts = [d for d in text if d.isdigit()] + parts.reverse() + return dns.name.from_text(".".join(parts), origin=origin) + + +def to_e164( + name: dns.name.Name, + origin: Optional[dns.name.Name] = public_enum_domain, + want_plus_prefix: bool = True, +) -> str: + """Convert an ENUM domain name into an E.164 number. + + Note that dnspython does not have any information about preferred + number formats within national numbering plans, so all numbers are + emitted as a simple string of digits, prefixed by a '+' (unless + *want_plus_prefix* is ``False``). + + *name* is a ``dns.name.Name``, the ENUM domain name. + + *origin* is a ``dns.name.Name``, a domain containing the ENUM + domain name. The name is relativized to this domain before being + converted to text. If ``None``, no relativization is done. + + *want_plus_prefix* is a ``bool``. If True, add a '+' to the beginning of + the returned number. + + Returns a ``str``. + + """ + if origin is not None: + name = name.relativize(origin) + dlabels = [d for d in name.labels if d.isdigit() and len(d) == 1] + if len(dlabels) != len(name.labels): + raise dns.exception.SyntaxError("non-digit labels in ENUM domain name") + dlabels.reverse() + text = b"".join(dlabels) + if want_plus_prefix: + text = b"+" + text + return text.decode() + + +def query( + number: str, + domains: Iterable[Union[dns.name.Name, str]], + resolver: Optional[dns.resolver.Resolver] = None, +) -> dns.resolver.Answer: + """Look for NAPTR RRs for the specified number in the specified domains. + + e.g. lookup('16505551212', ['e164.dnspython.org.', 'e164.arpa.']) + + *number*, a ``str`` is the number to look for. + + *domains* is an iterable containing ``dns.name.Name`` values. + + *resolver*, a ``dns.resolver.Resolver``, is the resolver to use. If + ``None``, the default resolver is used. + """ + + if resolver is None: + resolver = dns.resolver.get_default_resolver() + e_nx = dns.resolver.NXDOMAIN() + for domain in domains: + if isinstance(domain, str): + domain = dns.name.from_text(domain) + qname = dns.e164.from_e164(number, domain) + try: + return resolver.resolve(qname, "NAPTR") + except dns.resolver.NXDOMAIN as e: + e_nx += e + raise e_nx diff --git a/backend/test/lib/python3.8/site-packages/dns/edns.py b/backend/test/lib/python3.8/site-packages/dns/edns.py new file mode 100644 index 0000000000000000000000000000000000000000..f05baac4573c7fd857a865e0fa61aad283de8a75 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/edns.py @@ -0,0 +1,477 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2009-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
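# --- editorial example (not part of the vendored patch) ---------------------
# A round trip through the E.164 helpers above; non-digits are ignored on the
# way in, so all common phone-number spellings map to the same ENUM name.
import dns.e164

enum_name = dns.e164.from_e164("+1 (650) 555-1212")
# enum_name is 2.1.2.1.5.5.5.0.5.6.1.e164.arpa.
number = dns.e164.to_e164(enum_name)  # "+16505551212"
# --- end editorial example ---------------------------------------------------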
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""EDNS Options""" + +import math +import socket +import struct +from typing import Any, Dict, Optional, Union + +import dns.enum +import dns.inet +import dns.rdata +import dns.wire + + +class OptionType(dns.enum.IntEnum): + #: NSID + NSID = 3 + #: DAU + DAU = 5 + #: DHU + DHU = 6 + #: N3U + N3U = 7 + #: ECS (client-subnet) + ECS = 8 + #: EXPIRE + EXPIRE = 9 + #: COOKIE + COOKIE = 10 + #: KEEPALIVE + KEEPALIVE = 11 + #: PADDING + PADDING = 12 + #: CHAIN + CHAIN = 13 + #: EDE (extended-dns-error) + EDE = 15 + + @classmethod + def _maximum(cls): + return 65535 + + +class Option: + + """Base class for all EDNS option types.""" + + def __init__(self, otype: Union[OptionType, str]): + """Initialize an option. + + *otype*, a ``dns.edns.OptionType``, is the option type. + """ + self.otype = OptionType.make(otype) + + def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: + """Convert an option to wire format. + + Returns a ``bytes`` or ``None``. + + """ + raise NotImplementedError # pragma: no cover + + @classmethod + def from_wire_parser(cls, otype: OptionType, parser: "dns.wire.Parser") -> "Option": + """Build an EDNS option object from wire format. + + *otype*, a ``dns.edns.OptionType``, is the option type. + + *parser*, a ``dns.wire.Parser``, the parser, which should be + restricted to the option length. + + Returns a ``dns.edns.Option``. + """ + raise NotImplementedError # pragma: no cover + + def _cmp(self, other): + """Compare an EDNS option with another option of the same type. + + Returns < 0 if < *other*, 0 if == *other*, and > 0 if > *other*. + """ + wire = self.to_wire() + owire = other.to_wire() + if wire == owire: + return 0 + if wire > owire: + return 1 + return -1 + + def __eq__(self, other): + if not isinstance(other, Option): + return False + if self.otype != other.otype: + return False + return self._cmp(other) == 0 + + def __ne__(self, other): + if not isinstance(other, Option): + return True + if self.otype != other.otype: + return True + return self._cmp(other) != 0 + + def __lt__(self, other): + if not isinstance(other, Option) or self.otype != other.otype: + return NotImplemented + return self._cmp(other) < 0 + + def __le__(self, other): + if not isinstance(other, Option) or self.otype != other.otype: + return NotImplemented + return self._cmp(other) <= 0 + + def __ge__(self, other): + if not isinstance(other, Option) or self.otype != other.otype: + return NotImplemented + return self._cmp(other) >= 0 + + def __gt__(self, other): + if not isinstance(other, Option) or self.otype != other.otype: + return NotImplemented + return self._cmp(other) > 0 + + def __str__(self): + return self.to_text() + + +class GenericOption(Option): # lgtm[py/missing-equals] + + """Generic Option Class + + This class is used for EDNS option types for which we have no better + implementation.
+ """ + + def __init__(self, otype: Union[OptionType, str], data: Union[bytes, str]): + super().__init__(otype) + self.data = dns.rdata.Rdata._as_bytes(data, True) + + def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: + if file: + file.write(self.data) + return None + else: + return self.data + + def to_text(self) -> str: + return "Generic %d" % self.otype + + @classmethod + def from_wire_parser( + cls, otype: Union[OptionType, str], parser: "dns.wire.Parser" + ) -> Option: + return cls(otype, parser.get_remaining()) + + +class ECSOption(Option): # lgtm[py/missing-equals] + """EDNS Client Subnet (ECS, RFC7871)""" + + def __init__(self, address: str, srclen: Optional[int] = None, scopelen: int = 0): + """*address*, a ``str``, is the client address information. + + *srclen*, an ``int``, the source prefix length, which is the + leftmost number of bits of the address to be used for the + lookup. The default is 24 for IPv4 and 56 for IPv6. + + *scopelen*, an ``int``, the scope prefix length. This value + must be 0 in queries, and should be set in responses. + """ + + super().__init__(OptionType.ECS) + af = dns.inet.af_for_address(address) + + if af == socket.AF_INET6: + self.family = 2 + if srclen is None: + srclen = 56 + address = dns.rdata.Rdata._as_ipv6_address(address) + srclen = dns.rdata.Rdata._as_int(srclen, 0, 128) + scopelen = dns.rdata.Rdata._as_int(scopelen, 0, 128) + elif af == socket.AF_INET: + self.family = 1 + if srclen is None: + srclen = 24 + address = dns.rdata.Rdata._as_ipv4_address(address) + srclen = dns.rdata.Rdata._as_int(srclen, 0, 32) + scopelen = dns.rdata.Rdata._as_int(scopelen, 0, 32) + else: # pragma: no cover (this will never happen) + raise ValueError("Bad address family") + + assert srclen is not None + self.address = address + self.srclen = srclen + self.scopelen = scopelen + + addrdata = dns.inet.inet_pton(af, address) + nbytes = int(math.ceil(srclen / 8.0)) + + # Truncate to srclen and pad to the end of the last octet needed + # See RFC section 6 + self.addrdata = addrdata[:nbytes] + nbits = srclen % 8 + if nbits != 0: + last = struct.pack("B", ord(self.addrdata[-1:]) & (0xFF << (8 - nbits))) + self.addrdata = self.addrdata[:-1] + last + + def to_text(self) -> str: + return "ECS {}/{} scope/{}".format(self.address, self.srclen, self.scopelen) + + @staticmethod + def from_text(text: str) -> Option: + """Convert a string into a `dns.edns.ECSOption` + + *text*, a `str`, the text form of the option. + + Returns a `dns.edns.ECSOption`. 
+ + Examples: + + >>> import dns.edns + >>> + >>> # basic example + >>> dns.edns.ECSOption.from_text('1.2.3.4/24') + >>> + >>> # also understands scope + >>> dns.edns.ECSOption.from_text('1.2.3.4/24/32') + >>> + >>> # IPv6 + >>> dns.edns.ECSOption.from_text('2001:4b98::1/64/64') + >>> + >>> # it understands results from `dns.edns.ECSOption.to_text()` + >>> dns.edns.ECSOption.from_text('ECS 1.2.3.4/24/32') + """ + optional_prefix = "ECS" + tokens = text.split() + ecs_text = None + if len(tokens) == 1: + ecs_text = tokens[0] + elif len(tokens) == 2: + if tokens[0] != optional_prefix: + raise ValueError('could not parse ECS from "{}"'.format(text)) + ecs_text = tokens[1] + else: + raise ValueError('could not parse ECS from "{}"'.format(text)) + n_slashes = ecs_text.count("/") + if n_slashes == 1: + address, tsrclen = ecs_text.split("/") + tscope = "0" + elif n_slashes == 2: + address, tsrclen, tscope = ecs_text.split("/") + else: + raise ValueError('could not parse ECS from "{}"'.format(text)) + try: + scope = int(tscope) + except ValueError: + raise ValueError( + "invalid scope " + '"{}": scope must be an integer'.format(tscope) + ) + try: + srclen = int(tsrclen) + except ValueError: + raise ValueError( + "invalid srclen " + '"{}": srclen must be an integer'.format(tsrclen) + ) + return ECSOption(address, srclen, scope) + + def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: + value = ( + struct.pack("!HBB", self.family, self.srclen, self.scopelen) + self.addrdata + ) + if file: + file.write(value) + return None + else: + return value + + @classmethod + def from_wire_parser( + cls, otype: Union[OptionType, str], parser: "dns.wire.Parser" + ) -> Option: + family, src, scope = parser.get_struct("!HBB") + addrlen = int(math.ceil(src / 8.0)) + prefix = parser.get_bytes(addrlen) + if family == 1: + pad = 4 - addrlen + addr = dns.ipv4.inet_ntoa(prefix + b"\x00" * pad) + elif family == 2: + pad = 16 - addrlen + addr = dns.ipv6.inet_ntoa(prefix + b"\x00" * pad) + else: + raise ValueError("unsupported family") + + return cls(addr, src, scope) + + +class EDECode(dns.enum.IntEnum): + OTHER = 0 + UNSUPPORTED_DNSKEY_ALGORITHM = 1 + UNSUPPORTED_DS_DIGEST_TYPE = 2 + STALE_ANSWER = 3 + FORGED_ANSWER = 4 + DNSSEC_INDETERMINATE = 5 + DNSSEC_BOGUS = 6 + SIGNATURE_EXPIRED = 7 + SIGNATURE_NOT_YET_VALID = 8 + DNSKEY_MISSING = 9 + RRSIGS_MISSING = 10 + NO_ZONE_KEY_BIT_SET = 11 + NSEC_MISSING = 12 + CACHED_ERROR = 13 + NOT_READY = 14 + BLOCKED = 15 + CENSORED = 16 + FILTERED = 17 + PROHIBITED = 18 + STALE_NXDOMAIN_ANSWER = 19 + NOT_AUTHORITATIVE = 20 + NOT_SUPPORTED = 21 + NO_REACHABLE_AUTHORITY = 22 + NETWORK_ERROR = 23 + INVALID_DATA = 24 + + @classmethod + def _maximum(cls): + return 65535 + + +class EDEOption(Option): # lgtm[py/missing-equals] + """Extended DNS Error (EDE, RFC8914)""" + + def __init__(self, code: Union[EDECode, str], text: Optional[str] = None): + """*code*, a ``dns.edns.EDECode`` or ``str``, the info code of the + extended error. + + *text*, a ``str`` or ``None``, specifying additional information about + the error. 
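# --- editorial example (not part of the vendored patch) ---------------------
# A sketch of attaching the ECSOption above to an outgoing query; the query
# name is an illustrative placeholder.
import dns.edns
import dns.message

ecs = dns.edns.ECSOption("192.0.2.0", srclen=24)
query = dns.message.make_query("www.example.", "A", use_edns=0, options=[ecs])
# --- end editorial example ---------------------------------------------------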
+ """ + + super().__init__(OptionType.EDE) + + self.code = EDECode.make(code) + if text is not None and not isinstance(text, str): + raise ValueError("text must be string or None") + self.text = text + + def to_text(self) -> str: + output = f"EDE {self.code}" + if self.text is not None: + output += f": {self.text}" + return output + + def to_wire(self, file: Optional[Any] = None) -> Optional[bytes]: + value = struct.pack("!H", self.code) + if self.text is not None: + value += self.text.encode("utf8") + + if file: + file.write(value) + return None + else: + return value + + @classmethod + def from_wire_parser( + cls, otype: Union[OptionType, str], parser: "dns.wire.Parser" + ) -> Option: + code = EDECode.make(parser.get_uint16()) + text = parser.get_remaining() + + if text: + if text[-1] == 0: # text MAY be null-terminated + text = text[:-1] + btext = text.decode("utf8") + else: + btext = None + + return cls(code, btext) + + +_type_to_class: Dict[OptionType, Any] = { + OptionType.ECS: ECSOption, + OptionType.EDE: EDEOption, +} + + +def get_option_class(otype: OptionType) -> Any: + """Return the class for the specified option type. + + The GenericOption class is used if a more specific class is not + known. + """ + + cls = _type_to_class.get(otype) + if cls is None: + cls = GenericOption + return cls + + +def option_from_wire_parser( + otype: Union[OptionType, str], parser: "dns.wire.Parser" +) -> Option: + """Build an EDNS option object from wire format. + + *otype*, an ``int``, is the option type. + + *parser*, a ``dns.wire.Parser``, the parser, which should be + restricted to the option length. + + Returns an instance of a subclass of ``dns.edns.Option``. + """ + otype = OptionType.make(otype) + cls = get_option_class(otype) + return cls.from_wire_parser(otype, parser) + + +def option_from_wire( + otype: Union[OptionType, str], wire: bytes, current: int, olen: int +) -> Option: + """Build an EDNS option object from wire format. + + *otype*, an ``int``, is the option type. + + *wire*, a ``bytes``, is the wire-format message. + + *current*, an ``int``, is the offset in *wire* of the beginning + of the rdata. + + *olen*, an ``int``, is the length of the wire-format option data + + Returns an instance of a subclass of ``dns.edns.Option``. + """ + parser = dns.wire.Parser(wire, current) + with parser.restrict_to(olen): + return option_from_wire_parser(otype, parser) + + +def register_type(implementation: Any, otype: OptionType) -> None: + """Register the implementation of an option type. + + *implementation*, a ``class``, is a subclass of ``dns.edns.Option``. + + *otype*, an ``int``, is the option type. + """ + + _type_to_class[otype] = implementation + + +### BEGIN generated OptionType constants + +NSID = OptionType.NSID +DAU = OptionType.DAU +DHU = OptionType.DHU +N3U = OptionType.N3U +ECS = OptionType.ECS +EXPIRE = OptionType.EXPIRE +COOKIE = OptionType.COOKIE +KEEPALIVE = OptionType.KEEPALIVE +PADDING = OptionType.PADDING +CHAIN = OptionType.CHAIN +EDE = OptionType.EDE + +### END generated OptionType constants diff --git a/backend/test/lib/python3.8/site-packages/dns/entropy.py b/backend/test/lib/python3.8/site-packages/dns/entropy.py new file mode 100644 index 0000000000000000000000000000000000000000..4dcdc6272ca3a670b1616f4c95f2a18b1803bc82 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/entropy.py @@ -0,0 +1,130 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2009-2017 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import hashlib +import os +import random +import threading +import time +from typing import Any, Optional + + +class EntropyPool: + # This is an entropy pool for Python implementations that do not + # have a working SystemRandom. I'm not sure there are any, but + # leaving this code doesn't hurt anything as the library code + # is used if present. + + def __init__(self, seed: Optional[bytes] = None): + self.pool_index = 0 + self.digest: Optional[bytearray] = None + self.next_byte = 0 + self.lock = threading.Lock() + self.hash = hashlib.sha1() + self.hash_len = 20 + self.pool = bytearray(b"\0" * self.hash_len) + if seed is not None: + self._stir(seed) + self.seeded = True + self.seed_pid = os.getpid() + else: + self.seeded = False + self.seed_pid = 0 + + def _stir(self, entropy: bytes) -> None: + for c in entropy: + if self.pool_index == self.hash_len: + self.pool_index = 0 + b = c & 0xFF + self.pool[self.pool_index] ^= b + self.pool_index += 1 + + def stir(self, entropy: bytes) -> None: + with self.lock: + self._stir(entropy) + + def _maybe_seed(self) -> None: + if not self.seeded or self.seed_pid != os.getpid(): + try: + seed = os.urandom(16) + except Exception: # pragma: no cover + try: + with open("/dev/urandom", "rb", 0) as r: + seed = r.read(16) + except Exception: + seed = str(time.time()).encode() + self.seeded = True + self.seed_pid = os.getpid() + self.digest = None + seed = bytearray(seed) + self._stir(seed) + + def random_8(self) -> int: + with self.lock: + self._maybe_seed() + if self.digest is None or self.next_byte == self.hash_len: + self.hash.update(bytes(self.pool)) + self.digest = bytearray(self.hash.digest()) + self._stir(self.digest) + self.next_byte = 0 + value = self.digest[self.next_byte] + self.next_byte += 1 + return value + + def random_16(self) -> int: + return self.random_8() * 256 + self.random_8() + + def random_32(self) -> int: + return self.random_16() * 65536 + self.random_16() + + def random_between(self, first: int, last: int) -> int: + size = last - first + 1 + if size > 4294967296: + raise ValueError("too big") + if size > 65536: + rand = self.random_32 + max = 4294967295 + elif size > 256: + rand = self.random_16 + max = 65535 + else: + rand = self.random_8 + max = 255 + return first + size * rand() // (max + 1) + + +pool = EntropyPool() + +system_random: Optional[Any] +try: + system_random = random.SystemRandom() +except Exception: # pragma: no cover + system_random = None + + +def random_16() -> int: + if system_random is not None: + return system_random.randrange(0, 65536) + else: + return pool.random_16() + + +def between(first: int, last: int) -> int: + if system_random is not None: + return system_random.randrange(first, last + 1) + else: + return pool.random_between(first, last) diff --git 
a/backend/test/lib/python3.8/site-packages/dns/enum.py b/backend/test/lib/python3.8/site-packages/dns/enum.py new file mode 100644 index 0000000000000000000000000000000000000000..71461f1776f3990311f656cb37f6aab68e0b9f71 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/enum.py @@ -0,0 +1,116 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import enum +from typing import Type, TypeVar, Union + +TIntEnum = TypeVar("TIntEnum", bound="IntEnum") + + +class IntEnum(enum.IntEnum): + @classmethod + def _missing_(cls, value): + cls._check_value(value) + val = int.__new__(cls, value) + val._name_ = cls._extra_to_text(value, None) or f"{cls._prefix()}{value}" + val._value_ = value + return val + + @classmethod + def _check_value(cls, value): + max = cls._maximum() + if not isinstance(value, int): + raise TypeError + if value < 0 or value > max: + name = cls._short_name() + raise ValueError(f"{name} must be an int between >= 0 and <= {max}") + + @classmethod + def from_text(cls: Type[TIntEnum], text: str) -> TIntEnum: + text = text.upper() + try: + return cls[text] + except KeyError: + pass + value = cls._extra_from_text(text) + if value: + return value + prefix = cls._prefix() + if text.startswith(prefix) and text[len(prefix) :].isdigit(): + value = int(text[len(prefix) :]) + cls._check_value(value) + try: + return cls(value) + except ValueError: + return value + raise cls._unknown_exception_class() + + @classmethod + def to_text(cls: Type[TIntEnum], value: int) -> str: + cls._check_value(value) + try: + text = cls(value).name + except ValueError: + text = None + text = cls._extra_to_text(value, text) + if text is None: + text = f"{cls._prefix()}{value}" + return text + + @classmethod + def make(cls: Type[TIntEnum], value: Union[int, str]) -> TIntEnum: + """Convert text or a value into an enumerated type, if possible. + + *value*, the ``int`` or ``str`` to convert. + + Raises a class-specific exception if a ``str`` is provided that + cannot be converted. + + Raises ``ValueError`` if the value is out of range. + + Returns an enumeration from the calling class corresponding to the + value, if one is defined, or an ``int`` otherwise. 
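+
+        Example (illustrative, using a concrete subclass such as
+        ``dns.edns.OptionType``):
+
+        >>> import dns.edns
+        >>> dns.edns.OptionType.make('ECS')
+        <OptionType.ECS: 8>
+        >>> dns.edns.OptionType.make(8)
+        <OptionType.ECS: 8>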
+ """ + + if isinstance(value, str): + return cls.from_text(value) + cls._check_value(value) + return cls(value) + + @classmethod + def _maximum(cls): + raise NotImplementedError # pragma: no cover + + @classmethod + def _short_name(cls): + return cls.__name__.lower() + + @classmethod + def _prefix(cls): + return "" + + @classmethod + def _extra_from_text(cls, text): # pylint: disable=W0613 + return None + + @classmethod + def _extra_to_text(cls, value, current_text): # pylint: disable=W0613 + return current_text + + @classmethod + def _unknown_exception_class(cls): + return ValueError diff --git a/backend/test/lib/python3.8/site-packages/dns/exception.py b/backend/test/lib/python3.8/site-packages/dns/exception.py new file mode 100644 index 0000000000000000000000000000000000000000..6982373de2a872057ca1fda3a2a752ff8d566355 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/exception.py @@ -0,0 +1,169 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Common DNS Exceptions. + +Dnspython modules may also define their own exceptions, which will +always be subclasses of ``DNSException``. +""" + + +from typing import Optional, Set + + +class DNSException(Exception): + """Abstract base class shared by all dnspython exceptions. + + It supports two basic modes of operation: + + a) Old/compatible mode is used if ``__init__`` was called with + empty *kwargs*. In compatible mode all *args* are passed + to the standard Python Exception class as before and all *args* are + printed by the standard ``__str__`` implementation. Class variable + ``msg`` (or doc string if ``msg`` is ``None``) is returned from ``str()`` + if *args* is empty. + + b) New/parametrized mode is used if ``__init__`` was called with + non-empty *kwargs*. + In the new mode *args* must be empty and all kwargs must match + those set in class variable ``supp_kwargs``. All kwargs are stored inside + ``self.kwargs`` and used in a new ``__str__`` implementation to construct + a formatted message based on the ``fmt`` class variable, a ``string``. + + In the simplest case it is enough to override the ``supp_kwargs`` + and ``fmt`` class variables to get nice parametrized messages. 
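+
+    Example of the parametrized mode (illustrative, using the ``Timeout``
+    subclass defined below):
+
+    >>> import dns.exception
+    >>> str(dns.exception.Timeout(timeout=2.5))
+    'The DNS operation timed out after 2.500 seconds'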
+ """ + + msg: Optional[str] = None # non-parametrized message + supp_kwargs: Set[str] = set() # accepted parameters for _fmt_kwargs (sanity check) + fmt: Optional[str] = None # message parametrized with results from _fmt_kwargs + + def __init__(self, *args, **kwargs): + self._check_params(*args, **kwargs) + if kwargs: + # This call to a virtual method from __init__ is ok in our usage + self.kwargs = self._check_kwargs(**kwargs) # lgtm[py/init-calls-subclass] + self.msg = str(self) + else: + self.kwargs = dict() # defined but empty for old mode exceptions + if self.msg is None: + # doc string is better implicit message than empty string + self.msg = self.__doc__ + if args: + super().__init__(*args) + else: + super().__init__(self.msg) + + def _check_params(self, *args, **kwargs): + """Old exceptions supported only args and not kwargs. + + For sanity we do not allow to mix old and new behavior.""" + if args or kwargs: + assert bool(args) != bool( + kwargs + ), "keyword arguments are mutually exclusive with positional args" + + def _check_kwargs(self, **kwargs): + if kwargs: + assert ( + set(kwargs.keys()) == self.supp_kwargs + ), "following set of keyword args is required: %s" % (self.supp_kwargs) + return kwargs + + def _fmt_kwargs(self, **kwargs): + """Format kwargs before printing them. + + Resulting dictionary has to have keys necessary for str.format call + on fmt class variable. + """ + fmtargs = {} + for kw, data in kwargs.items(): + if isinstance(data, (list, set)): + # convert list of <someobj> to list of str(<someobj>) + fmtargs[kw] = list(map(str, data)) + if len(fmtargs[kw]) == 1: + # remove list brackets [] from single-item lists + fmtargs[kw] = fmtargs[kw].pop() + else: + fmtargs[kw] = data + return fmtargs + + def __str__(self): + if self.kwargs and self.fmt: + # provide custom message constructed from keyword arguments + fmtargs = self._fmt_kwargs(**self.kwargs) + return self.fmt.format(**fmtargs) + else: + # print *args directly in the same way as old DNSException + return super().__str__() + + +class FormError(DNSException): + """DNS message is malformed.""" + + +class SyntaxError(DNSException): + """Text input is malformed.""" + + +class UnexpectedEnd(SyntaxError): + """Text input ended unexpectedly.""" + + +class TooBig(DNSException): + """The DNS message is too big.""" + + +class Timeout(DNSException): + """The DNS operation timed out.""" + + supp_kwargs = {"timeout"} + fmt = "The DNS operation timed out after {timeout:.3f} seconds" + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class UnsupportedAlgorithm(DNSException): + """The DNSSEC algorithm is not supported.""" + + +class AlgorithmKeyMismatch(UnsupportedAlgorithm): + """The DNSSEC algorithm is not supported for the given key type.""" + + +class ValidationFailure(DNSException): + """The DNSSEC signature is invalid.""" + + +class DeniedByPolicy(DNSException): + """Denied by DNSSEC policy.""" + + +class ExceptionWrapper: + def __init__(self, exception_class): + self.exception_class = exception_class + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type is not None and not isinstance(exc_val, self.exception_class): + raise self.exception_class(str(exc_val)) from exc_val + return False diff --git a/backend/test/lib/python3.8/site-packages/dns/flags.py b/backend/test/lib/python3.8/site-packages/dns/flags.py new file mode 100644 index 
0000000000000000000000000000000000000000..4c60be1330b789a9a727fd943a59b44c9b8b8107 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/flags.py @@ -0,0 +1,123 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Message Flags.""" + +import enum +from typing import Any + +# Standard DNS flags + + +class Flag(enum.IntFlag): + #: Query Response + QR = 0x8000 + #: Authoritative Answer + AA = 0x0400 + #: Truncated Response + TC = 0x0200 + #: Recursion Desired + RD = 0x0100 + #: Recursion Available + RA = 0x0080 + #: Authentic Data + AD = 0x0020 + #: Checking Disabled + CD = 0x0010 + + +# EDNS flags + + +class EDNSFlag(enum.IntFlag): + #: DNSSEC answer OK + DO = 0x8000 + + +def _from_text(text: str, enum_class: Any) -> int: + flags = 0 + tokens = text.split() + for t in tokens: + flags |= enum_class[t.upper()] + return flags + + +def _to_text(flags: int, enum_class: Any) -> str: + text_flags = [] + for k, v in enum_class.__members__.items(): + if flags & v != 0: + text_flags.append(k) + return " ".join(text_flags) + + +def from_text(text: str) -> int: + """Convert a space-separated list of flag text values into a flags + value. + + Returns an ``int`` + """ + + return _from_text(text, Flag) + + +def to_text(flags: int) -> str: + """Convert a flags value into a space-separated list of flag text + values. + + Returns a ``str``. + """ + + return _to_text(flags, Flag) + + +def edns_from_text(text: str) -> int: + """Convert a space-separated list of EDNS flag text values into a EDNS + flags value. + + Returns an ``int`` + """ + + return _from_text(text, EDNSFlag) + + +def edns_to_text(flags: int) -> str: + """Convert an EDNS flags value into a space-separated list of EDNS flag + text values. + + Returns a ``str``. + """ + + return _to_text(flags, EDNSFlag) + + +### BEGIN generated Flag constants + +QR = Flag.QR +AA = Flag.AA +TC = Flag.TC +RD = Flag.RD +RA = Flag.RA +AD = Flag.AD +CD = Flag.CD + +### END generated Flag constants + +### BEGIN generated EDNSFlag constants + +DO = EDNSFlag.DO + +### END generated EDNSFlag constants diff --git a/backend/test/lib/python3.8/site-packages/dns/grange.py b/backend/test/lib/python3.8/site-packages/dns/grange.py new file mode 100644 index 0000000000000000000000000000000000000000..3a52278febf1302462e24ca5ece733492b55f096 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/grange.py @@ -0,0 +1,72 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2012-2017 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS GENERATE range conversion.""" + +from typing import Tuple + +import dns + + +def from_text(text: str) -> Tuple[int, int, int]: + """Convert the text form of a range in a ``$GENERATE`` statement to an + integer. + + *text*, a ``str``, the textual range in ``$GENERATE`` form. + + Returns a tuple of three ``int`` values ``(start, stop, step)``. + """ + + start = -1 + stop = -1 + step = 1 + cur = "" + state = 0 + # state 0 1 2 + # x - y / z + + if text and text[0] == "-": + raise dns.exception.SyntaxError("Start cannot be a negative number") + + for c in text: + if c == "-" and state == 0: + start = int(cur) + cur = "" + state = 1 + elif c == "/": + stop = int(cur) + cur = "" + state = 2 + elif c.isdigit(): + cur += c + else: + raise dns.exception.SyntaxError("Could not parse %s" % (c)) + + if state == 0: + raise dns.exception.SyntaxError("no stop value specified") + elif state == 1: + stop = int(cur) + else: + assert state == 2 + step = int(cur) + + assert step >= 1 + assert start >= 0 + if start > stop: + raise dns.exception.SyntaxError("start must be <= stop") + + return (start, stop, step) diff --git a/backend/test/lib/python3.8/site-packages/dns/immutable.py b/backend/test/lib/python3.8/site-packages/dns/immutable.py new file mode 100644 index 0000000000000000000000000000000000000000..cab8d6fb5a03164734bf5af4f97ad45b81c0a9fb --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/immutable.py @@ -0,0 +1,62 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import collections.abc +from typing import Any + +from dns._immutable_ctx import immutable + + +@immutable +class Dict(collections.abc.Mapping): # lgtm[py/missing-equals] + def __init__(self, dictionary: Any, no_copy: bool = False): + """Make an immutable dictionary from the specified dictionary. + + If *no_copy* is `True`, then *dictionary* will be wrapped instead + of copied. Only set this if you are sure there will be no external + references to the dictionary. + """ + if no_copy and isinstance(dictionary, dict): + self._odict = dictionary + else: + self._odict = dict(dictionary) + self._hash = None + + def __getitem__(self, key): + return self._odict.__getitem__(key) + + def __hash__(self): # pylint: disable=invalid-hash-returned + if self._hash is None: + h = 0 + for key in sorted(self._odict.keys()): + h ^= hash(key) + object.__setattr__(self, "_hash", h) + # this does return an int, but pylint doesn't figure that out + return self._hash + + def __len__(self): + return len(self._odict) + + def __iter__(self): + return iter(self._odict) + + +def constify(o: Any) -> Any: + """ + Convert mutable types to immutable types. 
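+
+    Example (illustrative):
+
+    >>> constify([1, [2, 3], bytearray(b'ab')])
+    (1, (2, 3), b'ab')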
+ """ + if isinstance(o, bytearray): + return bytes(o) + if isinstance(o, tuple): + try: + hash(o) + return o + except Exception: + return tuple(constify(elt) for elt in o) + if isinstance(o, list): + return tuple(constify(elt) for elt in o) + if isinstance(o, dict): + cdict = dict() + for k, v in o.items(): + cdict[k] = constify(v) + return Dict(cdict, True) + return o diff --git a/backend/test/lib/python3.8/site-packages/dns/inet.py b/backend/test/lib/python3.8/site-packages/dns/inet.py new file mode 100644 index 0000000000000000000000000000000000000000..02e925c6bf89f76ba9a8c0b10abfb97a32382c74 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/inet.py @@ -0,0 +1,180 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Generic Internet address helper functions.""" + +import socket +from typing import Any, Optional, Tuple + +import dns.ipv4 +import dns.ipv6 + +# We assume that AF_INET and AF_INET6 are always defined. We keep +# these here for the benefit of any old code (unlikely though that +# is!). +AF_INET = socket.AF_INET +AF_INET6 = socket.AF_INET6 + + +def inet_pton(family: int, text: str) -> bytes: + """Convert the textual form of a network address into its binary form. + + *family* is an ``int``, the address family. + + *text* is a ``str``, the textual address. + + Raises ``NotImplementedError`` if the address family specified is not + implemented. + + Returns a ``bytes``. + """ + + if family == AF_INET: + return dns.ipv4.inet_aton(text) + elif family == AF_INET6: + return dns.ipv6.inet_aton(text, True) + else: + raise NotImplementedError + + +def inet_ntop(family: int, address: bytes) -> str: + """Convert the binary form of a network address into its textual form. + + *family* is an ``int``, the address family. + + *address* is a ``bytes``, the network address in binary form. + + Raises ``NotImplementedError`` if the address family specified is not + implemented. + + Returns a ``str``. + """ + + if family == AF_INET: + return dns.ipv4.inet_ntoa(address) + elif family == AF_INET6: + return dns.ipv6.inet_ntoa(address) + else: + raise NotImplementedError + + +def af_for_address(text: str) -> int: + """Determine the address family of a textual-form network address. + + *text*, a ``str``, the textual address. + + Raises ``ValueError`` if the address family cannot be determined + from the input. + + Returns an ``int``. + """ + + try: + dns.ipv4.inet_aton(text) + return AF_INET + except Exception: + try: + dns.ipv6.inet_aton(text, True) + return AF_INET6 + except Exception: + raise ValueError + + +def is_multicast(text: str) -> bool: + """Is the textual-form network address a multicast address? + + *text*, a ``str``, the textual address. 
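+
+    Example (illustrative):
+
+    >>> import dns.inet
+    >>> dns.inet.is_multicast('224.0.0.1')
+    True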
+ + Raises ``ValueError`` if the address family cannot be determined + from the input. + + Returns a ``bool``. + """ + + try: + first = dns.ipv4.inet_aton(text)[0] + return first >= 224 and first <= 239 + except Exception: + try: + first = dns.ipv6.inet_aton(text, True)[0] + return first == 255 + except Exception: + raise ValueError + + +def is_address(text: str) -> bool: + """Is the specified string an IPv4 or IPv6 address? + + *text*, a ``str``, the textual address. + + Returns a ``bool``. + """ + + try: + dns.ipv4.inet_aton(text) + return True + except Exception: + try: + dns.ipv6.inet_aton(text, True) + return True + except Exception: + return False + + +def low_level_address_tuple( + high_tuple: Tuple[str, int], af: Optional[int] = None +) -> Any: + """Given a "high-level" address tuple, i.e. + an (address, port) return the appropriate "low-level" address tuple + suitable for use in socket calls. + + If an *af* other than ``None`` is provided, it is assumed the + address in the high-level tuple is valid and has that af. If af + is ``None``, then af_for_address will be called. + """ + address, port = high_tuple + if af is None: + af = af_for_address(address) + if af == AF_INET: + return (address, port) + elif af == AF_INET6: + i = address.find("%") + if i < 0: + # no scope, shortcut! + return (address, port, 0, 0) + # try to avoid getaddrinfo() + addrpart = address[:i] + scope = address[i + 1 :] + if scope.isdigit(): + return (addrpart, port, 0, int(scope)) + try: + return (addrpart, port, 0, socket.if_nametoindex(scope)) + except AttributeError: # pragma: no cover (we can't really test this) + ai_flags = socket.AI_NUMERICHOST + ((*_, tup), *_) = socket.getaddrinfo(address, port, flags=ai_flags) + return tup + else: + raise NotImplementedError(f"unknown address family {af}") + + +def any_for_af(af): + """Return the 'any' address for the specified address family.""" + if af == socket.AF_INET: + return "0.0.0.0" + elif af == socket.AF_INET6: + return "::" + raise NotImplementedError(f"unknown address family {af}") diff --git a/backend/test/lib/python3.8/site-packages/dns/ipv4.py b/backend/test/lib/python3.8/site-packages/dns/ipv4.py new file mode 100644 index 0000000000000000000000000000000000000000..f549150a901356c4907efba97181ee385f1eebfc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/ipv4.py @@ -0,0 +1,64 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""IPv4 helper functions.""" + +import struct +from typing import Union + +import dns.exception + + +def inet_ntoa(address: bytes) -> str: + """Convert an IPv4 address in binary form to text form. + + *address*, a ``bytes``, the IPv4 address in binary form. + + Returns a ``str``. 
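+
+    Example (illustrative):
+
+    >>> import dns.ipv4
+    >>> dns.ipv4.inet_ntoa(bytes([127, 0, 0, 1]))
+    '127.0.0.1'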
+ """ + + if len(address) != 4: + raise dns.exception.SyntaxError + return "%u.%u.%u.%u" % (address[0], address[1], address[2], address[3]) + + +def inet_aton(text: Union[str, bytes]) -> bytes: + """Convert an IPv4 address in text form to binary form. + + *text*, a ``str`` or ``bytes``, the IPv4 address in textual form. + + Returns a ``bytes``. + """ + + if not isinstance(text, bytes): + btext = text.encode() + else: + btext = text + parts = btext.split(b".") + if len(parts) != 4: + raise dns.exception.SyntaxError + for part in parts: + if not part.isdigit(): + raise dns.exception.SyntaxError + if len(part) > 1 and part[0] == ord("0"): + # No leading zeros + raise dns.exception.SyntaxError + try: + b = [int(part) for part in parts] + return struct.pack("BBBB", *b) + except Exception: + raise dns.exception.SyntaxError diff --git a/backend/test/lib/python3.8/site-packages/dns/ipv6.py b/backend/test/lib/python3.8/site-packages/dns/ipv6.py new file mode 100644 index 0000000000000000000000000000000000000000..0cc3d868f567a31f0089009f71320c9acf81fbd5 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/ipv6.py @@ -0,0 +1,208 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""IPv6 helper functions.""" + +import binascii +import re +from typing import List, Union + +import dns.exception +import dns.ipv4 + +_leading_zero = re.compile(r"0+([0-9a-f]+)") + + +def inet_ntoa(address: bytes) -> str: + """Convert an IPv6 address in binary form to text form. + + *address*, a ``bytes``, the IPv6 address in binary form. + + Raises ``ValueError`` if the address isn't 16 bytes long. + Returns a ``str``. + """ + + if len(address) != 16: + raise ValueError("IPv6 addresses are 16 bytes long") + hex = binascii.hexlify(address) + chunks = [] + i = 0 + l = len(hex) + while i < l: + chunk = hex[i : i + 4].decode() + # strip leading zeros. 
we do this with an re instead of + # with lstrip() because lstrip() didn't support chars until + # python 2.2.2 + m = _leading_zero.match(chunk) + if m is not None: + chunk = m.group(1) + chunks.append(chunk) + i += 4 + # + # Compress the longest subsequence of 0-value chunks to :: + # + best_start = 0 + best_len = 0 + start = -1 + last_was_zero = False + for i in range(8): + if chunks[i] != "0": + if last_was_zero: + end = i + current_len = end - start + if current_len > best_len: + best_start = start + best_len = current_len + last_was_zero = False + elif not last_was_zero: + start = i + last_was_zero = True + if last_was_zero: + end = 8 + current_len = end - start + if current_len > best_len: + best_start = start + best_len = current_len + if best_len > 1: + if best_start == 0 and (best_len == 6 or best_len == 5 and chunks[5] == "ffff"): + # We have an embedded IPv4 address + if best_len == 6: + prefix = "::" + else: + prefix = "::ffff:" + thex = prefix + dns.ipv4.inet_ntoa(address[12:]) + else: + thex = ( + ":".join(chunks[:best_start]) + + "::" + + ":".join(chunks[best_start + best_len :]) + ) + else: + thex = ":".join(chunks) + return thex + + +_v4_ending = re.compile(rb"(.*):(\d+\.\d+\.\d+\.\d+)$") +_colon_colon_start = re.compile(rb"::.*") +_colon_colon_end = re.compile(rb".*::$") + + +def inet_aton(text: Union[str, bytes], ignore_scope: bool = False) -> bytes: + """Convert an IPv6 address in text form to binary form. + + *text*, a ``str``, the IPv6 address in textual form. + + *ignore_scope*, a ``bool``. If ``True``, a scope will be ignored. + If ``False``, the default, it is an error for a scope to be present. + + Returns a ``bytes``. + """ + + # + # Our aim here is not something fast; we just want something that works. + # + if not isinstance(text, bytes): + btext = text.encode() + else: + btext = text + + if ignore_scope: + parts = btext.split(b"%") + l = len(parts) + if l == 2: + btext = parts[0] + elif l > 2: + raise dns.exception.SyntaxError + + if btext == b"": + raise dns.exception.SyntaxError + elif btext.endswith(b":") and not btext.endswith(b"::"): + raise dns.exception.SyntaxError + elif btext.startswith(b":") and not btext.startswith(b"::"): + raise dns.exception.SyntaxError + elif btext == b"::": + btext = b"0::" + # + # Get rid of the icky dot-quad syntax if we have it. + # + m = _v4_ending.match(btext) + if m is not None: + b = dns.ipv4.inet_aton(m.group(2)) + btext = ( + "{}:{:02x}{:02x}:{:02x}{:02x}".format( + m.group(1).decode(), b[0], b[1], b[2], b[3] + ) + ).encode() + # + # Try to turn '::<whatever>' into ':<whatever>'; if no match try to + # turn '<whatever>::' into '<whatever>:' + # + m = _colon_colon_start.match(btext) + if m is not None: + btext = btext[1:] + else: + m = _colon_colon_end.match(btext) + if m is not None: + btext = btext[:-1] + # + # Now canonicalize into 8 chunks of 4 hex digits each + # + chunks = btext.split(b":") + l = len(chunks) + if l > 8: + raise dns.exception.SyntaxError + seen_empty = False + canonical: List[bytes] = [] + for c in chunks: + if c == b"": + if seen_empty: + raise dns.exception.SyntaxError + seen_empty = True + for _ in range(0, 8 - l + 1): + canonical.append(b"0000") + else: + lc = len(c) + if lc > 4: + raise dns.exception.SyntaxError + if lc != 4: + c = (b"0" * (4 - lc)) + c + canonical.append(c) + if l < 8 and not seen_empty: + raise dns.exception.SyntaxError + btext = b"".join(canonical) + + # + # Finally we can go to binary. 
+ # + try: + return binascii.unhexlify(btext) + except (binascii.Error, TypeError): + raise dns.exception.SyntaxError + + +_mapped_prefix = b"\x00" * 10 + b"\xff\xff" + + +def is_mapped(address: bytes) -> bool: + """Is the specified address a mapped IPv4 address? + + *address*, a ``bytes`` is an IPv6 address in binary form. + + Returns a ``bool``. + """ + + return address.startswith(_mapped_prefix) diff --git a/backend/test/lib/python3.8/site-packages/dns/message.py b/backend/test/lib/python3.8/site-packages/dns/message.py new file mode 100644 index 0000000000000000000000000000000000000000..daae6363a9bd042db2c13609b02894e0e6f7b3c4 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/message.py @@ -0,0 +1,1829 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Messages""" + +import contextlib +import io +import time +from typing import Any, Dict, List, Optional, Tuple, Union + +import dns.edns +import dns.entropy +import dns.enum +import dns.exception +import dns.flags +import dns.name +import dns.opcode +import dns.rcode +import dns.rdata +import dns.rdataclass +import dns.rdatatype +import dns.rdtypes.ANY.OPT +import dns.rdtypes.ANY.TSIG +import dns.renderer +import dns.rrset +import dns.tsig +import dns.ttl +import dns.wire + + +class ShortHeader(dns.exception.FormError): + """The DNS packet passed to from_wire() is too short.""" + + +class TrailingJunk(dns.exception.FormError): + """The DNS packet passed to from_wire() has extra junk at the end of it.""" + + +class UnknownHeaderField(dns.exception.DNSException): + """The header field name was not recognized when converting from text + into a message.""" + + +class BadEDNS(dns.exception.FormError): + """An OPT record occurred somewhere other than + the additional data section.""" + + +class BadTSIG(dns.exception.FormError): + """A TSIG record occurred somewhere other than the end of + the additional data section.""" + + +class UnknownTSIGKey(dns.exception.DNSException): + """A TSIG with an unknown key was received.""" + + +class Truncated(dns.exception.DNSException): + """The truncated flag is set.""" + + supp_kwargs = {"message"} + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def message(self): + """As much of the message as could be processed. + + Returns a ``dns.message.Message``. 
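+
+        Example (illustrative; assumes a UDP query was made with
+        ``raise_on_truncation=True``)::
+
+            try:
+                response = dns.query.udp(q, where, raise_on_truncation=True)
+            except dns.message.Truncated as exc:
+                partial = exc.message()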
+ """ + return self.kwargs["message"] + + +class NotQueryResponse(dns.exception.DNSException): + """Message is not a response to a query.""" + + +class ChainTooLong(dns.exception.DNSException): + """The CNAME chain is too long.""" + + +class AnswerForNXDOMAIN(dns.exception.DNSException): + """The rcode is NXDOMAIN but an answer was found.""" + + +class NoPreviousName(dns.exception.SyntaxError): + """No previous name was known.""" + + +class MessageSection(dns.enum.IntEnum): + """Message sections""" + + QUESTION = 0 + ANSWER = 1 + AUTHORITY = 2 + ADDITIONAL = 3 + + @classmethod + def _maximum(cls): + return 3 + + +class MessageError: + def __init__(self, exception: Exception, offset: int): + self.exception = exception + self.offset = offset + + +DEFAULT_EDNS_PAYLOAD = 1232 +MAX_CHAIN = 16 + +IndexKeyType = Tuple[ + int, + dns.name.Name, + dns.rdataclass.RdataClass, + dns.rdatatype.RdataType, + Optional[dns.rdatatype.RdataType], + Optional[dns.rdataclass.RdataClass], +] +IndexType = Dict[IndexKeyType, dns.rrset.RRset] +SectionType = Union[int, str, List[dns.rrset.RRset]] + + +class Message: + """A DNS message.""" + + _section_enum = MessageSection + + def __init__(self, id: Optional[int] = None): + if id is None: + self.id = dns.entropy.random_16() + else: + self.id = id + self.flags = 0 + self.sections: List[List[dns.rrset.RRset]] = [[], [], [], []] + self.opt: Optional[dns.rrset.RRset] = None + self.request_payload = 0 + self.pad = 0 + self.keyring: Any = None + self.tsig: Optional[dns.rrset.RRset] = None + self.request_mac = b"" + self.xfr = False + self.origin: Optional[dns.name.Name] = None + self.tsig_ctx: Optional[Any] = None + self.index: IndexType = {} + self.errors: List[MessageError] = [] + self.time = 0.0 + + @property + def question(self) -> List[dns.rrset.RRset]: + """The question section.""" + return self.sections[0] + + @question.setter + def question(self, v): + self.sections[0] = v + + @property + def answer(self) -> List[dns.rrset.RRset]: + """The answer section.""" + return self.sections[1] + + @answer.setter + def answer(self, v): + self.sections[1] = v + + @property + def authority(self) -> List[dns.rrset.RRset]: + """The authority section.""" + return self.sections[2] + + @authority.setter + def authority(self, v): + self.sections[2] = v + + @property + def additional(self) -> List[dns.rrset.RRset]: + """The additional data section.""" + return self.sections[3] + + @additional.setter + def additional(self, v): + self.sections[3] = v + + def __repr__(self): + return "<DNS message, ID " + repr(self.id) + ">" + + def __str__(self): + return self.to_text() + + def to_text( + self, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + **kw: Dict[str, Any], + ) -> str: + """Convert the message to text. + + The *origin*, *relativize*, and any other keyword + arguments are passed to the RRset ``to_wire()`` method. + + Returns a ``str``. 
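+
+        Example (illustrative; ``make_query`` builds a small query
+        message)::
+
+            q = dns.message.make_query('example.com.', 'A')
+            print(q.to_text())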
+ """ + + s = io.StringIO() + s.write("id %d\n" % self.id) + s.write("opcode %s\n" % dns.opcode.to_text(self.opcode())) + s.write("rcode %s\n" % dns.rcode.to_text(self.rcode())) + s.write("flags %s\n" % dns.flags.to_text(self.flags)) + if self.edns >= 0: + s.write("edns %s\n" % self.edns) + if self.ednsflags != 0: + s.write("eflags %s\n" % dns.flags.edns_to_text(self.ednsflags)) + s.write("payload %d\n" % self.payload) + for opt in self.options: + s.write("option %s\n" % opt.to_text()) + for name, which in self._section_enum.__members__.items(): + s.write(f";{name}\n") + for rrset in self.section_from_number(which): + s.write(rrset.to_text(origin, relativize, **kw)) + s.write("\n") + # + # We strip off the final \n so the caller can print the result without + # doing weird things to get around eccentricities in Python print + # formatting + # + return s.getvalue()[:-1] + + def __eq__(self, other): + """Two messages are equal if they have the same content in the + header, question, answer, and authority sections. + + Returns a ``bool``. + """ + + if not isinstance(other, Message): + return False + if self.id != other.id: + return False + if self.flags != other.flags: + return False + for i, section in enumerate(self.sections): + other_section = other.sections[i] + for n in section: + if n not in other_section: + return False + for n in other_section: + if n not in section: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + + def is_response(self, other: "Message") -> bool: + """Is *other*, also a ``dns.message.Message``, a response to this + message? + + Returns a ``bool``. + """ + + if ( + other.flags & dns.flags.QR == 0 + or self.id != other.id + or dns.opcode.from_flags(self.flags) != dns.opcode.from_flags(other.flags) + ): + return False + if other.rcode() in { + dns.rcode.FORMERR, + dns.rcode.SERVFAIL, + dns.rcode.NOTIMP, + dns.rcode.REFUSED, + }: + # We don't check the question section in these cases if + # the other question section is empty, even though they + # still really ought to have a question section. + if len(other.question) == 0: + return True + if dns.opcode.is_update(self.flags): + # This is assuming the "sender doesn't include anything + # from the update", but we don't care to check the other + # case, which is that all the sections are returned and + # identical. + return True + for n in self.question: + if n not in other.question: + return False + for n in other.question: + if n not in self.question: + return False + return True + + def section_number(self, section: List[dns.rrset.RRset]) -> int: + """Return the "section number" of the specified section for use + in indexing. + + *section* is one of the section attributes of this message. + + Raises ``ValueError`` if the section isn't known. + + Returns an ``int``. + """ + + for i, our_section in enumerate(self.sections): + if section is our_section: + return self._section_enum(i) + raise ValueError("unknown section") + + def section_from_number(self, number: int) -> List[dns.rrset.RRset]: + """Return the section list associated with the specified section + number. + + *number* is a section number `int` or the text form of a section + name. + + Raises ``ValueError`` if the section isn't known. + + Returns a ``list``. 
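+
+        Example (illustrative):
+
+        >>> import dns.message
+        >>> m = dns.message.Message()
+        >>> m.section_from_number(dns.message.MessageSection.ANSWER) is m.answer
+        True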
+ """ + + section = self._section_enum.make(number) + return self.sections[section] + + def find_rrset( + self, + section: SectionType, + name: dns.name.Name, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + deleting: Optional[dns.rdataclass.RdataClass] = None, + create: bool = False, + force_unique: bool = False, + idna_codec: Optional[dns.name.IDNACodec] = None, + ) -> dns.rrset.RRset: + """Find the RRset with the given attributes in the specified section. + + *section*, an ``int`` section number, a ``str`` section name, or one of + the section attributes of this message. This specifies the + the section of the message to search. For example:: + + my_message.find_rrset(my_message.answer, name, rdclass, rdtype) + my_message.find_rrset(dns.message.ANSWER, name, rdclass, rdtype) + my_message.find_rrset("ANSWER", name, rdclass, rdtype) + + *name*, a ``dns.name.Name`` or ``str``, the name of the RRset. + + *rdclass*, an ``int`` or ``str``, the class of the RRset. + + *rdtype*, an ``int`` or ``str``, the type of the RRset. + + *covers*, an ``int`` or ``str``, the covers value of the RRset. + The default is ``dns.rdatatype.NONE``. + + *deleting*, an ``int``, ``str``, or ``None``, the deleting value of the + RRset. The default is ``None``. + + *create*, a ``bool``. If ``True``, create the RRset if it is not found. + The created RRset is appended to *section*. + + *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, + create a new RRset regardless of whether a matching RRset exists + already. The default is ``False``. This is useful when creating + DDNS Update messages, as order matters for them. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Raises ``KeyError`` if the RRset was not found and create was + ``False``. + + Returns a ``dns.rrset.RRset object``. 
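+
+        Example (illustrative), creating an empty A RRset in the answer
+        section::
+
+            m = dns.message.Message()
+            rrset = m.find_rrset(m.answer, dns.name.from_text('example.'),
+                                 dns.rdataclass.IN, dns.rdatatype.A,
+                                 create=True)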
+ """ + + if isinstance(section, int): + section_number = section + section = self.section_from_number(section_number) + elif isinstance(section, str): + section_number = MessageSection.from_text(section) + section = self.section_from_number(section_number) + else: + section_number = self.section_number(section) + if isinstance(name, str): + name = dns.name.from_text(name, idna_codec=idna_codec) + rdtype = dns.rdatatype.RdataType.make(rdtype) + rdclass = dns.rdataclass.RdataClass.make(rdclass) + covers = dns.rdatatype.RdataType.make(covers) + if deleting is not None: + deleting = dns.rdataclass.RdataClass.make(deleting) + key = (section_number, name, rdclass, rdtype, covers, deleting) + if not force_unique: + if self.index is not None: + rrset = self.index.get(key) + if rrset is not None: + return rrset + else: + for rrset in section: + if rrset.full_match(name, rdclass, rdtype, covers, deleting): + return rrset + if not create: + raise KeyError + rrset = dns.rrset.RRset(name, rdclass, rdtype, covers, deleting) + section.append(rrset) + if self.index is not None: + self.index[key] = rrset + return rrset + + def get_rrset( + self, + section: SectionType, + name: dns.name.Name, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + deleting: Optional[dns.rdataclass.RdataClass] = None, + create: bool = False, + force_unique: bool = False, + idna_codec: Optional[dns.name.IDNACodec] = None, + ) -> Optional[dns.rrset.RRset]: + """Get the RRset with the given attributes in the specified section. + + If the RRset is not found, None is returned. + + *section*, an ``int`` section number, a ``str`` section name, or one of + the section attributes of this message. This specifies the + the section of the message to search. For example:: + + my_message.get_rrset(my_message.answer, name, rdclass, rdtype) + my_message.get_rrset(dns.message.ANSWER, name, rdclass, rdtype) + my_message.get_rrset("ANSWER", name, rdclass, rdtype) + + *name*, a ``dns.name.Name`` or ``str``, the name of the RRset. + + *rdclass*, an ``int`` or ``str``, the class of the RRset. + + *rdtype*, an ``int`` or ``str``, the type of the RRset. + + *covers*, an ``int`` or ``str``, the covers value of the RRset. + The default is ``dns.rdatatype.NONE``. + + *deleting*, an ``int``, ``str``, or ``None``, the deleting value of the + RRset. The default is ``None``. + + *create*, a ``bool``. If ``True``, create the RRset if it is not found. + The created RRset is appended to *section*. + + *force_unique*, a ``bool``. If ``True`` and *create* is also ``True``, + create a new RRset regardless of whether a matching RRset exists + already. The default is ``False``. This is useful when creating + DDNS Update messages, as order matters for them. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Returns a ``dns.rrset.RRset object`` or ``None``. + """ + + try: + rrset = self.find_rrset( + section, + name, + rdclass, + rdtype, + covers, + deleting, + create, + force_unique, + idna_codec, + ) + except KeyError: + rrset = None + return rrset + + def _compute_opt_reserve(self) -> int: + """Compute the size required for the OPT RR, padding excluded""" + if not self.opt: + return 0 + # 1 byte for the root name, 10 for the standard RR fields + size = 11 + # This would be more efficient if options had a size() method, but we won't + # worry about that for now. 
We also don't worry if there is an existing padding + # option, as it is unlikely and probably harmless, as the worst case is that we + # may add another, and this seems to be legal. + for option in self.opt[0].options: + wire = option.to_wire() + # We add 4 here to account for the option type and length + size += len(wire) + 4 + if self.pad: + # Padding will be added, so again add the option type and length. + size += 4 + return size + + def _compute_tsig_reserve(self) -> int: + """Compute the size required for the TSIG RR""" + # This would be more efficient if TSIGs had a size method, but we won't + # worry about for now. Also, we can't really cope with the potential + # compressibility of the TSIG owner name, so we estimate with the uncompressed + # size. We will disable compression when TSIG and padding are both is active + # so that the padding comes out right. + if not self.tsig: + return 0 + f = io.BytesIO() + self.tsig.to_wire(f) + return len(f.getvalue()) + + def to_wire( + self, + origin: Optional[dns.name.Name] = None, + max_size: int = 0, + multi: bool = False, + tsig_ctx: Optional[Any] = None, + **kw: Dict[str, Any], + ) -> bytes: + """Return a string containing the message in DNS compressed wire + format. + + Additional keyword arguments are passed to the RRset ``to_wire()`` + method. + + *origin*, a ``dns.name.Name`` or ``None``, the origin to be appended + to any relative names. If ``None``, and the message has an origin + attribute that is not ``None``, then it will be used. + + *max_size*, an ``int``, the maximum size of the wire format + output; default is 0, which means "the message's request + payload, if nonzero, or 65535". + + *multi*, a ``bool``, should be set to ``True`` if this message is + part of a multiple message sequence. + + *tsig_ctx*, a ``dns.tsig.HMACTSig`` or ``dns.tsig.GSSTSig`` object, the + ongoing TSIG context, used when signing zone transfers. + + Raises ``dns.exception.TooBig`` if *max_size* was exceeded. + + Returns a ``bytes``. 
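+
+        Example (illustrative; a message with no records renders as just
+        the 12-octet DNS header):
+
+        >>> import dns.message
+        >>> len(dns.message.Message(id=1).to_wire())
+        12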
+ """ + + if origin is None and self.origin is not None: + origin = self.origin + if max_size == 0: + if self.request_payload != 0: + max_size = self.request_payload + else: + max_size = 65535 + if max_size < 512: + max_size = 512 + elif max_size > 65535: + max_size = 65535 + r = dns.renderer.Renderer(self.id, self.flags, max_size, origin) + opt_reserve = self._compute_opt_reserve() + r.reserve(opt_reserve) + tsig_reserve = self._compute_tsig_reserve() + r.reserve(tsig_reserve) + for rrset in self.question: + r.add_question(rrset.name, rrset.rdtype, rrset.rdclass) + for rrset in self.answer: + r.add_rrset(dns.renderer.ANSWER, rrset, **kw) + for rrset in self.authority: + r.add_rrset(dns.renderer.AUTHORITY, rrset, **kw) + for rrset in self.additional: + r.add_rrset(dns.renderer.ADDITIONAL, rrset, **kw) + r.release_reserved() + if self.opt is not None: + r.add_opt(self.opt, self.pad, opt_reserve, tsig_reserve) + r.write_header() + if self.tsig is not None: + (new_tsig, ctx) = dns.tsig.sign( + r.get_wire(), + self.keyring, + self.tsig[0], + int(time.time()), + self.request_mac, + tsig_ctx, + multi, + ) + self.tsig.clear() + self.tsig.add(new_tsig) + r.add_rrset(dns.renderer.ADDITIONAL, self.tsig) + r.write_header() + if multi: + self.tsig_ctx = ctx + return r.get_wire() + + @staticmethod + def _make_tsig( + keyname, algorithm, time_signed, fudge, mac, original_id, error, other + ): + tsig = dns.rdtypes.ANY.TSIG.TSIG( + dns.rdataclass.ANY, + dns.rdatatype.TSIG, + algorithm, + time_signed, + fudge, + mac, + original_id, + error, + other, + ) + return dns.rrset.from_rdata(keyname, 0, tsig) + + def use_tsig( + self, + keyring: Any, + keyname: Optional[Union[dns.name.Name, str]] = None, + fudge: int = 300, + original_id: Optional[int] = None, + tsig_error: int = 0, + other_data: bytes = b"", + algorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, + ) -> None: + """When sending, a TSIG signature using the specified key + should be added. + + *key*, a ``dns.tsig.Key`` is the key to use. If a key is specified, + the *keyring* and *algorithm* fields are not used. + + *keyring*, a ``dict``, ``callable`` or ``dns.tsig.Key``, is either + the TSIG keyring or key to use. + + The format of a keyring dict is a mapping from TSIG key name, as + ``dns.name.Name`` to ``dns.tsig.Key`` or a TSIG secret, a ``bytes``. + If a ``dict`` *keyring* is specified but a *keyname* is not, the key + used will be the first key in the *keyring*. Note that the order of + keys in a dictionary is not defined, so applications should supply a + keyname when a ``dict`` keyring is used, unless they know the keyring + contains only one key. If a ``callable`` keyring is specified, the + callable will be called with the message and the keyname, and is + expected to return a key. + + *keyname*, a ``dns.name.Name``, ``str`` or ``None``, the name of + this TSIG key to use; defaults to ``None``. If *keyring* is a + ``dict``, the key must be defined in it. If *keyring* is a + ``dns.tsig.Key``, this is ignored. + + *fudge*, an ``int``, the TSIG time fudge. + + *original_id*, an ``int``, the TSIG original id. If ``None``, + the message's id is used. + + *tsig_error*, an ``int``, the TSIG error code. + + *other_data*, a ``bytes``, the TSIG other data. + + *algorithm*, a ``dns.name.Name`` or ``str``, the TSIG algorithm to use. This is + only used if *keyring* is a ``dict``, and the key entry is a ``bytes``. 
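+
+        Example (illustrative; the key name and base64 secret below are
+        made up)::
+
+            key = dns.tsig.Key('keyname.', 'AAAAAAAAAAAAAAAAAAAAAA==')
+            q = dns.message.make_query('example.com.', 'A')
+            q.use_tsig(key)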
+ """ + + if isinstance(keyring, dns.tsig.Key): + key = keyring + keyname = key.name + elif callable(keyring): + key = keyring(self, keyname) + else: + if isinstance(keyname, str): + keyname = dns.name.from_text(keyname) + if keyname is None: + keyname = next(iter(keyring)) + key = keyring[keyname] + if isinstance(key, bytes): + key = dns.tsig.Key(keyname, key, algorithm) + self.keyring = key + if original_id is None: + original_id = self.id + self.tsig = self._make_tsig( + keyname, + self.keyring.algorithm, + 0, + fudge, + b"\x00" * dns.tsig.mac_sizes[self.keyring.algorithm], + original_id, + tsig_error, + other_data, + ) + + @property + def keyname(self) -> Optional[dns.name.Name]: + if self.tsig: + return self.tsig.name + else: + return None + + @property + def keyalgorithm(self) -> Optional[dns.name.Name]: + if self.tsig: + return self.tsig[0].algorithm + else: + return None + + @property + def mac(self) -> Optional[bytes]: + if self.tsig: + return self.tsig[0].mac + else: + return None + + @property + def tsig_error(self) -> Optional[int]: + if self.tsig: + return self.tsig[0].error + else: + return None + + @property + def had_tsig(self) -> bool: + return bool(self.tsig) + + @staticmethod + def _make_opt(flags=0, payload=DEFAULT_EDNS_PAYLOAD, options=None): + opt = dns.rdtypes.ANY.OPT.OPT(payload, dns.rdatatype.OPT, options or ()) + return dns.rrset.from_rdata(dns.name.root, int(flags), opt) + + def use_edns( + self, + edns: Optional[Union[int, bool]] = 0, + ednsflags: int = 0, + payload: int = DEFAULT_EDNS_PAYLOAD, + request_payload: Optional[int] = None, + options: Optional[List[dns.edns.Option]] = None, + pad: int = 0, + ) -> None: + """Configure EDNS behavior. + + *edns*, an ``int``, is the EDNS level to use. Specifying ``None``, ``False``, + or ``-1`` means "do not use EDNS", and in this case the other parameters are + ignored. Specifying ``True`` is equivalent to specifying 0, i.e. "use EDNS0". + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the maximum + size of UDP datagram the sender can handle. I.e. how big a response to this + message can be. + + *request_payload*, an ``int``, is the EDNS payload size to use when sending this + message. If not specified, defaults to the value of *payload*. + + *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS options. + + *pad*, a non-negative ``int``. If 0, the default, do not pad; otherwise add + padding bytes to make the message size a multiple of *pad*. Note that if + padding is non-zero, an EDNS PADDING option will always be added to the + message. 
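+
+        A minimal sketch (the payload and pad values are illustrative
+        choices, not requirements)::
+
+            q = dns.message.make_query("example.com.", "A")
+            # EDNS0, 1232-octet payload, pad the query to a multiple of
+            # 128 octets (an illustrative block size)
+            q.use_edns(0, payload=1232, pad=128)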
+ """ + + if edns is None or edns is False: + edns = -1 + elif edns is True: + edns = 0 + if edns < 0: + self.opt = None + self.request_payload = 0 + else: + # make sure the EDNS version in ednsflags agrees with edns + ednsflags &= 0xFF00FFFF + ednsflags |= edns << 16 + if options is None: + options = [] + self.opt = self._make_opt(ednsflags, payload, options) + if request_payload is None: + request_payload = payload + self.request_payload = request_payload + self.pad = pad + + @property + def edns(self) -> int: + if self.opt: + return (self.ednsflags & 0xFF0000) >> 16 + else: + return -1 + + @property + def ednsflags(self) -> int: + if self.opt: + return self.opt.ttl + else: + return 0 + + @ednsflags.setter + def ednsflags(self, v): + if self.opt: + self.opt.ttl = v + elif v: + self.opt = self._make_opt(v) + + @property + def payload(self) -> int: + if self.opt: + return self.opt[0].payload + else: + return 0 + + @property + def options(self) -> Tuple: + if self.opt: + return self.opt[0].options + else: + return () + + def want_dnssec(self, wanted: bool = True) -> None: + """Enable or disable 'DNSSEC desired' flag in requests. + + *wanted*, a ``bool``. If ``True``, then DNSSEC data is + desired in the response, EDNS is enabled if required, and then + the DO bit is set. If ``False``, the DO bit is cleared if + EDNS is enabled. + """ + + if wanted: + self.ednsflags |= dns.flags.DO + elif self.opt: + self.ednsflags &= ~dns.flags.DO + + def rcode(self) -> dns.rcode.Rcode: + """Return the rcode. + + Returns a ``dns.rcode.Rcode``. + """ + return dns.rcode.from_flags(int(self.flags), int(self.ednsflags)) + + def set_rcode(self, rcode: dns.rcode.Rcode) -> None: + """Set the rcode. + + *rcode*, a ``dns.rcode.Rcode``, is the rcode to set. + """ + (value, evalue) = dns.rcode.to_flags(rcode) + self.flags &= 0xFFF0 + self.flags |= value + self.ednsflags &= 0x00FFFFFF + self.ednsflags |= evalue + + def opcode(self) -> dns.opcode.Opcode: + """Return the opcode. + + Returns a ``dns.opcode.Opcode``. + """ + return dns.opcode.from_flags(int(self.flags)) + + def set_opcode(self, opcode: dns.opcode.Opcode) -> None: + """Set the opcode. + + *opcode*, a ``dns.opcode.Opcode``, is the opcode to set. + """ + self.flags &= 0x87FF + self.flags |= dns.opcode.to_flags(opcode) + + def _get_one_rr_per_rrset(self, value): + # What the caller picked is fine. + return value + + # pylint: disable=unused-argument + + def _parse_rr_header(self, section, name, rdclass, rdtype): + return (rdclass, rdtype, None, False) + + # pylint: enable=unused-argument + + def _parse_special_rr_header(self, section, count, position, name, rdclass, rdtype): + if rdtype == dns.rdatatype.OPT: + if ( + section != MessageSection.ADDITIONAL + or self.opt + or name != dns.name.root + ): + raise BadEDNS + elif rdtype == dns.rdatatype.TSIG: + if ( + section != MessageSection.ADDITIONAL + or rdclass != dns.rdatatype.ANY + or position != count - 1 + ): + raise BadTSIG + return (rdclass, rdtype, None, False) + + +class ChainingResult: + """The result of a call to dns.message.QueryMessage.resolve_chaining(). + + The ``answer`` attribute is the answer RRSet, or ``None`` if it doesn't + exist. + + The ``canonical_name`` attribute is the canonical name after all + chaining has been applied (this is the same name as ``rrset.name`` in cases + where rrset is not ``None``). + + The ``minimum_ttl`` attribute is the minimum TTL, i.e. the TTL to + use if caching the data. 
It is the smallest of all the CNAME TTLs + and either the answer TTL if it exists or the SOA TTL and SOA + minimum values for negative answers. + + The ``cnames`` attribute is a list of all the CNAME RRSets followed to + get to the canonical name. + """ + + def __init__( + self, + canonical_name: dns.name.Name, + answer: Optional[dns.rrset.RRset], + minimum_ttl: int, + cnames: List[dns.rrset.RRset], + ): + self.canonical_name = canonical_name + self.answer = answer + self.minimum_ttl = minimum_ttl + self.cnames = cnames + + +class QueryMessage(Message): + def resolve_chaining(self) -> ChainingResult: + """Follow the CNAME chain in the response to determine the answer + RRset. + + Raises ``dns.message.NotQueryResponse`` if the message is not + a response. + + Raises ``dns.message.ChainTooLong`` if the CNAME chain is too long. + + Raises ``dns.message.AnswerForNXDOMAIN`` if the rcode is NXDOMAIN + but an answer was found. + + Raises ``dns.exception.FormError`` if the question count is not 1. + + Returns a ChainingResult object. + """ + if self.flags & dns.flags.QR == 0: + raise NotQueryResponse + if len(self.question) != 1: + raise dns.exception.FormError + question = self.question[0] + qname = question.name + min_ttl = dns.ttl.MAX_TTL + answer = None + count = 0 + cnames = [] + while count < MAX_CHAIN: + try: + answer = self.find_rrset( + self.answer, qname, question.rdclass, question.rdtype + ) + min_ttl = min(min_ttl, answer.ttl) + break + except KeyError: + if question.rdtype != dns.rdatatype.CNAME: + try: + crrset = self.find_rrset( + self.answer, qname, question.rdclass, dns.rdatatype.CNAME + ) + cnames.append(crrset) + min_ttl = min(min_ttl, crrset.ttl) + for rd in crrset: + qname = rd.target + break + count += 1 + continue + except KeyError: + # Exit the chaining loop + break + else: + # Exit the chaining loop + break + if count >= MAX_CHAIN: + raise ChainTooLong + if self.rcode() == dns.rcode.NXDOMAIN and answer is not None: + raise AnswerForNXDOMAIN + if answer is None: + # Further minimize the TTL with NCACHE. + auname = qname + while True: + # Look for an SOA RR whose owner name is a superdomain + # of qname. + try: + srrset = self.find_rrset( + self.authority, auname, question.rdclass, dns.rdatatype.SOA + ) + min_ttl = min(min_ttl, srrset.ttl, srrset[0].minimum) + break + except KeyError: + try: + auname = auname.parent() + except dns.name.NoParent: + break + return ChainingResult(qname, answer, min_ttl, cnames) + + def canonical_name(self) -> dns.name.Name: + """Return the canonical name of the first name in the question + section. + + Raises ``dns.message.NotQueryResponse`` if the message is not + a response. + + Raises ``dns.message.ChainTooLong`` if the CNAME chain is too long. + + Raises ``dns.message.AnswerForNXDOMAIN`` if the rcode is NXDOMAIN + but an answer was found. + + Raises ``dns.exception.FormError`` if the question count is not 1. + """ + return self.resolve_chaining().canonical_name + + +def _maybe_import_update(): + # We avoid circular imports by doing this here. 
We do it in another + # function as doing it in _message_factory_from_opcode() makes "dns" + # a local symbol, and the first line fails :) + + # pylint: disable=redefined-outer-name,import-outside-toplevel,unused-import + import dns.update # noqa: F401 + + +def _message_factory_from_opcode(opcode): + if opcode == dns.opcode.QUERY: + return QueryMessage + elif opcode == dns.opcode.UPDATE: + _maybe_import_update() + return dns.update.UpdateMessage + else: + return Message + + +class _WireReader: + + """Wire format reader. + + parser: the binary parser + message: The message object being built + initialize_message: Callback to set message parsing options + question_only: Are we only reading the question? + one_rr_per_rrset: Put each RR into its own RRset? + keyring: TSIG keyring + ignore_trailing: Ignore trailing junk at end of request? + multi: Is this message part of a multi-message sequence? + DNS dynamic updates. + continue_on_error: try to extract as much information as possible from + the message, accumulating MessageErrors in the *errors* attribute instead of + raising them. + """ + + def __init__( + self, + wire, + initialize_message, + question_only=False, + one_rr_per_rrset=False, + ignore_trailing=False, + keyring=None, + multi=False, + continue_on_error=False, + ): + self.parser = dns.wire.Parser(wire) + self.message = None + self.initialize_message = initialize_message + self.question_only = question_only + self.one_rr_per_rrset = one_rr_per_rrset + self.ignore_trailing = ignore_trailing + self.keyring = keyring + self.multi = multi + self.continue_on_error = continue_on_error + self.errors = [] + + def _get_question(self, section_number, qcount): + """Read the next *qcount* records from the wire data and add them to + the question section. + """ + assert self.message is not None + section = self.message.sections[section_number] + for _ in range(qcount): + qname = self.parser.get_name(self.message.origin) + (rdtype, rdclass) = self.parser.get_struct("!HH") + (rdclass, rdtype, _, _) = self.message._parse_rr_header( + section_number, qname, rdclass, rdtype + ) + self.message.find_rrset( + section, qname, rdclass, rdtype, create=True, force_unique=True + ) + + def _add_error(self, e): + self.errors.append(MessageError(e, self.parser.current)) + + def _get_section(self, section_number, count): + """Read the next I{count} records from the wire data and add them to + the specified section. 
+ + section_number: the section of the message to which to add records + count: the number of records to read + """ + assert self.message is not None + section = self.message.sections[section_number] + force_unique = self.one_rr_per_rrset + for i in range(count): + rr_start = self.parser.current + absolute_name = self.parser.get_name() + if self.message.origin is not None: + name = absolute_name.relativize(self.message.origin) + else: + name = absolute_name + (rdtype, rdclass, ttl, rdlen) = self.parser.get_struct("!HHIH") + if rdtype in (dns.rdatatype.OPT, dns.rdatatype.TSIG): + ( + rdclass, + rdtype, + deleting, + empty, + ) = self.message._parse_special_rr_header( + section_number, count, i, name, rdclass, rdtype + ) + else: + (rdclass, rdtype, deleting, empty) = self.message._parse_rr_header( + section_number, name, rdclass, rdtype + ) + rdata_start = self.parser.current + try: + if empty: + if rdlen > 0: + raise dns.exception.FormError + rd = None + covers = dns.rdatatype.NONE + else: + with self.parser.restrict_to(rdlen): + rd = dns.rdata.from_wire_parser( + rdclass, rdtype, self.parser, self.message.origin + ) + covers = rd.covers() + if self.message.xfr and rdtype == dns.rdatatype.SOA: + force_unique = True + if rdtype == dns.rdatatype.OPT: + self.message.opt = dns.rrset.from_rdata(name, ttl, rd) + elif rdtype == dns.rdatatype.TSIG: + if self.keyring is None: + raise UnknownTSIGKey("got signed message without keyring") + if isinstance(self.keyring, dict): + key = self.keyring.get(absolute_name) + if isinstance(key, bytes): + key = dns.tsig.Key(absolute_name, key, rd.algorithm) + elif callable(self.keyring): + key = self.keyring(self.message, absolute_name) + else: + key = self.keyring + if key is None: + raise UnknownTSIGKey("key '%s' unknown" % name) + self.message.keyring = key + self.message.tsig_ctx = dns.tsig.validate( + self.parser.wire, + key, + absolute_name, + rd, + int(time.time()), + self.message.request_mac, + rr_start, + self.message.tsig_ctx, + self.multi, + ) + self.message.tsig = dns.rrset.from_rdata(absolute_name, 0, rd) + else: + rrset = self.message.find_rrset( + section, + name, + rdclass, + rdtype, + covers, + deleting, + True, + force_unique, + ) + if rd is not None: + if ttl > 0x7FFFFFFF: + ttl = 0 + rrset.add(rd, ttl) + except Exception as e: + if self.continue_on_error: + self._add_error(e) + self.parser.seek(rdata_start + rdlen) + else: + raise + + def read(self): + """Read a wire format DNS message and build a dns.message.Message + object.""" + + if self.parser.remaining() < 12: + raise ShortHeader + (id, flags, qcount, ancount, aucount, adcount) = self.parser.get_struct( + "!HHHHHH" + ) + factory = _message_factory_from_opcode(dns.opcode.from_flags(flags)) + self.message = factory(id=id) + self.message.flags = dns.flags.Flag(flags) + self.initialize_message(self.message) + self.one_rr_per_rrset = self.message._get_one_rr_per_rrset( + self.one_rr_per_rrset + ) + try: + self._get_question(MessageSection.QUESTION, qcount) + if self.question_only: + return self.message + self._get_section(MessageSection.ANSWER, ancount) + self._get_section(MessageSection.AUTHORITY, aucount) + self._get_section(MessageSection.ADDITIONAL, adcount) + if not self.ignore_trailing and self.parser.remaining() != 0: + raise TrailingJunk + if self.multi and self.message.tsig_ctx and not self.message.had_tsig: + self.message.tsig_ctx.update(self.parser.wire) + except Exception as e: + if self.continue_on_error: + self._add_error(e) + else: + raise + return self.message + + +def 
from_wire( + wire: bytes, + keyring: Optional[Any] = None, + request_mac: Optional[bytes] = b"", + xfr: bool = False, + origin: Optional[dns.name.Name] = None, + tsig_ctx: Optional[Union[dns.tsig.HMACTSig, dns.tsig.GSSTSig]] = None, + multi: bool = False, + question_only: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + raise_on_truncation: bool = False, + continue_on_error: bool = False, +) -> Message: + """Convert a DNS wire format message into a message object. + + *keyring*, a ``dns.tsig.Key`` or ``dict``, the key or keyring to use if the message + is signed. + + *request_mac*, a ``bytes`` or ``None``. If the message is a response to a + TSIG-signed request, *request_mac* should be set to the MAC of that request. + + *xfr*, a ``bool``, should be set to ``True`` if this message is part of a zone + transfer. + + *origin*, a ``dns.name.Name`` or ``None``. If the message is part of a zone + transfer, *origin* should be the origin name of the zone. If not ``None``, names + will be relativized to the origin. + + *tsig_ctx*, a ``dns.tsig.HMACTSig`` or ``dns.tsig.GSSTSig`` object, the ongoing TSIG + context, used when validating zone transfers. + + *multi*, a ``bool``, should be set to ``True`` if this message is part of a multiple + message sequence. + + *question_only*, a ``bool``. If ``True``, read only up to the end of the question + section. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the + message. + + *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if the TC bit is + set. + + *continue_on_error*, a ``bool``. If ``True``, try to continue parsing even if + errors occur. Erroneous rdata will be ignored. Errors will be accumulated as a + list of MessageError objects in the message's ``errors`` attribute. This option is + recommended only for DNS analysis tools, or for use in a server as part of an error + handling path. The default is ``False``. + + Raises ``dns.message.ShortHeader`` if the message is less than 12 octets long. + + Raises ``dns.message.TrailingJunk`` if there were octets in the message past the end + of the proper DNS message, and *ignore_trailing* is ``False``. + + Raises ``dns.message.BadEDNS`` if an OPT record was in the wrong section, or + occurred more than once. + + Raises ``dns.message.BadTSIG`` if a TSIG record was not the last record of the + additional data section. + + Raises ``dns.message.Truncated`` if the TC flag is set and *raise_on_truncation* is + ``True``. + + Returns a ``dns.message.Message``. + """ + + # We permit None for request_mac solely for backwards compatibility + if request_mac is None: + request_mac = b"" + + def initialize_message(message): + message.request_mac = request_mac + message.xfr = xfr + message.origin = origin + message.tsig_ctx = tsig_ctx + + reader = _WireReader( + wire, + initialize_message, + question_only, + one_rr_per_rrset, + ignore_trailing, + keyring, + multi, + continue_on_error, + ) + try: + m = reader.read() + except dns.exception.FormError: + if ( + reader.message + and (reader.message.flags & dns.flags.TC) + and raise_on_truncation + ): + raise Truncated(message=reader.message) + else: + raise + # Reading a truncated message might not have any errors, so we + # have to do this check here too. 
+ if m.flags & dns.flags.TC and raise_on_truncation: + raise Truncated(message=m) + if continue_on_error: + m.errors = reader.errors + + return m + + +class _TextReader: + + """Text format reader. + + tok: the tokenizer. + message: The message object being built. + DNS dynamic updates. + last_name: The most recently read name when building a message object. + one_rr_per_rrset: Put each RR into its own RRset? + origin: The origin for relative names + relativize: relativize names? + relativize_to: the origin to relativize to. + """ + + def __init__( + self, + text, + idna_codec, + one_rr_per_rrset=False, + origin=None, + relativize=True, + relativize_to=None, + ): + self.message = None + self.tok = dns.tokenizer.Tokenizer(text, idna_codec=idna_codec) + self.last_name = None + self.one_rr_per_rrset = one_rr_per_rrset + self.origin = origin + self.relativize = relativize + self.relativize_to = relativize_to + self.id = None + self.edns = -1 + self.ednsflags = 0 + self.payload = DEFAULT_EDNS_PAYLOAD + self.rcode = None + self.opcode = dns.opcode.QUERY + self.flags = 0 + + def _header_line(self, _): + """Process one line from the text format header section.""" + + token = self.tok.get() + what = token.value + if what == "id": + self.id = self.tok.get_int() + elif what == "flags": + while True: + token = self.tok.get() + if not token.is_identifier(): + self.tok.unget(token) + break + self.flags = self.flags | dns.flags.from_text(token.value) + elif what == "edns": + self.edns = self.tok.get_int() + self.ednsflags = self.ednsflags | (self.edns << 16) + elif what == "eflags": + if self.edns < 0: + self.edns = 0 + while True: + token = self.tok.get() + if not token.is_identifier(): + self.tok.unget(token) + break + self.ednsflags = self.ednsflags | dns.flags.edns_from_text(token.value) + elif what == "payload": + self.payload = self.tok.get_int() + if self.edns < 0: + self.edns = 0 + elif what == "opcode": + text = self.tok.get_string() + self.opcode = dns.opcode.from_text(text) + self.flags = self.flags | dns.opcode.to_flags(self.opcode) + elif what == "rcode": + text = self.tok.get_string() + self.rcode = dns.rcode.from_text(text) + else: + raise UnknownHeaderField + self.tok.get_eol() + + def _question_line(self, section_number): + """Process one line from the text format question section.""" + + section = self.message.sections[section_number] + token = self.tok.get(want_leading=True) + if not token.is_whitespace(): + self.last_name = self.tok.as_name( + token, self.message.origin, self.relativize, self.relativize_to + ) + name = self.last_name + if name is None: + raise NoPreviousName + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + # Class + try: + rdclass = dns.rdataclass.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + rdclass = dns.rdataclass.IN + # Type + rdtype = dns.rdatatype.from_text(token.value) + (rdclass, rdtype, _, _) = self.message._parse_rr_header( + section_number, name, rdclass, rdtype + ) + self.message.find_rrset( + section, name, rdclass, rdtype, create=True, force_unique=True + ) + self.tok.get_eol() + + def _rr_line(self, section_number): + """Process one line from the text format answer, authority, or + additional data sections. 
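+
+        A typical input line looks like (illustrative record data)::
+
+            www.example. 300 IN A 192.0.2.1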
+ """ + + section = self.message.sections[section_number] + # Name + token = self.tok.get(want_leading=True) + if not token.is_whitespace(): + self.last_name = self.tok.as_name( + token, self.message.origin, self.relativize, self.relativize_to + ) + name = self.last_name + if name is None: + raise NoPreviousName + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + # TTL + try: + ttl = int(token.value, 0) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + ttl = 0 + # Class + try: + rdclass = dns.rdataclass.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + rdclass = dns.rdataclass.IN + # Type + rdtype = dns.rdatatype.from_text(token.value) + (rdclass, rdtype, deleting, empty) = self.message._parse_rr_header( + section_number, name, rdclass, rdtype + ) + token = self.tok.get() + if empty and not token.is_eol_or_eof(): + raise dns.exception.SyntaxError + if not empty and token.is_eol_or_eof(): + raise dns.exception.UnexpectedEnd + if not token.is_eol_or_eof(): + self.tok.unget(token) + rd = dns.rdata.from_text( + rdclass, + rdtype, + self.tok, + self.message.origin, + self.relativize, + self.relativize_to, + ) + covers = rd.covers() + else: + rd = None + covers = dns.rdatatype.NONE + rrset = self.message.find_rrset( + section, + name, + rdclass, + rdtype, + covers, + deleting, + True, + self.one_rr_per_rrset, + ) + if rd is not None: + rrset.add(rd, ttl) + + def _make_message(self): + factory = _message_factory_from_opcode(self.opcode) + message = factory(id=self.id) + message.flags = self.flags + if self.edns >= 0: + message.use_edns(self.edns, self.ednsflags, self.payload) + if self.rcode: + message.set_rcode(self.rcode) + if self.origin: + message.origin = self.origin + return message + + def read(self): + """Read a text format DNS message and build a dns.message.Message + object.""" + + line_method = self._header_line + section_number = None + while 1: + token = self.tok.get(True, True) + if token.is_eol_or_eof(): + break + if token.is_comment(): + u = token.value.upper() + if u == "HEADER": + line_method = self._header_line + + if self.message: + message = self.message + else: + # If we don't have a message, create one with the current + # opcode, so that we know which section names to parse. + message = self._make_message() + try: + section_number = message._section_enum.from_text(u) + # We found a section name. If we don't have a message, + # use the one we just created. + if not self.message: + self.message = message + self.one_rr_per_rrset = message._get_one_rr_per_rrset( + self.one_rr_per_rrset + ) + if section_number == MessageSection.QUESTION: + line_method = self._question_line + else: + line_method = self._rr_line + except Exception: + # It's just a comment. + pass + self.tok.get_eol() + continue + self.tok.unget(token) + line_method(section_number) + if not self.message: + self.message = self._make_message() + return self.message + + +def from_text( + text: str, + idna_codec: Optional[dns.name.IDNACodec] = None, + one_rr_per_rrset: bool = False, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + relativize_to: Optional[dns.name.Name] = None, +) -> Message: + """Convert the text format message into a message object. 
+ + The reader stops after reading the first blank line in the input to + facilitate reading multiple messages from a single file with + ``dns.message.from_file()``. + + *text*, a ``str``, the text format message. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *one_rr_per_rrset*, a ``bool``. If ``True``, then each RR is put + into its own rrset. The default is ``False``. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + Raises ``dns.message.UnknownHeaderField`` if a header is unknown. + + Raises ``dns.exception.SyntaxError`` if the text is badly formed. + + Returns a ``dns.message.Message object`` + """ + + # 'text' can also be a file, but we don't publish that fact + # since it's an implementation detail. The official file + # interface is from_file(). + + reader = _TextReader( + text, idna_codec, one_rr_per_rrset, origin, relativize, relativize_to + ) + return reader.read() + + +def from_file( + f: Any, + idna_codec: Optional[dns.name.IDNACodec] = None, + one_rr_per_rrset: bool = False, +) -> Message: + """Read the next text format message from the specified file. + + Message blocks are separated by a single blank line. + + *f*, a ``file`` or ``str``. If *f* is text, it is treated as the + pathname of a file to open. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *one_rr_per_rrset*, a ``bool``. If ``True``, then each RR is put + into its own rrset. The default is ``False``. + + Raises ``dns.message.UnknownHeaderField`` if a header is unknown. + + Raises ``dns.exception.SyntaxError`` if the text is badly formed. + + Returns a ``dns.message.Message object`` + """ + + if isinstance(f, str): + cm: contextlib.AbstractContextManager = open(f) + else: + cm = contextlib.nullcontext(f) + with cm as f: + return from_text(f, idna_codec, one_rr_per_rrset) + assert False # for mypy lgtm[py/unreachable-statement] + + +def make_query( + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + use_edns: Optional[Union[int, bool]] = None, + want_dnssec: bool = False, + ednsflags: Optional[int] = None, + payload: Optional[int] = None, + request_payload: Optional[int] = None, + options: Optional[List[dns.edns.Option]] = None, + idna_codec: Optional[dns.name.IDNACodec] = None, + id: Optional[int] = None, + flags: int = dns.flags.RD, + pad: int = 0, +) -> QueryMessage: + """Make a query message. + + The query name, type, and class may all be specified either + as objects of the appropriate type, or as strings. + + The query will have a randomly chosen query id, and its DNS flags + will be set to dns.flags.RD. + + qname, a ``dns.name.Name`` or ``str``, the query name. + + *rdtype*, an ``int`` or ``str``, the desired rdata type. + + *rdclass*, an ``int`` or ``str``, the desired rdata class; the default + is class IN. + + *use_edns*, an ``int``, ``bool`` or ``None``. The EDNS level to use; the + default is ``None``. If ``None``, EDNS will be enabled only if other + parameters (*ednsflags*, *payload*, *request_payload*, or *options*) are + set. 
+ See the description of dns.message.Message.use_edns() for the possible + values for use_edns and their meanings. + + *want_dnssec*, a ``bool``. If ``True``, DNSSEC data is desired. + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the + maximum size of UDP datagram the sender can handle. I.e. how big + a response to this message can be. + + *request_payload*, an ``int``, is the EDNS payload size to use when + sending this message. If not specified, defaults to the value of + *payload*. + + *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS + options. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *id*, an ``int`` or ``None``, the desired query id. The default is + ``None``, which generates a random query id. + + *flags*, an ``int``, the desired query flags. The default is + ``dns.flags.RD``. + + *pad*, a non-negative ``int``. If 0, the default, do not pad; otherwise add + padding bytes to make the message size a multiple of *pad*. Note that if + padding is non-zero, an EDNS PADDING option will always be added to the + message. + + Returns a ``dns.message.QueryMessage`` + """ + + if isinstance(qname, str): + qname = dns.name.from_text(qname, idna_codec=idna_codec) + rdtype = dns.rdatatype.RdataType.make(rdtype) + rdclass = dns.rdataclass.RdataClass.make(rdclass) + m = QueryMessage(id=id) + m.flags = dns.flags.Flag(flags) + m.find_rrset(m.question, qname, rdclass, rdtype, create=True, force_unique=True) + # only pass keywords on to use_edns if they have been set to a + # non-None value. Setting a field will turn EDNS on if it hasn't + # been configured. + kwargs: Dict[str, Any] = {} + if ednsflags is not None: + kwargs["ednsflags"] = ednsflags + if payload is not None: + kwargs["payload"] = payload + if request_payload is not None: + kwargs["request_payload"] = request_payload + if options is not None: + kwargs["options"] = options + if kwargs and use_edns is None: + use_edns = 0 + kwargs["edns"] = use_edns + kwargs["pad"] = pad + m.use_edns(**kwargs) + m.want_dnssec(want_dnssec) + return m + + +def make_response( + query: Message, + recursion_available: bool = False, + our_payload: int = 8192, + fudge: int = 300, + tsig_error: int = 0, +) -> Message: + """Make a message which is a response for the specified query. + The message returned is really a response skeleton; it has all + of the infrastructure required of a response, but none of the + content. + + The response's question section is a shallow copy of the query's + question section, so the query's question RRsets should not be + changed. + + *query*, a ``dns.message.Message``, the query to respond to. + + *recursion_available*, a ``bool``, should RA be set in the response? + + *our_payload*, an ``int``, the payload size to advertise in EDNS + responses. + + *fudge*, an ``int``, the TSIG time fudge. + + *tsig_error*, an ``int``, the TSIG error. + + Returns a ``dns.message.Message`` object whose specific class is + appropriate for the query. For example, if query is a + ``dns.update.UpdateMessage``, response will be too. 
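+
+    A minimal sketch of server-side use (``query`` is assumed to be a
+    request already parsed with ``dns.message.from_wire()``)::
+
+        response = dns.message.make_response(query)
+        response.set_rcode(dns.rcode.NOERROR)
+        wire = response.to_wire()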
+ """ + + if query.flags & dns.flags.QR: + raise dns.exception.FormError("specified query message is not a query") + factory = _message_factory_from_opcode(query.opcode()) + response = factory(id=query.id) + response.flags = dns.flags.QR | (query.flags & dns.flags.RD) + if recursion_available: + response.flags |= dns.flags.RA + response.set_opcode(query.opcode()) + response.question = list(query.question) + if query.edns >= 0: + response.use_edns(0, 0, our_payload, query.payload) + if query.had_tsig: + response.use_tsig( + query.keyring, + query.keyname, + fudge, + None, + tsig_error, + b"", + query.keyalgorithm, + ) + response.request_mac = query.mac + return response + + +### BEGIN generated MessageSection constants + +QUESTION = MessageSection.QUESTION +ANSWER = MessageSection.ANSWER +AUTHORITY = MessageSection.AUTHORITY +ADDITIONAL = MessageSection.ADDITIONAL + +### END generated MessageSection constants diff --git a/backend/test/lib/python3.8/site-packages/dns/name.py b/backend/test/lib/python3.8/site-packages/dns/name.py new file mode 100644 index 0000000000000000000000000000000000000000..f452bfed7f636724dc551a7c323ca400a1f4e2b7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/name.py @@ -0,0 +1,1084 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Names. +""" + +import copy +import encodings.idna # type: ignore +import struct +from typing import Any, Dict, Iterable, Optional, Tuple, Union + +try: + import idna # type: ignore + + have_idna_2008 = True +except ImportError: # pragma: no cover + have_idna_2008 = False + +import dns.enum +import dns.exception +import dns.immutable +import dns.wire + +CompressType = Dict["Name", int] + + +class NameRelation(dns.enum.IntEnum): + """Name relation result from fullcompare().""" + + # This is an IntEnum for backwards compatibility in case anyone + # has hardwired the constants. + + #: The compared names have no relationship to each other. + NONE = 0 + #: the first name is a superdomain of the second. + SUPERDOMAIN = 1 + #: The first name is a subdomain of the second. + SUBDOMAIN = 2 + #: The compared names are equal. + EQUAL = 3 + #: The compared names have a common ancestor. 
+ COMMONANCESTOR = 4 + + @classmethod + def _maximum(cls): + return cls.COMMONANCESTOR + + @classmethod + def _short_name(cls): + return cls.__name__ + + +# Backwards compatibility +NAMERELN_NONE = NameRelation.NONE +NAMERELN_SUPERDOMAIN = NameRelation.SUPERDOMAIN +NAMERELN_SUBDOMAIN = NameRelation.SUBDOMAIN +NAMERELN_EQUAL = NameRelation.EQUAL +NAMERELN_COMMONANCESTOR = NameRelation.COMMONANCESTOR + + +class EmptyLabel(dns.exception.SyntaxError): + """A DNS label is empty.""" + + +class BadEscape(dns.exception.SyntaxError): + """An escaped code in a text format of DNS name is invalid.""" + + +class BadPointer(dns.exception.FormError): + """A DNS compression pointer points forward instead of backward.""" + + +class BadLabelType(dns.exception.FormError): + """The label type in DNS name wire format is unknown.""" + + +class NeedAbsoluteNameOrOrigin(dns.exception.DNSException): + """An attempt was made to convert a non-absolute name to + wire when there was also a non-absolute (or missing) origin.""" + + +class NameTooLong(dns.exception.FormError): + """A DNS name is > 255 octets long.""" + + +class LabelTooLong(dns.exception.SyntaxError): + """A DNS label is > 63 octets long.""" + + +class AbsoluteConcatenation(dns.exception.DNSException): + """An attempt was made to append anything other than the + empty name to an absolute DNS name.""" + + +class NoParent(dns.exception.DNSException): + """An attempt was made to get the parent of the root name + or the empty name.""" + + +class NoIDNA2008(dns.exception.DNSException): + """IDNA 2008 processing was requested but the idna module is not + available.""" + + +class IDNAException(dns.exception.DNSException): + """IDNA processing raised an exception.""" + + supp_kwargs = {"idna_exception"} + fmt = "IDNA processing exception: {idna_exception}" + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +_escaped = b'"().;\\@$' +_escaped_text = '"().;\\@$' + + +def _escapify(label: Union[bytes, str]) -> str: + """Escape the characters in label which need it. + @returns: the escaped string + @rtype: string""" + if isinstance(label, bytes): + # Ordinary DNS label mode. Escape special characters and values + # < 0x20 or > 0x7f. + text = "" + for c in label: + if c in _escaped: + text += "\\" + chr(c) + elif c > 0x20 and c < 0x7F: + text += chr(c) + else: + text += "\\%03d" % c + return text + + # Unicode label mode. Escape only special characters and values < 0x20 + text = "" + for uc in label: + if uc in _escaped_text: + text += "\\" + uc + elif uc <= "\x20": + text += "\\%03d" % ord(uc) + else: + text += uc + return text + + +class IDNACodec: + """Abstract base class for IDNA encoder/decoders.""" + + def __init__(self): + pass + + def is_idna(self, label: bytes) -> bool: + return label.lower().startswith(b"xn--") + + def encode(self, label: str) -> bytes: + raise NotImplementedError # pragma: no cover + + def decode(self, label: bytes) -> str: + # We do not apply any IDNA policy on decode. + if self.is_idna(label): + try: + slabel = label[4:].decode("punycode") + return _escapify(slabel) + except Exception as e: + raise IDNAException(idna_exception=e) + else: + return _escapify(label) + + +class IDNA2003Codec(IDNACodec): + """IDNA 2003 encoder/decoder.""" + + def __init__(self, strict_decode: bool = False): + """Initialize the IDNA 2003 encoder/decoder. + + *strict_decode* is a ``bool``. 
If `True`, then IDNA2003 checking + is done when decoding. This can cause failures if the name + was encoded with IDNA2008. The default is `False`. + """ + + super().__init__() + self.strict_decode = strict_decode + + def encode(self, label: str) -> bytes: + """Encode *label*.""" + + if label == "": + return b"" + try: + return encodings.idna.ToASCII(label) + except UnicodeError: + raise LabelTooLong + + def decode(self, label: bytes) -> str: + """Decode *label*.""" + if not self.strict_decode: + return super().decode(label) + if label == b"": + return "" + try: + return _escapify(encodings.idna.ToUnicode(label)) + except Exception as e: + raise IDNAException(idna_exception=e) + + +class IDNA2008Codec(IDNACodec): + """IDNA 2008 encoder/decoder.""" + + def __init__( + self, + uts_46: bool = False, + transitional: bool = False, + allow_pure_ascii: bool = False, + strict_decode: bool = False, + ): + """Initialize the IDNA 2008 encoder/decoder. + + *uts_46* is a ``bool``. If True, apply Unicode IDNA + compatibility processing as described in Unicode Technical + Standard #46 (https://unicode.org/reports/tr46/). + If False, do not apply the mapping. The default is False. + + *transitional* is a ``bool``: If True, use the + "transitional" mode described in Unicode Technical Standard + #46. The default is False. + + *allow_pure_ascii* is a ``bool``. If True, then a label which + consists of only ASCII characters is allowed. This is less + strict than regular IDNA 2008, but is also necessary for mixed + names, e.g. a name with starting with "_sip._tcp." and ending + in an IDN suffix which would otherwise be disallowed. The + default is False. + + *strict_decode* is a ``bool``: If True, then IDNA2008 checking + is done when decoding. This can cause failures if the name + was encoded with IDNA2003. The default is False. + """ + super().__init__() + self.uts_46 = uts_46 + self.transitional = transitional + self.allow_pure_ascii = allow_pure_ascii + self.strict_decode = strict_decode + + def encode(self, label: str) -> bytes: + if label == "": + return b"" + if self.allow_pure_ascii and is_all_ascii(label): + encoded = label.encode("ascii") + if len(encoded) > 63: + raise LabelTooLong + return encoded + if not have_idna_2008: + raise NoIDNA2008 + try: + if self.uts_46: + label = idna.uts46_remap(label, False, self.transitional) + return idna.alabel(label) + except idna.IDNAError as e: + if e.args[0] == "Label too long": + raise LabelTooLong + else: + raise IDNAException(idna_exception=e) + + def decode(self, label: bytes) -> str: + if not self.strict_decode: + return super().decode(label) + if label == b"": + return "" + if not have_idna_2008: + raise NoIDNA2008 + try: + ulabel = idna.ulabel(label) + if self.uts_46: + ulabel = idna.uts46_remap(ulabel, False, self.transitional) + return _escapify(ulabel) + except (idna.IDNAError, UnicodeError) as e: + raise IDNAException(idna_exception=e) + + +IDNA_2003_Practical = IDNA2003Codec(False) +IDNA_2003_Strict = IDNA2003Codec(True) +IDNA_2003 = IDNA_2003_Practical +IDNA_2008_Practical = IDNA2008Codec(True, False, True, False) +IDNA_2008_UTS_46 = IDNA2008Codec(True, False, False, False) +IDNA_2008_Strict = IDNA2008Codec(False, False, False, True) +IDNA_2008_Transitional = IDNA2008Codec(True, True, False, False) +IDNA_2008 = IDNA_2008_Practical + + +def _validate_labels(labels: Tuple[bytes, ...]) -> None: + """Check for empty labels in the middle of a label sequence, + labels that are too long, and for too many labels. 
+ + Raises ``dns.name.NameTooLong`` if the name as a whole is too long. + + Raises ``dns.name.EmptyLabel`` if a label is empty (i.e. the root + label) and appears in a position other than the end of the label + sequence + + """ + + l = len(labels) + total = 0 + i = -1 + j = 0 + for label in labels: + ll = len(label) + total += ll + 1 + if ll > 63: + raise LabelTooLong + if i < 0 and label == b"": + i = j + j += 1 + if total > 255: + raise NameTooLong + if i >= 0 and i != l - 1: + raise EmptyLabel + + +def _maybe_convert_to_binary(label: Union[bytes, str]) -> bytes: + """If label is ``str``, convert it to ``bytes``. If it is already + ``bytes`` just return it. + + """ + + if isinstance(label, bytes): + return label + if isinstance(label, str): + return label.encode() + raise ValueError # pragma: no cover + + +@dns.immutable.immutable +class Name: + + """A DNS name. + + The dns.name.Name class represents a DNS name as a tuple of + labels. Each label is a ``bytes`` in DNS wire format. Instances + of the class are immutable. + """ + + __slots__ = ["labels"] + + def __init__(self, labels: Iterable[Union[bytes, str]]): + """*labels* is any iterable whose values are ``str`` or ``bytes``.""" + + blabels = [_maybe_convert_to_binary(x) for x in labels] + self.labels = tuple(blabels) + _validate_labels(self.labels) + + def __copy__(self): + return Name(self.labels) + + def __deepcopy__(self, memo): + return Name(copy.deepcopy(self.labels, memo)) + + def __getstate__(self): + # Names can be pickled + return {"labels": self.labels} + + def __setstate__(self, state): + super().__setattr__("labels", state["labels"]) + _validate_labels(self.labels) + + def is_absolute(self) -> bool: + """Is the most significant label of this name the root label? + + Returns a ``bool``. + """ + + return len(self.labels) > 0 and self.labels[-1] == b"" + + def is_wild(self) -> bool: + """Is this name wild? (I.e. Is the least significant label '*'?) + + Returns a ``bool``. + """ + + return len(self.labels) > 0 and self.labels[0] == b"*" + + def __hash__(self) -> int: + """Return a case-insensitive hash of the name. + + Returns an ``int``. + """ + + h = 0 + for label in self.labels: + for c in label.lower(): + h += (h << 3) + c + return h + + def fullcompare(self, other: "Name") -> Tuple[NameRelation, int, int]: + """Compare two names, returning a 3-tuple + ``(relation, order, nlabels)``. + + *relation* describes the relation ship between the names, + and is one of: ``dns.name.NameRelation.NONE``, + ``dns.name.NameRelation.SUPERDOMAIN``, ``dns.name.NameRelation.SUBDOMAIN``, + ``dns.name.NameRelation.EQUAL``, or ``dns.name.NameRelation.COMMONANCESTOR``. + + *order* is < 0 if *self* < *other*, > 0 if *self* > *other*, and == + 0 if *self* == *other*. A relative name is always less than an + absolute name. If both names have the same relativity, then + the DNSSEC order relation is used to order them. + + *nlabels* is the number of significant labels that the two names + have in common. + + Here are some examples. Names ending in "." are absolute names, + those not ending in "." are relative names. + + ============= ============= =========== ===== ======= + self other relation order nlabels + ============= ============= =========== ===== ======= + www.example. www.example. equal 0 3 + www.example. example. subdomain > 0 2 + example. www.example. superdomain < 0 2 + example1.com. example2.com. common anc. < 0 2 + example1 example2. none < 0 0 + example1. 
example2 none > 0 0 + ============= ============= =========== ===== ======= + """ + + sabs = self.is_absolute() + oabs = other.is_absolute() + if sabs != oabs: + if sabs: + return (NameRelation.NONE, 1, 0) + else: + return (NameRelation.NONE, -1, 0) + l1 = len(self.labels) + l2 = len(other.labels) + ldiff = l1 - l2 + if ldiff < 0: + l = l1 + else: + l = l2 + + order = 0 + nlabels = 0 + namereln = NameRelation.NONE + while l > 0: + l -= 1 + l1 -= 1 + l2 -= 1 + label1 = self.labels[l1].lower() + label2 = other.labels[l2].lower() + if label1 < label2: + order = -1 + if nlabels > 0: + namereln = NameRelation.COMMONANCESTOR + return (namereln, order, nlabels) + elif label1 > label2: + order = 1 + if nlabels > 0: + namereln = NameRelation.COMMONANCESTOR + return (namereln, order, nlabels) + nlabels += 1 + order = ldiff + if ldiff < 0: + namereln = NameRelation.SUPERDOMAIN + elif ldiff > 0: + namereln = NameRelation.SUBDOMAIN + else: + namereln = NameRelation.EQUAL + return (namereln, order, nlabels) + + def is_subdomain(self, other: "Name") -> bool: + """Is self a subdomain of other? + + Note that the notion of subdomain includes equality, e.g. + "dnspython.org" is a subdomain of itself. + + Returns a ``bool``. + """ + + (nr, _, _) = self.fullcompare(other) + if nr == NameRelation.SUBDOMAIN or nr == NameRelation.EQUAL: + return True + return False + + def is_superdomain(self, other: "Name") -> bool: + """Is self a superdomain of other? + + Note that the notion of superdomain includes equality, e.g. + "dnspython.org" is a superdomain of itself. + + Returns a ``bool``. + """ + + (nr, _, _) = self.fullcompare(other) + if nr == NameRelation.SUPERDOMAIN or nr == NameRelation.EQUAL: + return True + return False + + def canonicalize(self) -> "Name": + """Return a name which is equal to the current name, but is in + DNSSEC canonical form. + """ + + return Name([x.lower() for x in self.labels]) + + def __eq__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] == 0 + else: + return False + + def __ne__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] != 0 + else: + return True + + def __lt__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] < 0 + else: + return NotImplemented + + def __le__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] <= 0 + else: + return NotImplemented + + def __ge__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] >= 0 + else: + return NotImplemented + + def __gt__(self, other): + if isinstance(other, Name): + return self.fullcompare(other)[1] > 0 + else: + return NotImplemented + + def __repr__(self): + return "<DNS name " + self.__str__() + ">" + + def __str__(self): + return self.to_text(False) + + def to_text(self, omit_final_dot: bool = False) -> str: + """Convert name to DNS text format. + + *omit_final_dot* is a ``bool``. If True, don't emit the final + dot (denoting the root label) for absolute names. The default + is False. + + Returns a ``str``. + """ + + if len(self.labels) == 0: + return "@" + if len(self.labels) == 1 and self.labels[0] == b"": + return "." + if omit_final_dot and self.is_absolute(): + l = self.labels[:-1] + else: + l = self.labels + s = ".".join(map(_escapify, l)) + return s + + def to_unicode( + self, omit_final_dot: bool = False, idna_codec: Optional[IDNACodec] = None + ) -> str: + """Convert name to Unicode text format. + + IDN ACE labels are converted to Unicode. + + *omit_final_dot* is a ``bool``. 
If True, don't emit the final + dot (denoting the root label) for absolute names. The default + is False. + *idna_codec* specifies the IDNA encoder/decoder. If None, the + dns.name.IDNA_2003_Practical encoder/decoder is used. + The IDNA_2003_Practical decoder does + not impose any policy, it just decodes punycode, so if you + don't want checking for compliance, you can use this decoder + for IDNA2008 as well. + + Returns a ``str``. + """ + + if len(self.labels) == 0: + return "@" + if len(self.labels) == 1 and self.labels[0] == b"": + return "." + if omit_final_dot and self.is_absolute(): + l = self.labels[:-1] + else: + l = self.labels + if idna_codec is None: + idna_codec = IDNA_2003_Practical + return ".".join([idna_codec.decode(x) for x in l]) + + def to_digestable(self, origin: Optional["Name"] = None) -> bytes: + """Convert name to a format suitable for digesting in hashes. + + The name is canonicalized and converted to uncompressed wire + format. All names in wire format are absolute. If the name + is a relative name, then an origin must be supplied. + + *origin* is a ``dns.name.Name`` or ``None``. If the name is + relative and origin is not ``None``, then origin will be appended + to the name. + + Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is + relative and no origin was provided. + + Returns a ``bytes``. + """ + + digest = self.to_wire(origin=origin, canonicalize=True) + assert digest is not None + return digest + + def to_wire( + self, + file: Optional[Any] = None, + compress: Optional[CompressType] = None, + origin: Optional["Name"] = None, + canonicalize: bool = False, + ) -> Optional[bytes]: + """Convert name to wire format, possibly compressing it. + + *file* is the file where the name is emitted (typically an + io.BytesIO file). If ``None`` (the default), a ``bytes`` + containing the wire name will be returned. + + *compress*, a ``dict``, is the compression table to use. If + ``None`` (the default), names will not be compressed. Note that + the compression code assumes that compression offset 0 is the + start of *file*, and thus compression will not be correct + if this is not the case. + + *origin* is a ``dns.name.Name`` or ``None``. If the name is + relative and origin is not ``None``, then *origin* will be appended + to it. + + *canonicalize*, a ``bool``, indicates whether the name should + be canonicalized; that is, converted to a format suitable for + digesting in hashes. + + Raises ``dns.name.NeedAbsoluteNameOrOrigin`` if the name is + relative and no origin was provided. + + Returns a ``bytes`` or ``None``. 
+ """ + + if file is None: + out = bytearray() + for label in self.labels: + out.append(len(label)) + if canonicalize: + out += label.lower() + else: + out += label + if not self.is_absolute(): + if origin is None or not origin.is_absolute(): + raise NeedAbsoluteNameOrOrigin + for label in origin.labels: + out.append(len(label)) + if canonicalize: + out += label.lower() + else: + out += label + return bytes(out) + + labels: Iterable[bytes] + if not self.is_absolute(): + if origin is None or not origin.is_absolute(): + raise NeedAbsoluteNameOrOrigin + labels = list(self.labels) + labels.extend(list(origin.labels)) + else: + labels = self.labels + i = 0 + for label in labels: + n = Name(labels[i:]) + i += 1 + if compress is not None: + pos = compress.get(n) + else: + pos = None + if pos is not None: + value = 0xC000 + pos + s = struct.pack("!H", value) + file.write(s) + break + else: + if compress is not None and len(n) > 1: + pos = file.tell() + if pos <= 0x3FFF: + compress[n] = pos + l = len(label) + file.write(struct.pack("!B", l)) + if l > 0: + if canonicalize: + file.write(label.lower()) + else: + file.write(label) + return None + + def __len__(self) -> int: + """The length of the name (in labels). + + Returns an ``int``. + """ + + return len(self.labels) + + def __getitem__(self, index): + return self.labels[index] + + def __add__(self, other): + return self.concatenate(other) + + def __sub__(self, other): + return self.relativize(other) + + def split(self, depth: int) -> Tuple["Name", "Name"]: + """Split a name into a prefix and suffix names at the specified depth. + + *depth* is an ``int`` specifying the number of labels in the suffix + + Raises ``ValueError`` if *depth* was not >= 0 and <= the length of the + name. + + Returns the tuple ``(prefix, suffix)``. + """ + + l = len(self.labels) + if depth == 0: + return (self, dns.name.empty) + elif depth == l: + return (dns.name.empty, self) + elif depth < 0 or depth > l: + raise ValueError("depth must be >= 0 and <= the length of the name") + return (Name(self[:-depth]), Name(self[-depth:])) + + def concatenate(self, other: "Name") -> "Name": + """Return a new name which is the concatenation of self and other. + + Raises ``dns.name.AbsoluteConcatenation`` if the name is + absolute and *other* is not the empty name. + + Returns a ``dns.name.Name``. + """ + + if self.is_absolute() and len(other) > 0: + raise AbsoluteConcatenation + labels = list(self.labels) + labels.extend(list(other.labels)) + return Name(labels) + + def relativize(self, origin: "Name") -> "Name": + """If the name is a subdomain of *origin*, return a new name which is + the name relative to origin. Otherwise return the name. + + For example, relativizing ``www.dnspython.org.`` to origin + ``dnspython.org.`` returns the name ``www``. Relativizing ``example.`` + to origin ``dnspython.org.`` returns ``example.``. + + Returns a ``dns.name.Name``. + """ + + if origin is not None and self.is_subdomain(origin): + return Name(self[: -len(origin)]) + else: + return self + + def derelativize(self, origin: "Name") -> "Name": + """If the name is a relative name, return a new name which is the + concatenation of the name and origin. Otherwise return the name. + + For example, derelativizing ``www`` to origin ``dnspython.org.`` + returns the name ``www.dnspython.org.``. Derelativizing ``example.`` + to origin ``dnspython.org.`` returns ``example.``. + + Returns a ``dns.name.Name``. 
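+
+        A minimal sketch (names are illustrative)::
+
+            origin = dns.name.from_text("dnspython.org.")
+            rel = dns.name.from_text("www", origin=None)  # a relative name
+            rel.derelativize(origin)  # -> www.dnspython.org.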
+ """ + + if not self.is_absolute(): + return self.concatenate(origin) + else: + return self + + def choose_relativity( + self, origin: Optional["Name"] = None, relativize: bool = True + ) -> "Name": + """Return a name with the relativity desired by the caller. + + If *origin* is ``None``, then the name is returned. + Otherwise, if *relativize* is ``True`` the name is + relativized, and if *relativize* is ``False`` the name is + derelativized. + + Returns a ``dns.name.Name``. + """ + + if origin: + if relativize: + return self.relativize(origin) + else: + return self.derelativize(origin) + else: + return self + + def parent(self) -> "Name": + """Return the parent of the name. + + For example, the parent of ``www.dnspython.org.`` is ``dnspython.org``. + + Raises ``dns.name.NoParent`` if the name is either the root name or the + empty name, and thus has no parent. + + Returns a ``dns.name.Name``. + """ + + if self == root or self == empty: + raise NoParent + return Name(self.labels[1:]) + + +#: The root name, '.' +root = Name([b""]) + +#: The empty name. +empty = Name([]) + + +def from_unicode( + text: str, origin: Optional[Name] = root, idna_codec: Optional[IDNACodec] = None +) -> Name: + """Convert unicode text into a Name object. + + Labels are encoded in IDN ACE form according to rules specified by + the IDNA codec. + + *text*, a ``str``, is the text to convert into a name. + + *origin*, a ``dns.name.Name``, specifies the origin to + append to non-absolute names. The default is the root name. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Returns a ``dns.name.Name``. + """ + + if not isinstance(text, str): + raise ValueError("input to from_unicode() must be a unicode string") + if not (origin is None or isinstance(origin, Name)): + raise ValueError("origin must be a Name or None") + labels = [] + label = "" + escaping = False + edigits = 0 + total = 0 + if idna_codec is None: + idna_codec = IDNA_2003 + if text == "@": + text = "" + if text: + if text in [".", "\u3002", "\uff0e", "\uff61"]: + return Name([b""]) # no Unicode "u" on this constant! + for c in text: + if escaping: + if edigits == 0: + if c.isdigit(): + total = int(c) + edigits += 1 + else: + label += c + escaping = False + else: + if not c.isdigit(): + raise BadEscape + total *= 10 + total += int(c) + edigits += 1 + if edigits == 3: + escaping = False + label += chr(total) + elif c in [".", "\u3002", "\uff0e", "\uff61"]: + if len(label) == 0: + raise EmptyLabel + labels.append(idna_codec.encode(label)) + label = "" + elif c == "\\": + escaping = True + edigits = 0 + total = 0 + else: + label += c + if escaping: + raise BadEscape + if len(label) > 0: + labels.append(idna_codec.encode(label)) + else: + labels.append(b"") + + if (len(labels) == 0 or labels[-1] != b"") and origin is not None: + labels.extend(list(origin.labels)) + return Name(labels) + + +def is_all_ascii(text: str) -> bool: + for c in text: + if ord(c) > 0x7F: + return False + return True + + +def from_text( + text: Union[bytes, str], + origin: Optional[Name] = root, + idna_codec: Optional[IDNACodec] = None, +) -> Name: + """Convert text into a Name object. + + *text*, a ``bytes`` or ``str``, is the text to convert into a name. + + *origin*, a ``dns.name.Name``, specifies the origin to + append to non-absolute names. The default is the root name. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. 
If ``None``, the default IDNA 2003 encoder/decoder + is used. + + Returns a ``dns.name.Name``. + """ + + if isinstance(text, str): + if not is_all_ascii(text): + # Some codepoint in the input text is > 127, so IDNA applies. + return from_unicode(text, origin, idna_codec) + # The input is all ASCII, so treat this like an ordinary non-IDNA + # domain name. Note that "all ASCII" is about the input text, + # not the codepoints in the domain name. E.g. if text has value + # + # r'\150\151\152\153\154\155\156\157\158\159' + # + # then it's still "all ASCII" even though the domain name has + # codepoints > 127. + text = text.encode("ascii") + if not isinstance(text, bytes): + raise ValueError("input to from_text() must be a string") + if not (origin is None or isinstance(origin, Name)): + raise ValueError("origin must be a Name or None") + labels = [] + label = b"" + escaping = False + edigits = 0 + total = 0 + if text == b"@": + text = b"" + if text: + if text == b".": + return Name([b""]) + for c in text: + byte_ = struct.pack("!B", c) + if escaping: + if edigits == 0: + if byte_.isdigit(): + total = int(byte_) + edigits += 1 + else: + label += byte_ + escaping = False + else: + if not byte_.isdigit(): + raise BadEscape + total *= 10 + total += int(byte_) + edigits += 1 + if edigits == 3: + escaping = False + label += struct.pack("!B", total) + elif byte_ == b".": + if len(label) == 0: + raise EmptyLabel + labels.append(label) + label = b"" + elif byte_ == b"\\": + escaping = True + edigits = 0 + total = 0 + else: + label += byte_ + if escaping: + raise BadEscape + if len(label) > 0: + labels.append(label) + else: + labels.append(b"") + if (len(labels) == 0 or labels[-1] != b"") and origin is not None: + labels.extend(list(origin.labels)) + return Name(labels) + + +# we need 'dns.wire.Parser' quoted as dns.name and dns.wire depend on each other. + + +def from_wire_parser(parser: "dns.wire.Parser") -> Name: + """Convert possibly compressed wire format into a Name. + + *parser* is a dns.wire.Parser. + + Raises ``dns.name.BadPointer`` if a compression pointer did not + point backwards in the message. + + Raises ``dns.name.BadLabelType`` if an invalid label type was encountered. + + Returns a ``dns.name.Name`` + """ + + labels = [] + biggest_pointer = parser.current + with parser.restore_furthest(): + count = parser.get_uint8() + while count != 0: + if count < 64: + labels.append(parser.get_bytes(count)) + elif count >= 192: + current = (count & 0x3F) * 256 + parser.get_uint8() + if current >= biggest_pointer: + raise BadPointer + biggest_pointer = current + parser.seek(current) + else: + raise BadLabelType + count = parser.get_uint8() + labels.append(b"") + return Name(labels) + + +def from_wire(message: bytes, current: int) -> Tuple[Name, int]: + """Convert possibly compressed wire format into a Name. + + *message* is a ``bytes`` containing an entire DNS message in DNS + wire form. + + *current*, an ``int``, is the offset of the beginning of the name + from the start of the message + + Raises ``dns.name.BadPointer`` if a compression pointer did not + point backwards in the message. + + Raises ``dns.name.BadLabelType`` if an invalid label type was encountered. + + Returns a ``(dns.name.Name, int)`` tuple consisting of the name + that was read and the number of bytes of the wire format message + which were consumed reading it. 
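+
+ A minimal illustrative doctest, using an uncompressed name at
+ offset 0::
+
+ >>> import dns.name
+ >>> wire = b'\x03www\x09dnspython\x03org\x00'
+ >>> (name, consumed) = dns.name.from_wire(wire, 0)
+ >>> (name.to_text(), consumed)
+ ('www.dnspython.org.', 19)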
+ """ + + if not isinstance(message, bytes): + raise ValueError("input to from_wire() must be a byte string") + parser = dns.wire.Parser(message, current) + name = from_wire_parser(parser) + return (name, parser.current - current) diff --git a/backend/test/lib/python3.8/site-packages/dns/namedict.py b/backend/test/lib/python3.8/site-packages/dns/namedict.py new file mode 100644 index 0000000000000000000000000000000000000000..ca8b19789b86a3482110b48d532999bb4cf277b7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/namedict.py @@ -0,0 +1,109 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# Copyright (C) 2016 Coresec Systems AB +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND CORESEC SYSTEMS AB DISCLAIMS ALL +# WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL CORESEC +# SYSTEMS AB BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, +# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION +# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS name dictionary""" + +# pylint seems to be confused about this one! +from collections.abc import MutableMapping # pylint: disable=no-name-in-module + +import dns.name + + +class NameDict(MutableMapping): + """A dictionary whose keys are dns.name.Name objects. + + In addition to being like a regular Python dictionary, this + dictionary can also get the deepest match for a given key. 
+ """ + + __slots__ = ["max_depth", "max_depth_items", "__store"] + + def __init__(self, *args, **kwargs): + super().__init__() + self.__store = dict() + #: the maximum depth of the keys that have ever been added + self.max_depth = 0 + #: the number of items of maximum depth + self.max_depth_items = 0 + self.update(dict(*args, **kwargs)) + + def __update_max_depth(self, key): + if len(key) == self.max_depth: + self.max_depth_items = self.max_depth_items + 1 + elif len(key) > self.max_depth: + self.max_depth = len(key) + self.max_depth_items = 1 + + def __getitem__(self, key): + return self.__store[key] + + def __setitem__(self, key, value): + if not isinstance(key, dns.name.Name): + raise ValueError("NameDict key must be a name") + self.__store[key] = value + self.__update_max_depth(key) + + def __delitem__(self, key): + self.__store.pop(key) + if len(key) == self.max_depth: + self.max_depth_items = self.max_depth_items - 1 + if self.max_depth_items == 0: + self.max_depth = 0 + for k in self.__store: + self.__update_max_depth(k) + + def __iter__(self): + return iter(self.__store) + + def __len__(self): + return len(self.__store) + + def has_key(self, key): + return key in self.__store + + def get_deepest_match(self, name): + """Find the deepest match to *name* in the dictionary. + + The deepest match is the longest name in the dictionary which is + a superdomain of *name*. Note that *superdomain* includes matching + *name* itself. + + *name*, a ``dns.name.Name``, the name to find. + + Returns a ``(key, value)`` where *key* is the deepest + ``dns.name.Name``, and *value* is the value associated with *key*. + """ + + depth = len(name) + if depth > self.max_depth: + depth = self.max_depth + for i in range(-depth, 0): + n = dns.name.Name(name[i:]) + if n in self: + return (n, self[n]) + v = self[dns.name.empty] + return (dns.name.empty, v) diff --git a/backend/test/lib/python3.8/site-packages/dns/nameserver.py b/backend/test/lib/python3.8/site-packages/dns/nameserver.py new file mode 100644 index 0000000000000000000000000000000000000000..5910139ed4d800773689f3de000e1480cf3eff54 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/nameserver.py @@ -0,0 +1,329 @@ +from typing import Optional, Union +from urllib.parse import urlparse + +import dns.asyncbackend +import dns.asyncquery +import dns.inet +import dns.message +import dns.query + + +class Nameserver: + def __init__(self): + pass + + def __str__(self): + raise NotImplementedError + + def kind(self) -> str: + raise NotImplementedError + + def is_always_max_size(self) -> bool: + raise NotImplementedError + + def answer_nameserver(self) -> str: + raise NotImplementedError + + def answer_port(self) -> int: + raise NotImplementedError + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + raise NotImplementedError + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + raise NotImplementedError + + +class AddressAndPortNameserver(Nameserver): + def __init__(self, address: str, port: int): + super().__init__() + self.address = address + self.port = port + + def kind(self) -> str: + raise NotImplementedError + + def 
is_always_max_size(self) -> bool: + return False + + def __str__(self): + ns_kind = self.kind() + return f"{ns_kind}:{self.address}@{self.port}" + + def answer_nameserver(self) -> str: + return self.address + + def answer_port(self) -> int: + return self.port + + +class Do53Nameserver(AddressAndPortNameserver): + def __init__(self, address: str, port: int = 53): + super().__init__(address, port) + + def kind(self): + return "Do53" + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + if max_size: + response = dns.query.tcp( + request, + self.address, + timeout=timeout, + port=self.port, + source=source, + source_port=source_port, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + else: + response = dns.query.udp( + request, + self.address, + timeout=timeout, + port=self.port, + source=source, + source_port=source_port, + raise_on_truncation=True, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + return response + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + if max_size: + response = await dns.asyncquery.tcp( + request, + self.address, + timeout=timeout, + port=self.port, + source=source, + source_port=source_port, + backend=backend, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + else: + response = await dns.asyncquery.udp( + request, + self.address, + timeout=timeout, + port=self.port, + source=source, + source_port=source_port, + raise_on_truncation=True, + backend=backend, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + return response + + +class DoHNameserver(Nameserver): + def __init__(self, url: str, bootstrap_address: Optional[str] = None): + super().__init__() + self.url = url + self.bootstrap_address = bootstrap_address + + def kind(self): + return "DoH" + + def is_always_max_size(self) -> bool: + return True + + def __str__(self): + return self.url + + def answer_nameserver(self) -> str: + return self.url + + def answer_port(self) -> int: + port = urlparse(self.url).port + if port is None: + port = 443 + return port + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return dns.query.https( + request, + self.url, + timeout=timeout, + bootstrap_address=self.bootstrap_address, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return await dns.asyncquery.https( + request, + self.url, + timeout=timeout, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + + +class DoTNameserver(AddressAndPortNameserver): + def __init__(self, address: str, port: int = 853, hostname: Optional[str] = None): + super().__init__(address, port) + 
self.hostname = hostname + + def kind(self): + return "DoT" + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return dns.query.tls( + request, + self.address, + port=self.port, + timeout=timeout, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + server_hostname=self.hostname, + ) + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return await dns.asyncquery.tls( + request, + self.address, + port=self.port, + timeout=timeout, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + server_hostname=self.hostname, + ) + + +class DoQNameserver(AddressAndPortNameserver): + def __init__( + self, + address: str, + port: int = 853, + verify: Union[bool, str] = True, + server_hostname: Optional[str] = None, + ): + super().__init__(address, port) + self.verify = verify + self.server_hostname = server_hostname + + def kind(self): + return "DoQ" + + def query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return dns.query.quic( + request, + self.address, + port=self.port, + timeout=timeout, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + verify=self.verify, + server_hostname=self.server_hostname, + ) + + async def async_query( + self, + request: dns.message.QueryMessage, + timeout: float, + source: Optional[str], + source_port: int, + max_size: bool, + backend: dns.asyncbackend.Backend, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + ) -> dns.message.Message: + return await dns.asyncquery.quic( + request, + self.address, + port=self.port, + timeout=timeout, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + verify=self.verify, + server_hostname=self.server_hostname, + ) diff --git a/backend/test/lib/python3.8/site-packages/dns/node.py b/backend/test/lib/python3.8/site-packages/dns/node.py new file mode 100644 index 0000000000000000000000000000000000000000..c670243c527a9aa3da4e33e1ef7185658c3f8d52 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/node.py @@ -0,0 +1,360 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS nodes. 
A node is a set of rdatasets.""" + +import enum +import io +from typing import Any, Dict, Optional + +import dns.immutable +import dns.name +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.renderer +import dns.rrset + +_cname_types = { + dns.rdatatype.CNAME, +} + +# "neutral" types can coexist with a CNAME and thus are not "other data" +_neutral_types = { + dns.rdatatype.NSEC, # RFC 4035 section 2.5 + dns.rdatatype.NSEC3, # This is not likely to happen, but not impossible! + dns.rdatatype.KEY, # RFC 4035 section 2.5, RFC 3007 +} + + +def _matches_type_or_its_signature(rdtypes, rdtype, covers): + return rdtype in rdtypes or (rdtype == dns.rdatatype.RRSIG and covers in rdtypes) + + +@enum.unique +class NodeKind(enum.Enum): + """Rdatasets in nodes""" + + REGULAR = 0 # a.k.a "other data" + NEUTRAL = 1 + CNAME = 2 + + @classmethod + def classify( + cls, rdtype: dns.rdatatype.RdataType, covers: dns.rdatatype.RdataType + ) -> "NodeKind": + if _matches_type_or_its_signature(_cname_types, rdtype, covers): + return NodeKind.CNAME + elif _matches_type_or_its_signature(_neutral_types, rdtype, covers): + return NodeKind.NEUTRAL + else: + return NodeKind.REGULAR + + @classmethod + def classify_rdataset(cls, rdataset: dns.rdataset.Rdataset) -> "NodeKind": + return cls.classify(rdataset.rdtype, rdataset.covers) + + +class Node: + + """A Node is a set of rdatasets. + + A node is either a CNAME node or an "other data" node. A CNAME + node contains only CNAME, KEY, NSEC, and NSEC3 rdatasets along with their + covering RRSIG rdatasets. An "other data" node contains any + rdataset other than a CNAME or RRSIG(CNAME) rdataset. When + changes are made to a node, the CNAME or "other data" state is + always consistent with the update, i.e. the most recent change + wins. For example, if you have a node which contains a CNAME + rdataset, and then add an MX rdataset to it, then the CNAME + rdataset will be deleted. Likewise if you have a node containing + an MX rdataset and add a CNAME rdataset, the MX rdataset will be + deleted. + """ + + __slots__ = ["rdatasets"] + + def __init__(self): + # the set of rdatasets, represented as a list. + self.rdatasets = [] + + def to_text(self, name: dns.name.Name, **kw: Dict[str, Any]) -> str: + """Convert a node to text format. + + Each rdataset at the node is printed. Any keyword arguments + to this method are passed on to the rdataset's to_text() method. + + *name*, a ``dns.name.Name``, the owner name of the + rdatasets. + + Returns a ``str``. + + """ + + s = io.StringIO() + for rds in self.rdatasets: + if len(rds) > 0: + s.write(rds.to_text(name, **kw)) # type: ignore[arg-type] + s.write("\n") + return s.getvalue()[:-1] + + def __repr__(self): + return "<DNS node " + str(id(self)) + ">" + + def __eq__(self, other): + # + # This is inefficient. Good thing we don't need to do it much. + # + for rd in self.rdatasets: + if rd not in other.rdatasets: + return False + for rd in other.rdatasets: + if rd not in self.rdatasets: + return False + return True + + def __ne__(self, other): + return not self.__eq__(other) + + def __len__(self): + return len(self.rdatasets) + + def __iter__(self): + return iter(self.rdatasets) + + def _append_rdataset(self, rdataset): + """Append rdataset to the node with special handling for CNAME and + other data conditions. + + Specifically, if the rdataset being appended has ``NodeKind.CNAME``, + then all rdatasets other than KEY, NSEC, NSEC3, and their covering + RRSIGs are deleted. 
If the rdataset being appended has + ``NodeKind.REGULAR`` then CNAME and RRSIG(CNAME) are deleted. + """ + # Make having just one rdataset at the node fast. + if len(self.rdatasets) > 0: + kind = NodeKind.classify_rdataset(rdataset) + if kind == NodeKind.CNAME: + self.rdatasets = [ + rds + for rds in self.rdatasets + if NodeKind.classify_rdataset(rds) != NodeKind.REGULAR + ] + elif kind == NodeKind.REGULAR: + self.rdatasets = [ + rds + for rds in self.rdatasets + if NodeKind.classify_rdataset(rds) != NodeKind.CNAME + ] + # Otherwise the rdataset is NodeKind.NEUTRAL and we do not need to + # edit self.rdatasets. + self.rdatasets.append(rdataset) + + def find_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset: + """Find an rdataset matching the specified properties in the + current node. + + *rdclass*, a ``dns.rdataclass.RdataClass``, the class of the rdataset. + + *rdtype*, a ``dns.rdatatype.RdataType``, the type of the rdataset. + + *covers*, a ``dns.rdatatype.RdataType``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. If True, create the rdataset if it is not found. + + Raises ``KeyError`` if an rdataset of the desired type and class does + not exist and *create* is not ``True``. + + Returns a ``dns.rdataset.Rdataset``. + """ + + for rds in self.rdatasets: + if rds.match(rdclass, rdtype, covers): + return rds + if not create: + raise KeyError + rds = dns.rdataset.Rdataset(rdclass, rdtype, covers) + self._append_rdataset(rds) + return rds + + def get_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> Optional[dns.rdataset.Rdataset]: + """Get an rdataset matching the specified properties in the + current node. + + None is returned if an rdataset of the specified type and + class does not exist and *create* is not ``True``. + + *rdclass*, an ``int``, the class of the rdataset. + + *rdtype*, an ``int``, the type of the rdataset. + + *covers*, an ``int``, the covered type. Usually this value is + dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or + dns.rdatatype.RRSIG, then the covers value will be the rdata + type the SIG/RRSIG covers. The library treats the SIG and RRSIG + types as if they were a family of + types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This makes RRSIGs much + easier to work with than if RRSIGs covering different rdata + types were aggregated into a single RRSIG rdataset. + + *create*, a ``bool``. If True, create the rdataset if it is not found. + + Returns a ``dns.rdataset.Rdataset`` or ``None``. 
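+
+ A minimal illustrative doctest::
+
+ >>> import dns.rdataclass, dns.rdatatype
+ >>> node = Node()
+ >>> rds = node.find_rdataset(dns.rdataclass.IN, dns.rdatatype.A, create=True)
+ >>> node.get_rdataset(dns.rdataclass.IN, dns.rdatatype.A) is rds
+ True
+ >>> node.get_rdataset(dns.rdataclass.IN, dns.rdatatype.MX) is None
+ True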
+ """ + + try: + rds = self.find_rdataset(rdclass, rdtype, covers, create) + except KeyError: + rds = None + return rds + + def delete_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + ) -> None: + """Delete the rdataset matching the specified properties in the + current node. + + If a matching rdataset does not exist, it is not an error. + + *rdclass*, an ``int``, the class of the rdataset. + + *rdtype*, an ``int``, the type of the rdataset. + + *covers*, an ``int``, the covered type. + """ + + rds = self.get_rdataset(rdclass, rdtype, covers) + if rds is not None: + self.rdatasets.remove(rds) + + def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None: + """Replace an rdataset. + + It is not an error if there is no rdataset matching *replacement*. + + Ownership of the *replacement* object is transferred to the node; + in other words, this method does not store a copy of *replacement* + at the node, it stores *replacement* itself. + + *replacement*, a ``dns.rdataset.Rdataset``. + + Raises ``ValueError`` if *replacement* is not a + ``dns.rdataset.Rdataset``. + """ + + if not isinstance(replacement, dns.rdataset.Rdataset): + raise ValueError("replacement is not an rdataset") + if isinstance(replacement, dns.rrset.RRset): + # RRsets are not good replacements as the match() method + # is not compatible. + replacement = replacement.to_rdataset() + self.delete_rdataset( + replacement.rdclass, replacement.rdtype, replacement.covers + ) + self._append_rdataset(replacement) + + def classify(self) -> NodeKind: + """Classify a node. + + A node which contains a CNAME or RRSIG(CNAME) is a + ``NodeKind.CNAME`` node. + + A node which contains only "neutral" types, i.e. types allowed to + co-exist with a CNAME, is a ``NodeKind.NEUTRAL`` node. The neutral + types are NSEC, NSEC3, KEY, and their associated RRSIGS. An empty node + is also considered neutral. + + A node which contains some rdataset which is not a CNAME, RRSIG(CNAME), + or a neutral type is a a ``NodeKind.REGULAR`` node. Regular nodes are + also commonly referred to as "other data". 
+ """ + for rdataset in self.rdatasets: + kind = NodeKind.classify(rdataset.rdtype, rdataset.covers) + if kind != NodeKind.NEUTRAL: + return kind + return NodeKind.NEUTRAL + + def is_immutable(self) -> bool: + return False + + +@dns.immutable.immutable +class ImmutableNode(Node): + def __init__(self, node): + super().__init__() + self.rdatasets = tuple( + [dns.rdataset.ImmutableRdataset(rds) for rds in node.rdatasets] + ) + + def find_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset: + if create: + raise TypeError("immutable") + return super().find_rdataset(rdclass, rdtype, covers, False) + + def get_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> Optional[dns.rdataset.Rdataset]: + if create: + raise TypeError("immutable") + return super().get_rdataset(rdclass, rdtype, covers, False) + + def delete_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + ) -> None: + raise TypeError("immutable") + + def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None: + raise TypeError("immutable") + + def is_immutable(self) -> bool: + return True diff --git a/backend/test/lib/python3.8/site-packages/dns/opcode.py b/backend/test/lib/python3.8/site-packages/dns/opcode.py new file mode 100644 index 0000000000000000000000000000000000000000..78b43d2cbd1404b57f683b2bfad7f726e99caffd --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/opcode.py @@ -0,0 +1,117 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Opcodes.""" + +import dns.enum +import dns.exception + + +class Opcode(dns.enum.IntEnum): + #: Query + QUERY = 0 + #: Inverse Query (historical) + IQUERY = 1 + #: Server Status (unspecified and unimplemented anywhere) + STATUS = 2 + #: Notify + NOTIFY = 4 + #: Dynamic Update + UPDATE = 5 + + @classmethod + def _maximum(cls): + return 15 + + @classmethod + def _unknown_exception_class(cls): + return UnknownOpcode + + +class UnknownOpcode(dns.exception.DNSException): + """An DNS opcode is unknown.""" + + +def from_text(text: str) -> Opcode: + """Convert text into an opcode. + + *text*, a ``str``, the textual opcode + + Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown. + + Returns an ``int``. + """ + + return Opcode.from_text(text) + + +def from_flags(flags: int) -> Opcode: + """Extract an opcode from DNS message flags. + + *flags*, an ``int``, the DNS flags. + + Returns an ``int``. 
+ """ + + return Opcode((flags & 0x7800) >> 11) + + +def to_flags(value: Opcode) -> int: + """Convert an opcode to a value suitable for ORing into DNS message + flags. + + *value*, an ``int``, the DNS opcode value. + + Returns an ``int``. + """ + + return (value << 11) & 0x7800 + + +def to_text(value: Opcode) -> str: + """Convert an opcode to text. + + *value*, an ``int`` the opcode value, + + Raises ``dns.opcode.UnknownOpcode`` if the opcode is unknown. + + Returns a ``str``. + """ + + return Opcode.to_text(value) + + +def is_update(flags: int) -> bool: + """Is the opcode in flags UPDATE? + + *flags*, an ``int``, the DNS message flags. + + Returns a ``bool``. + """ + + return from_flags(flags) == Opcode.UPDATE + + +### BEGIN generated Opcode constants + +QUERY = Opcode.QUERY +IQUERY = Opcode.IQUERY +STATUS = Opcode.STATUS +NOTIFY = Opcode.NOTIFY +UPDATE = Opcode.UPDATE + +### END generated Opcode constants diff --git a/backend/test/lib/python3.8/site-packages/dns/py.typed b/backend/test/lib/python3.8/site-packages/dns/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/dns/query.py b/backend/test/lib/python3.8/site-packages/dns/query.py new file mode 100644 index 0000000000000000000000000000000000000000..0d71125156603e7f60f5710b2a4e8c88048eecc5 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/query.py @@ -0,0 +1,1519 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Talk to a DNS server.""" + +import base64 +import contextlib +import enum +import errno +import os +import selectors +import socket +import struct +import time +from typing import Any, Dict, Optional, Tuple, Union + +import dns.exception +import dns.inet +import dns.message +import dns.name +import dns.quic +import dns.rcode +import dns.rdataclass +import dns.rdatatype +import dns.serial +import dns.transaction +import dns.tsig +import dns.xfr + + +def _remaining(expiration): + if expiration is None: + return None + timeout = expiration - time.time() + if timeout <= 0.0: + raise dns.exception.Timeout + return timeout + + +def _expiration_for_this_attempt(timeout, expiration): + if expiration is None: + return None + return min(time.time() + timeout, expiration) + + +_have_httpx = False +_have_http2 = False +try: + import httpcore + import httpcore._backends.sync + import httpx + + _CoreNetworkBackend = httpcore.NetworkBackend + _CoreSyncStream = httpcore._backends.sync.SyncStream + + _have_httpx = True + try: + # See if http2 support is available. 
+ with httpx.Client(http2=True): + _have_http2 = True + except Exception: + pass + + class _NetworkBackend(_CoreNetworkBackend): + def __init__(self, resolver, local_port, bootstrap_address, family): + super().__init__() + self._local_port = local_port + self._resolver = resolver + self._bootstrap_address = bootstrap_address + self._family = family + + def connect_tcp( + self, host, port, timeout, local_address, socket_options=None + ): # pylint: disable=signature-differs + addresses = [] + _, expiration = _compute_times(timeout) + if dns.inet.is_address(host): + addresses.append(host) + elif self._bootstrap_address is not None: + addresses.append(self._bootstrap_address) + else: + timeout = _remaining(expiration) + family = self._family + if local_address: + family = dns.inet.af_for_address(local_address) + answers = self._resolver.resolve_name( + host, family=family, lifetime=timeout + ) + addresses = answers.addresses() + for address in addresses: + af = dns.inet.af_for_address(address) + if local_address is not None or self._local_port != 0: + source = dns.inet.low_level_address_tuple( + (local_address, self._local_port), af + ) + else: + source = None + sock = _make_socket(af, socket.SOCK_STREAM, source) + attempt_expiration = _expiration_for_this_attempt(2.0, expiration) + try: + _connect( + sock, + dns.inet.low_level_address_tuple((address, port), af), + attempt_expiration, + ) + return _CoreSyncStream(sock) + except Exception: + pass + raise httpcore.ConnectError + + def connect_unix_socket( + self, path, timeout, socket_options=None + ): # pylint: disable=signature-differs + raise NotImplementedError + + class _HTTPTransport(httpx.HTTPTransport): + def __init__( + self, + *args, + local_port=0, + bootstrap_address=None, + resolver=None, + family=socket.AF_UNSPEC, + **kwargs, + ): + if resolver is None: + # pylint: disable=import-outside-toplevel,redefined-outer-name + import dns.resolver + + resolver = dns.resolver.Resolver() + super().__init__(*args, **kwargs) + self._pool._network_backend = _NetworkBackend( + resolver, local_port, bootstrap_address, family + ) + +except ImportError: # pragma: no cover + + class _HTTPTransport: # type: ignore + def connect_tcp(self, host, port, timeout, local_address): + raise NotImplementedError + + +have_doh = _have_httpx + +try: + import ssl +except ImportError: # pragma: no cover + + class ssl: # type: ignore + class WantReadException(Exception): + pass + + class WantWriteException(Exception): + pass + + class SSLContext: + pass + + class SSLSocket: + pass + + @classmethod + def create_default_context(cls, *args, **kwargs): + raise Exception("no ssl support") # pylint: disable=broad-exception-raised + + +# Function used to create a socket. Can be overridden if needed in special +# situations. 
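+# For example (an illustrative override, not part of dnspython), a test
+# harness could install a recording factory before issuing queries:
+#
+#     dns.query.socket_factory = MyRecordingSocket
+#
+# where MyRecordingSocket is any callable (hypothetical here) that
+# accepts the same arguments as socket.socket.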
+socket_factory = socket.socket + + +class UnexpectedSource(dns.exception.DNSException): + """A DNS query response came from an unexpected address or port.""" + + +class BadResponse(dns.exception.FormError): + """A DNS query response does not respond to the question asked.""" + + +class NoDOH(dns.exception.DNSException): + """DNS over HTTPS (DOH) was requested but the httpx module is not + available.""" + + +class NoDOQ(dns.exception.DNSException): + """DNS over QUIC (DOQ) was requested but the aioquic module is not + available.""" + + +# for backwards compatibility +TransferError = dns.xfr.TransferError + + +def _compute_times(timeout): + now = time.time() + if timeout is None: + return (now, None) + else: + return (now, now + timeout) + + +def _wait_for(fd, readable, writable, _, expiration): + # Use the selected selector class to wait for any of the specified + # events. An "expiration" absolute time is converted into a relative + # timeout. + # + # The unused parameter is 'error', which is always set when + # selecting for read or write, and we have no error-only selects. + + if readable and isinstance(fd, ssl.SSLSocket) and fd.pending() > 0: + return True + sel = _selector_class() + events = 0 + if readable: + events |= selectors.EVENT_READ + if writable: + events |= selectors.EVENT_WRITE + if events: + sel.register(fd, events) + if expiration is None: + timeout = None + else: + timeout = expiration - time.time() + if timeout <= 0.0: + raise dns.exception.Timeout + if not sel.select(timeout): + raise dns.exception.Timeout + + +def _set_selector_class(selector_class): + # Internal API. Do not use. + + global _selector_class + + _selector_class = selector_class + + +if hasattr(selectors, "PollSelector"): + # Prefer poll() on platforms that support it because it has no + # limits on the maximum value of a file descriptor (plus it will + # be more efficient for high values). + # + # We ignore typing here as we can't say _selector_class is Any + # on python < 3.8 due to a bug. + _selector_class = selectors.PollSelector # type: ignore +else: + _selector_class = selectors.SelectSelector # type: ignore + + +def _wait_for_readable(s, expiration): + _wait_for(s, True, False, True, expiration) + + +def _wait_for_writable(s, expiration): + _wait_for(s, False, True, True, expiration) + + +def _addresses_equal(af, a1, a2): + # Convert the first value of the tuple, which is a textual format + # address into binary form, so that we are not confused by different + # textual representations of the same address + try: + n1 = dns.inet.inet_pton(af, a1[0]) + n2 = dns.inet.inet_pton(af, a2[0]) + except dns.exception.SyntaxError: + return False + return n1 == n2 and a1[1:] == a2[1:] + + +def _matches_destination(af, from_address, destination, ignore_unexpected): + # Check that from_address is appropriate for a response to a query + # sent to destination. + if not destination: + return True + if _addresses_equal(af, from_address, destination) or ( + dns.inet.is_multicast(destination[0]) and from_address[1:] == destination[1:] + ): + return True + elif ignore_unexpected: + return False + raise UnexpectedSource( + f"got a response from {from_address} instead of " f"{destination}" + ) + + +def _destination_and_source( + where, port, source, source_port, where_must_be_address=True +): + # Apply defaults and compute destination and source tuples + # suitable for use in connect(), sendto(), or bind(). 
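+ # For example (illustrative): ("8.8.8.8", 53, None, 0) yields
+ # (socket.AF_INET, ("8.8.8.8", 53), None), while a source address in a
+ # different family than the destination raises ValueError below.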
+ af = None + destination = None + try: + af = dns.inet.af_for_address(where) + destination = where + except Exception: + if where_must_be_address: + raise + # URLs are ok so eat the exception + if source: + saf = dns.inet.af_for_address(source) + if af: + # We know the destination af, so source had better agree! + if saf != af: + raise ValueError( + "different address families for source and destination" + ) + else: + # We didn't know the destination af, but we know the source, + # so that's our af. + af = saf + if source_port and not source: + # Caller has specified a source_port but not an address, so we + # need to return a source, and we need to use the appropriate + # wildcard address as the address. + try: + source = dns.inet.any_for_af(af) + except Exception: + # we catch this and raise ValueError for backwards compatibility + raise ValueError("source_port specified but address family is unknown") + # Convert high-level (address, port) tuples into low-level address + # tuples. + if destination: + destination = dns.inet.low_level_address_tuple((destination, port), af) + if source: + source = dns.inet.low_level_address_tuple((source, source_port), af) + return (af, destination, source) + + +def _make_socket(af, type, source, ssl_context=None, server_hostname=None): + s = socket_factory(af, type) + try: + s.setblocking(False) + if source is not None: + s.bind(source) + if ssl_context: + # LGTM gets a false positive here, as our default context is OK + return ssl_context.wrap_socket( + s, + do_handshake_on_connect=False, # lgtm[py/insecure-protocol] + server_hostname=server_hostname, + ) + else: + return s + except Exception: + s.close() + raise + + +def https( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 443, + source: Optional[str] = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + session: Optional[Any] = None, + path: str = "/dns-query", + post: bool = True, + bootstrap_address: Optional[str] = None, + verify: Union[bool, str] = True, + resolver: Optional["dns.resolver.Resolver"] = None, + family: Optional[int] = socket.AF_UNSPEC, +) -> dns.message.Message: + """Return the response obtained after sending a query via DNS-over-HTTPS. + + *q*, a ``dns.message.Message``, the query to send. + + *where*, a ``str``, the nameserver IP address or the full URL. If an IP address is + given, the URL will be constructed using the following schema: + https://<IP-address>:<port>/<path>. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the query + times out. If ``None``, the default, wait forever. + + *port*, a ``int``, the port to send the query to. The default is 443. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying the source + address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. The default is + 0. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the + received message. + + *session*, an ``httpx.Client``. If provided, the client session to use to send the + queries. + + *path*, a ``str``. If *where* is an IP address, then *path* will be used to + construct the URL to send the DNS query to. + + *post*, a ``bool``. If ``True``, the default, POST method will be used. + + *bootstrap_address*, a ``str``, the IP address to use to bypass resolution. + + *verify*, a ``bool`` or ``str``. 
If a ``True``, then TLS certificate verification + of the server is done using the default CA bundle; if ``False``, then no + verification is done; if a `str` then it specifies the path to a certificate file or + directory which will be used for verification. + + *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use for + resolution of hostnames in URLs. If not specified, a new resolver with a default + configuration will be used; note this is *not* the default resolver as that resolver + might have been configured to use DoH causing a chicken-and-egg problem. This + parameter only has an effect if the HTTP library is httpx. + + *family*, an ``int``, the address family. If socket.AF_UNSPEC (the default), both A + and AAAA records will be retrieved. + + Returns a ``dns.message.Message``. + """ + + if not have_doh: + raise NoDOH # pragma: no cover + if session and not isinstance(session, httpx.Client): + raise ValueError("session parameter must be an httpx.Client") + + wire = q.to_wire() + (af, _, the_source) = _destination_and_source( + where, port, source, source_port, False + ) + transport = None + headers = {"accept": "application/dns-message"} + if af is not None and dns.inet.is_address(where): + if af == socket.AF_INET: + url = "https://{}:{}{}".format(where, port, path) + elif af == socket.AF_INET6: + url = "https://[{}]:{}{}".format(where, port, path) + else: + url = where + + # set source port and source address + + if the_source is None: + local_address = None + local_port = 0 + else: + local_address = the_source[0] + local_port = the_source[1] + transport = _HTTPTransport( + local_address=local_address, + http1=True, + http2=_have_http2, + verify=verify, + local_port=local_port, + bootstrap_address=bootstrap_address, + resolver=resolver, + family=family, + ) + + if session: + cm: contextlib.AbstractContextManager = contextlib.nullcontext(session) + else: + cm = httpx.Client( + http1=True, http2=_have_http2, verify=verify, transport=transport + ) + with cm as session: + # see https://tools.ietf.org/html/rfc8484#section-4.1.1 for DoH + # GET and POST examples + if post: + headers.update( + { + "content-type": "application/dns-message", + "content-length": str(len(wire)), + } + ) + response = session.post(url, headers=headers, content=wire, timeout=timeout) + else: + wire = base64.urlsafe_b64encode(wire).rstrip(b"=") + twire = wire.decode() # httpx does a repr() if we give it bytes + response = session.get( + url, headers=headers, timeout=timeout, params={"dns": twire} + ) + + # see https://tools.ietf.org/html/rfc8484#section-4.2.1 for info about DoH + # status codes + if response.status_code < 200 or response.status_code > 299: + raise ValueError( + "{} responded with status code {}" + "\nResponse body: {}".format(where, response.status_code, response.content) + ) + r = dns.message.from_wire( + response.content, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = response.elapsed.total_seconds() + if not q.is_response(r): + raise BadResponse + return r + + +def _udp_recv(sock, max_size, expiration): + """Reads a datagram from the socket. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + while True: + try: + return sock.recvfrom(max_size) + except BlockingIOError: + _wait_for_readable(sock, expiration) + + +def _udp_send(sock, data, destination, expiration): + """Sends the specified datagram to destination over the socket. 
+ A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + while True: + try: + if destination: + return sock.sendto(data, destination) + else: + return sock.send(data) + except BlockingIOError: # pragma: no cover + _wait_for_writable(sock, expiration) + + +def send_udp( + sock: Any, + what: Union[dns.message.Message, bytes], + destination: Any, + expiration: Optional[float] = None, +) -> Tuple[int, float]: + """Send a DNS message to the specified UDP socket. + + *sock*, a ``socket``. + + *what*, a ``bytes`` or ``dns.message.Message``, the message to send. + + *destination*, a destination tuple appropriate for the address family + of the socket, specifying where to send the query. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + Returns an ``(int, float)`` tuple of bytes sent and the sent time. + """ + + if isinstance(what, dns.message.Message): + what = what.to_wire() + sent_time = time.time() + n = _udp_send(sock, what, destination, expiration) + return (n, sent_time) + + +def receive_udp( + sock: Any, + destination: Optional[Any] = None, + expiration: Optional[float] = None, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, + request_mac: Optional[bytes] = b"", + ignore_trailing: bool = False, + raise_on_truncation: bool = False, +) -> Any: + """Read a DNS message from a UDP socket. + + *sock*, a ``socket``. + + *destination*, a destination tuple appropriate for the address family + of the socket, specifying where the message is expected to arrive from. + When receiving a response, this would be where the associated query was + sent. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from + unexpected sources. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *keyring*, a ``dict``, the keyring to use for TSIG. + + *request_mac*, a ``bytes`` or ``None``, the MAC of the request (for TSIG). + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if + the TC bit is set. + + Raises if the message is malformed, if network errors occur, of if + there is a timeout. + + If *destination* is not ``None``, returns a ``(dns.message.Message, float)`` + tuple of the received message and the received time. + + If *destination* is ``None``, returns a + ``(dns.message.Message, float, tuple)`` + tuple of the received message, the received time, and the address where + the message arrived from. 
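+
+ A minimal illustrative pairing with ``send_udp`` (8.8.8.8 is used
+ here only as an example of a reachable resolver)::
+
+ import socket
+ import time
+
+ import dns.message
+ import dns.query
+
+ q = dns.message.make_query('dnspython.org', 'A')
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ s.setblocking(False)
+ expiration = time.time() + 5.0
+ dns.query.send_udp(s, q, ('8.8.8.8', 53), expiration)
+ (r, when) = dns.query.receive_udp(s, ('8.8.8.8', 53), expiration)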
+ """ + + wire = b"" + while True: + (wire, from_address) = _udp_recv(sock, 65535, expiration) + if _matches_destination( + sock.family, from_address, destination, ignore_unexpected + ): + break + received_time = time.time() + r = dns.message.from_wire( + wire, + keyring=keyring, + request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + raise_on_truncation=raise_on_truncation, + ) + if destination: + return (r, received_time) + else: + return (r, received_time, from_address) + + +def udp( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 53, + source: Optional[str] = None, + source_port: int = 0, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + raise_on_truncation: bool = False, + sock: Optional[Any] = None, +) -> dns.message.Message: + """Return the response obtained after sending a query via UDP. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the + query times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 53. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from + unexpected sources. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *raise_on_truncation*, a ``bool``. If ``True``, raise an exception if + the TC bit is set. + + *sock*, a ``socket.socket``, or ``None``, the socket to use for the + query. If ``None``, the default, a socket is created. Note that + if a socket is provided, it must be a nonblocking datagram socket, + and the *source* and *source_port* are ignored. + + Returns a ``dns.message.Message``. + """ + + wire = q.to_wire() + (af, destination, source) = _destination_and_source( + where, port, source, source_port + ) + (begin_time, expiration) = _compute_times(timeout) + if sock: + cm: contextlib.AbstractContextManager = contextlib.nullcontext(sock) + else: + cm = _make_socket(af, socket.SOCK_DGRAM, source) + with cm as s: + send_udp(s, wire, destination, expiration) + (r, received_time) = receive_udp( + s, + destination, + expiration, + ignore_unexpected, + one_rr_per_rrset, + q.keyring, + q.mac, + ignore_trailing, + raise_on_truncation, + ) + r.time = received_time - begin_time + if not q.is_response(r): + raise BadResponse + return r + assert ( + False # help mypy figure out we can't get here lgtm[py/unreachable-statement] + ) + + +def udp_with_fallback( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 53, + source: Optional[str] = None, + source_port: int = 0, + ignore_unexpected: bool = False, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + udp_sock: Optional[Any] = None, + tcp_sock: Optional[Any] = None, +) -> Tuple[dns.message.Message, bool]: + """Return the response to the query, trying UDP first and falling back + to TCP if UDP results in a truncated response. 
+ + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the + query times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 53. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *ignore_unexpected*, a ``bool``. If ``True``, ignore responses from + unexpected sources. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *udp_sock*, a ``socket.socket``, or ``None``, the socket to use for the + UDP query. If ``None``, the default, a socket is created. Note that + if a socket is provided, it must be a nonblocking datagram socket, + and the *source* and *source_port* are ignored for the UDP query. + + *tcp_sock*, a ``socket.socket``, or ``None``, the connected socket to use for the + TCP query. If ``None``, the default, a socket is created. Note that + if a socket is provided, it must be a nonblocking connected stream + socket, and *where*, *source* and *source_port* are ignored for the TCP + query. + + Returns a (``dns.message.Message``, tcp) tuple where tcp is ``True`` + if and only if TCP was used. + """ + try: + response = udp( + q, + where, + timeout, + port, + source, + source_port, + ignore_unexpected, + one_rr_per_rrset, + ignore_trailing, + True, + udp_sock, + ) + return (response, False) + except dns.message.Truncated: + response = tcp( + q, + where, + timeout, + port, + source, + source_port, + one_rr_per_rrset, + ignore_trailing, + tcp_sock, + ) + return (response, True) + + +def _net_read(sock, count, expiration): + """Read the specified number of bytes from sock. Keep trying until we + either get the desired amount, or we hit EOF. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + s = b"" + while count > 0: + try: + n = sock.recv(count) + if n == b"": + raise EOFError + count -= len(n) + s += n + except (BlockingIOError, ssl.SSLWantReadError): + _wait_for_readable(sock, expiration) + except ssl.SSLWantWriteError: # pragma: no cover + _wait_for_writable(sock, expiration) + return s + + +def _net_write(sock, data, expiration): + """Write the specified data to the socket. + A Timeout exception will be raised if the operation is not completed + by the expiration time. + """ + current = 0 + l = len(data) + while current < l: + try: + current += sock.send(data[current:]) + except (BlockingIOError, ssl.SSLWantWriteError): + _wait_for_writable(sock, expiration) + except ssl.SSLWantReadError: # pragma: no cover + _wait_for_readable(sock, expiration) + + +def send_tcp( + sock: Any, + what: Union[dns.message.Message, bytes], + expiration: Optional[float] = None, +) -> Tuple[int, float]: + """Send a DNS message to the specified TCP socket. + + *sock*, a ``socket``. + + *what*, a ``bytes`` or ``dns.message.Message``, the message to send. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + Returns an ``(int, float)`` tuple of bytes sent and the sent time. 
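+
+ A minimal illustrative pairing with ``receive_tcp`` (8.8.8.8 is used
+ here only as an example of a reachable resolver)::
+
+ import socket
+ import time
+
+ import dns.message
+ import dns.query
+
+ q = dns.message.make_query('dnspython.org', 'A')
+ s = socket.create_connection(('8.8.8.8', 53), timeout=5)
+ s.setblocking(False)
+ expiration = time.time() + 5.0
+ dns.query.send_tcp(s, q, expiration)
+ (r, when) = dns.query.receive_tcp(s, expiration)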
+ """ + + if isinstance(what, dns.message.Message): + wire = what.to_wire() + else: + wire = what + l = len(wire) + # copying the wire into tcpmsg is inefficient, but lets us + # avoid writev() or doing a short write that would get pushed + # onto the net + tcpmsg = struct.pack("!H", l) + wire + sent_time = time.time() + _net_write(sock, tcpmsg, expiration) + return (len(tcpmsg), sent_time) + + +def receive_tcp( + sock: Any, + expiration: Optional[float] = None, + one_rr_per_rrset: bool = False, + keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, + request_mac: Optional[bytes] = b"", + ignore_trailing: bool = False, +) -> Tuple[dns.message.Message, float]: + """Read a DNS message from a TCP socket. + + *sock*, a ``socket``. + + *expiration*, a ``float`` or ``None``, the absolute time at which + a timeout exception should be raised. If ``None``, no timeout will + occur. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *keyring*, a ``dict``, the keyring to use for TSIG. + + *request_mac*, a ``bytes`` or ``None``, the MAC of the request (for TSIG). + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + Raises if the message is malformed, if network errors occur, of if + there is a timeout. + + Returns a ``(dns.message.Message, float)`` tuple of the received message + and the received time. + """ + + ldata = _net_read(sock, 2, expiration) + (l,) = struct.unpack("!H", ldata) + wire = _net_read(sock, l, expiration) + received_time = time.time() + r = dns.message.from_wire( + wire, + keyring=keyring, + request_mac=request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + return (r, received_time) + + +def _connect(s, address, expiration): + err = s.connect_ex(address) + if err == 0: + return + if err in (errno.EINPROGRESS, errno.EWOULDBLOCK, errno.EALREADY): + _wait_for_writable(s, expiration) + err = s.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) + if err != 0: + raise OSError(err, os.strerror(err)) + + +def tcp( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 53, + source: Optional[str] = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + sock: Optional[Any] = None, +) -> dns.message.Message: + """Return the response obtained after sending a query via TCP. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the + query times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 53. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *sock*, a ``socket.socket``, or ``None``, the connected socket to use for the + query. If ``None``, the default, a socket is created. Note that + if a socket is provided, it must be a nonblocking connected stream + socket, and *where*, *port*, *source* and *source_port* are ignored. + + Returns a ``dns.message.Message``. 
+ """ + + wire = q.to_wire() + (begin_time, expiration) = _compute_times(timeout) + if sock: + cm: contextlib.AbstractContextManager = contextlib.nullcontext(sock) + else: + (af, destination, source) = _destination_and_source( + where, port, source, source_port + ) + cm = _make_socket(af, socket.SOCK_STREAM, source) + with cm as s: + if not sock: + _connect(s, destination, expiration) + send_tcp(s, wire, expiration) + (r, received_time) = receive_tcp( + s, expiration, one_rr_per_rrset, q.keyring, q.mac, ignore_trailing + ) + r.time = received_time - begin_time + if not q.is_response(r): + raise BadResponse + return r + assert ( + False # help mypy figure out we can't get here lgtm[py/unreachable-statement] + ) + + +def _tls_handshake(s, expiration): + while True: + try: + s.do_handshake() + return + except ssl.SSLWantReadError: + _wait_for_readable(s, expiration) + except ssl.SSLWantWriteError: # pragma: no cover + _wait_for_writable(s, expiration) + + +def tls( + q: dns.message.Message, + where: str, + timeout: Optional[float] = None, + port: int = 853, + source: Optional[str] = None, + source_port: int = 0, + one_rr_per_rrset: bool = False, + ignore_trailing: bool = False, + sock: Optional[ssl.SSLSocket] = None, + ssl_context: Optional[ssl.SSLContext] = None, + server_hostname: Optional[str] = None, +) -> dns.message.Message: + """Return the response obtained after sending a query via TLS. + + *q*, a ``dns.message.Message``, the query to send + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *timeout*, a ``float`` or ``None``, the number of seconds to wait before the + query times out. If ``None``, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 853. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own + RRset. + + *ignore_trailing*, a ``bool``. If ``True``, ignore trailing + junk at end of the received message. + + *sock*, an ``ssl.SSLSocket``, or ``None``, the socket to use for + the query. If ``None``, the default, a socket is created. Note + that if a socket is provided, it must be a nonblocking connected + SSL stream socket, and *where*, *port*, *source*, *source_port*, + and *ssl_context* are ignored. + + *ssl_context*, an ``ssl.SSLContext``, the context to use when establishing + a TLS connection. If ``None``, the default, creates one with the default + configuration. + + *server_hostname*, a ``str`` containing the server's hostname. The + default is ``None``, which means that no hostname is known, and if an + SSL context is created, hostname checking will be disabled. + + Returns a ``dns.message.Message``. + + """ + + if sock: + # + # If a socket was provided, there's no special TLS handling needed. 
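+        # (The TLS session already exists on that socket, so *ssl_context*
+        # and *server_hostname* are likewise not consulted on this path.)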
+        #
+        return tcp(
+            q,
+            where,
+            timeout,
+            port,
+            source,
+            source_port,
+            one_rr_per_rrset,
+            ignore_trailing,
+            sock,
+        )
+
+    wire = q.to_wire()
+    (begin_time, expiration) = _compute_times(timeout)
+    (af, destination, source) = _destination_and_source(
+        where, port, source, source_port
+    )
+    if ssl_context is None and not sock:
+        ssl_context = ssl.create_default_context()
+        ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2
+        if server_hostname is None:
+            ssl_context.check_hostname = False
+
+    with _make_socket(
+        af,
+        socket.SOCK_STREAM,
+        source,
+        ssl_context=ssl_context,
+        server_hostname=server_hostname,
+    ) as s:
+        _connect(s, destination, expiration)
+        _tls_handshake(s, expiration)
+        send_tcp(s, wire, expiration)
+        (r, received_time) = receive_tcp(
+            s, expiration, one_rr_per_rrset, q.keyring, q.mac, ignore_trailing
+        )
+        r.time = received_time - begin_time
+        if not q.is_response(r):
+            raise BadResponse
+        return r
+    assert (
+        False  # help mypy figure out we can't get here lgtm[py/unreachable-statement]
+    )
+
+
+def quic(
+    q: dns.message.Message,
+    where: str,
+    timeout: Optional[float] = None,
+    port: int = 853,
+    source: Optional[str] = None,
+    source_port: int = 0,
+    one_rr_per_rrset: bool = False,
+    ignore_trailing: bool = False,
+    connection: Optional[dns.quic.SyncQuicConnection] = None,
+    verify: Union[bool, str] = True,
+    server_hostname: Optional[str] = None,
+) -> dns.message.Message:
+    """Return the response obtained after sending a query via DNS-over-QUIC.
+
+    *q*, a ``dns.message.Message``, the query to send.
+
+    *where*, a ``str``, the nameserver IP address.
+
+    *timeout*, a ``float`` or ``None``, the number of seconds to wait before the query
+    times out. If ``None``, the default, wait forever.
+
+    *port*, an ``int``, the port to send the query to. The default is 853.
+
+    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying the source
+    address. The default is the wildcard address.
+
+    *source_port*, an ``int``, the port from which to send the message. The default is
+    0.
+
+    *one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own RRset.
+
+    *ignore_trailing*, a ``bool``. If ``True``, ignore trailing junk at end of the
+    received message.
+
+    *connection*, a ``dns.quic.SyncQuicConnection``. If provided, the
+    connection to use to send the query.
+
+    *verify*, a ``bool`` or ``str``. If ``True``, then TLS certificate verification
+    of the server is done using the default CA bundle; if ``False``, then no
+    verification is done; if a ``str``, then it specifies the path to a certificate
+    file or directory which will be used for verification.
+
+    *server_hostname*, a ``str`` containing the server's hostname. The
+    default is ``None``, which means that no hostname is known, and if an
+    SSL context is created, hostname checking will be disabled.
+
+    Returns a ``dns.message.Message``.
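+
+    A minimal usage sketch (the server address is a placeholder)::
+
+        q = dns.message.make_query("example.com.", dns.rdatatype.A)
+        r = dns.query.quic(q, "192.0.2.1", timeout=5)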
+ """ + + if not dns.quic.have_quic: + raise NoDOQ("DNS-over-QUIC is not available.") # pragma: no cover + + q.id = 0 + wire = q.to_wire() + the_connection: dns.quic.SyncQuicConnection + the_manager: dns.quic.SyncQuicManager + if connection: + manager: contextlib.AbstractContextManager = contextlib.nullcontext(None) + the_connection = connection + else: + manager = dns.quic.SyncQuicManager( + verify_mode=verify, server_name=server_hostname + ) + the_manager = manager # for type checking happiness + + with manager: + if not connection: + the_connection = the_manager.connect(where, port, source, source_port) + (start, expiration) = _compute_times(timeout) + with the_connection.make_stream(timeout) as stream: + stream.send(wire, True) + wire = stream.receive(_remaining(expiration)) + finish = time.time() + r = dns.message.from_wire( + wire, + keyring=q.keyring, + request_mac=q.request_mac, + one_rr_per_rrset=one_rr_per_rrset, + ignore_trailing=ignore_trailing, + ) + r.time = max(finish - start, 0.0) + if not q.is_response(r): + raise BadResponse + return r + + +def xfr( + where: str, + zone: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.AXFR, + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + timeout: Optional[float] = None, + port: int = 53, + keyring: Optional[Dict[dns.name.Name, dns.tsig.Key]] = None, + keyname: Optional[Union[dns.name.Name, str]] = None, + relativize: bool = True, + lifetime: Optional[float] = None, + source: Optional[str] = None, + source_port: int = 0, + serial: int = 0, + use_udp: bool = False, + keyalgorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, +) -> Any: + """Return a generator for the responses to a zone transfer. + + *where*, a ``str`` containing an IPv4 or IPv6 address, where + to send the message. + + *zone*, a ``dns.name.Name`` or ``str``, the name of the zone to transfer. + + *rdtype*, an ``int`` or ``str``, the type of zone transfer. The + default is ``dns.rdatatype.AXFR``. ``dns.rdatatype.IXFR`` can be + used to do an incremental transfer instead. + + *rdclass*, an ``int`` or ``str``, the class of the zone transfer. + The default is ``dns.rdataclass.IN``. + + *timeout*, a ``float``, the number of seconds to wait for each + response message. If None, the default, wait forever. + + *port*, an ``int``, the port send the message to. The default is 53. + + *keyring*, a ``dict``, the keyring to use for TSIG. + + *keyname*, a ``dns.name.Name`` or ``str``, the name of the TSIG + key to use. + + *relativize*, a ``bool``. If ``True``, all names in the zone will be + relativized to the zone origin. It is essential that the + relativize setting matches the one specified to + ``dns.zone.from_xfr()`` if using this generator to make a zone. + + *lifetime*, a ``float``, the total number of seconds to spend + doing the transfer. If ``None``, the default, then there is no + limit on the time the transfer may take. + + *source*, a ``str`` containing an IPv4 or IPv6 address, specifying + the source address. The default is the wildcard address. + + *source_port*, an ``int``, the port from which to send the message. + The default is 0. + + *serial*, an ``int``, the SOA serial number to use as the base for + an IXFR diff sequence (only meaningful if *rdtype* is + ``dns.rdatatype.IXFR``). + + *use_udp*, a ``bool``. If ``True``, use UDP (only meaningful for IXFR). + + *keyalgorithm*, a ``dns.name.Name`` or ``str``, the TSIG algorithm to use. + + Raises on errors, and so does the generator. 
+ + Returns a generator of ``dns.message.Message`` objects. + """ + + if isinstance(zone, str): + zone = dns.name.from_text(zone) + rdtype = dns.rdatatype.RdataType.make(rdtype) + q = dns.message.make_query(zone, rdtype, rdclass) + if rdtype == dns.rdatatype.IXFR: + rrset = dns.rrset.from_text(zone, 0, "IN", "SOA", ". . %u 0 0 0 0" % serial) + q.authority.append(rrset) + if keyring is not None: + q.use_tsig(keyring, keyname, algorithm=keyalgorithm) + wire = q.to_wire() + (af, destination, source) = _destination_and_source( + where, port, source, source_port + ) + if use_udp and rdtype != dns.rdatatype.IXFR: + raise ValueError("cannot do a UDP AXFR") + sock_type = socket.SOCK_DGRAM if use_udp else socket.SOCK_STREAM + with _make_socket(af, sock_type, source) as s: + (_, expiration) = _compute_times(lifetime) + _connect(s, destination, expiration) + l = len(wire) + if use_udp: + _udp_send(s, wire, None, expiration) + else: + tcpmsg = struct.pack("!H", l) + wire + _net_write(s, tcpmsg, expiration) + done = False + delete_mode = True + expecting_SOA = False + soa_rrset = None + if relativize: + origin = zone + oname = dns.name.empty + else: + origin = None + oname = zone + tsig_ctx = None + while not done: + (_, mexpiration) = _compute_times(timeout) + if mexpiration is None or ( + expiration is not None and mexpiration > expiration + ): + mexpiration = expiration + if use_udp: + (wire, _) = _udp_recv(s, 65535, mexpiration) + else: + ldata = _net_read(s, 2, mexpiration) + (l,) = struct.unpack("!H", ldata) + wire = _net_read(s, l, mexpiration) + is_ixfr = rdtype == dns.rdatatype.IXFR + r = dns.message.from_wire( + wire, + keyring=q.keyring, + request_mac=q.mac, + xfr=True, + origin=origin, + tsig_ctx=tsig_ctx, + multi=True, + one_rr_per_rrset=is_ixfr, + ) + rcode = r.rcode() + if rcode != dns.rcode.NOERROR: + raise TransferError(rcode) + tsig_ctx = r.tsig_ctx + answer_index = 0 + if soa_rrset is None: + if not r.answer or r.answer[0].name != oname: + raise dns.exception.FormError("No answer or RRset not for qname") + rrset = r.answer[0] + if rrset.rdtype != dns.rdatatype.SOA: + raise dns.exception.FormError("first RRset is not an SOA") + answer_index = 1 + soa_rrset = rrset.copy() + if rdtype == dns.rdatatype.IXFR: + if dns.serial.Serial(soa_rrset[0].serial) <= serial: + # + # We're already up-to-date. + # + done = True + else: + expecting_SOA = True + # + # Process SOAs in the answer section (other than the initial + # SOA in the first message). + # + for rrset in r.answer[answer_index:]: + if done: + raise dns.exception.FormError("answers after final SOA") + if rrset.rdtype == dns.rdatatype.SOA and rrset.name == oname: + if expecting_SOA: + if rrset[0].serial != serial: + raise dns.exception.FormError("IXFR base serial mismatch") + expecting_SOA = False + elif rdtype == dns.rdatatype.IXFR: + delete_mode = not delete_mode + # + # If this SOA RRset is equal to the first we saw then we're + # finished. If this is an IXFR we also check that we're + # seeing the record in the expected part of the response. + # + if rrset == soa_rrset and ( + rdtype == dns.rdatatype.AXFR + or (rdtype == dns.rdatatype.IXFR and delete_mode) + ): + done = True + elif expecting_SOA: + # + # We made an IXFR request and are expecting another + # SOA RR, but saw something else, so this must be an + # AXFR response. 
+                #
+                rdtype = dns.rdatatype.AXFR
+                expecting_SOA = False
+        if done and q.keyring and not r.had_tsig:
+            raise dns.exception.FormError("missing TSIG")
+        yield r
+
+
+class UDPMode(enum.IntEnum):
+    """How should UDP be used in an IXFR from :py:func:`inbound_xfr()`?
+
+    NEVER means "never use UDP; always use TCP"
+    TRY_FIRST means "try to use UDP but fall back to TCP if needed"
+    ONLY means "raise ``dns.xfr.UseTCP`` if trying UDP does not succeed"
+    """
+
+    NEVER = 0
+    TRY_FIRST = 1
+    ONLY = 2
+
+
+def inbound_xfr(
+    where: str,
+    txn_manager: dns.transaction.TransactionManager,
+    query: Optional[dns.message.Message] = None,
+    port: int = 53,
+    timeout: Optional[float] = None,
+    lifetime: Optional[float] = None,
+    source: Optional[str] = None,
+    source_port: int = 0,
+    udp_mode: UDPMode = UDPMode.NEVER,
+) -> None:
+    """Conduct an inbound transfer and apply it via a transaction from the
+    txn_manager.
+
+    *where*, a ``str`` containing an IPv4 or IPv6 address, where
+    to send the message.
+
+    *txn_manager*, a ``dns.transaction.TransactionManager``, the txn_manager
+    for this transfer (typically a ``dns.zone.Zone``).
+
+    *query*, the query to send. If not supplied, a default query is
+    constructed using information from the *txn_manager*.
+
+    *port*, an ``int``, the port to send the message to. The default is 53.
+
+    *timeout*, a ``float``, the number of seconds to wait for each
+    response message. If ``None``, the default, wait forever.
+
+    *lifetime*, a ``float``, the total number of seconds to spend
+    doing the transfer. If ``None``, the default, then there is no
+    limit on the time the transfer may take.
+
+    *source*, a ``str`` containing an IPv4 or IPv6 address, specifying
+    the source address. The default is the wildcard address.
+
+    *source_port*, an ``int``, the port from which to send the message.
+    The default is 0.
+
+    *udp_mode*, a ``dns.query.UDPMode``, determines how UDP is used
+    for IXFRs. The default is ``dns.query.UDPMode.NEVER``, i.e. only use
+    TCP. Other possibilities are ``dns.query.UDPMode.TRY_FIRST``, which
+    means "try UDP but fall back to TCP if needed", and
+    ``dns.query.UDPMode.ONLY``, which means "try UDP and raise
+    ``dns.xfr.UseTCP`` if it does not succeed".
+
+    Raises on errors.
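+
+    A minimal usage sketch (zone name and server address are placeholders)::
+
+        zone = dns.zone.Zone("example.")
+        dns.query.inbound_xfr("192.0.2.1", zone)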
+ """ + if query is None: + (query, serial) = dns.xfr.make_query(txn_manager) + else: + serial = dns.xfr.extract_serial_from_query(query) + rdtype = query.question[0].rdtype + is_ixfr = rdtype == dns.rdatatype.IXFR + origin = txn_manager.from_wire_origin() + wire = query.to_wire() + (af, destination, source) = _destination_and_source( + where, port, source, source_port + ) + (_, expiration) = _compute_times(lifetime) + retry = True + while retry: + retry = False + if is_ixfr and udp_mode != UDPMode.NEVER: + sock_type = socket.SOCK_DGRAM + is_udp = True + else: + sock_type = socket.SOCK_STREAM + is_udp = False + with _make_socket(af, sock_type, source) as s: + _connect(s, destination, expiration) + if is_udp: + _udp_send(s, wire, None, expiration) + else: + tcpmsg = struct.pack("!H", len(wire)) + wire + _net_write(s, tcpmsg, expiration) + with dns.xfr.Inbound(txn_manager, rdtype, serial, is_udp) as inbound: + done = False + tsig_ctx = None + while not done: + (_, mexpiration) = _compute_times(timeout) + if mexpiration is None or ( + expiration is not None and mexpiration > expiration + ): + mexpiration = expiration + if is_udp: + (rwire, _) = _udp_recv(s, 65535, mexpiration) + else: + ldata = _net_read(s, 2, mexpiration) + (l,) = struct.unpack("!H", ldata) + rwire = _net_read(s, l, mexpiration) + r = dns.message.from_wire( + rwire, + keyring=query.keyring, + request_mac=query.mac, + xfr=True, + origin=origin, + tsig_ctx=tsig_ctx, + multi=(not is_udp), + one_rr_per_rrset=is_ixfr, + ) + try: + done = inbound.process_message(r) + except dns.xfr.UseTCP: + assert is_udp # should not happen if we used TCP! + if udp_mode == UDPMode.ONLY: + raise + done = True + retry = True + udp_mode = UDPMode.NEVER + continue + tsig_ctx = r.tsig_ctx + if not retry and query.keyring and not r.had_tsig: + raise dns.exception.FormError("missing TSIG") diff --git a/backend/test/lib/python3.8/site-packages/dns/quic/__init__.py b/backend/test/lib/python3.8/site-packages/dns/quic/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..69813f9f18cc28eac706225187fb93c342aed95b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/quic/__init__.py @@ -0,0 +1,75 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +try: + import aioquic.quic.configuration # type: ignore + + import dns.asyncbackend + from dns._asyncbackend import NullContext + from dns.quic._asyncio import ( + AsyncioQuicConnection, + AsyncioQuicManager, + AsyncioQuicStream, + ) + from dns.quic._common import AsyncQuicConnection, AsyncQuicManager + from dns.quic._sync import SyncQuicConnection, SyncQuicManager, SyncQuicStream + + have_quic = True + + def null_factory( + *args, # pylint: disable=unused-argument + **kwargs # pylint: disable=unused-argument + ): + return NullContext(None) + + def _asyncio_manager_factory( + context, *args, **kwargs # pylint: disable=unused-argument + ): + return AsyncioQuicManager(*args, **kwargs) + + # We have a context factory and a manager factory as for trio we need to have + # a nursery. 
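+    # Each entry maps a backend name to a (context_factory, manager_factory)
+    # pair: the context factory builds whatever surrounding context the
+    # manager needs (a no-op for asyncio, a nursery for trio).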
+ + _async_factories = {"asyncio": (null_factory, _asyncio_manager_factory)} + + try: + import trio + + from dns.quic._trio import ( # pylint: disable=ungrouped-imports + TrioQuicConnection, + TrioQuicManager, + TrioQuicStream, + ) + + def _trio_context_factory(): + return trio.open_nursery() + + def _trio_manager_factory(context, *args, **kwargs): + return TrioQuicManager(context, *args, **kwargs) + + _async_factories["trio"] = (_trio_context_factory, _trio_manager_factory) + except ImportError: + pass + + def factories_for_backend(backend=None): + if backend is None: + backend = dns.asyncbackend.get_default_backend() + return _async_factories[backend.name()] + +except ImportError: + have_quic = False + + from typing import Any + + class AsyncQuicStream: # type: ignore + pass + + class AsyncQuicConnection: # type: ignore + async def make_stream(self) -> Any: + raise NotImplementedError + + class SyncQuicStream: # type: ignore + pass + + class SyncQuicConnection: # type: ignore + def make_stream(self) -> Any: + raise NotImplementedError diff --git a/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1b6839d41f4cec2d092d41b8bca544215ae32b15 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/_asyncio.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/_asyncio.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..96cd6960513023fab51f59daf054c7a17a286bcc Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/_asyncio.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/_common.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/_common.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0eea2fe256479bbae5d6c5625f2a0bacb87d0ca2 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/_common.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/_sync.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/_sync.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8cfd637954c2403be9141fce86793dbc0dff178e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/_sync.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/_trio.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/_trio.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..71ebdc3ed0267bb8b94d8f550ea840453f590c84 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/quic/__pycache__/_trio.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/quic/_asyncio.py b/backend/test/lib/python3.8/site-packages/dns/quic/_asyncio.py new file mode 100644 index 0000000000000000000000000000000000000000..e1c52339d30ca0332593254fbaaed677657e74f1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/quic/_asyncio.py @@ -0,0 +1,223 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + 
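+"""asyncio implementation of DNS-over-QUIC, providing AsyncioQuicStream,
+AsyncioQuicConnection, and AsyncioQuicManager on top of asyncio tasks,
+events, and conditions."""
+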
+import asyncio +import socket +import ssl +import struct +import time + +import aioquic.quic.configuration # type: ignore +import aioquic.quic.connection # type: ignore +import aioquic.quic.events # type: ignore + +import dns.asyncbackend +import dns.exception +import dns.inet +from dns.quic._common import ( + QUIC_MAX_DATAGRAM, + AsyncQuicConnection, + AsyncQuicManager, + BaseQuicStream, + UnexpectedEOF, +) + + +class AsyncioQuicStream(BaseQuicStream): + def __init__(self, connection, stream_id): + super().__init__(connection, stream_id) + self._wake_up = asyncio.Condition() + + async def _wait_for_wake_up(self): + async with self._wake_up: + await self._wake_up.wait() + + async def wait_for(self, amount, expiration): + while True: + timeout = self._timeout_from_expiration(expiration) + if self._buffer.have(amount): + return + self._expecting = amount + try: + await asyncio.wait_for(self._wait_for_wake_up(), timeout) + except TimeoutError: + raise dns.exception.Timeout + self._expecting = 0 + + async def receive(self, timeout=None): + expiration = self._expiration_from_timeout(timeout) + await self.wait_for(2, expiration) + (size,) = struct.unpack("!H", self._buffer.get(2)) + await self.wait_for(size, expiration) + return self._buffer.get(size) + + async def send(self, datagram, is_end=False): + data = self._encapsulate(datagram) + await self._connection.write(self._stream_id, data, is_end) + + async def _add_input(self, data, is_end): + if self._common_add_input(data, is_end): + async with self._wake_up: + self._wake_up.notify() + + async def close(self): + self._close() + + # Streams are async context managers + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.close() + async with self._wake_up: + self._wake_up.notify() + return False + + +class AsyncioQuicConnection(AsyncQuicConnection): + def __init__(self, connection, address, port, source, source_port, manager=None): + super().__init__(connection, address, port, source, source_port, manager) + self._socket = None + self._handshake_complete = asyncio.Event() + self._socket_created = asyncio.Event() + self._wake_timer = asyncio.Condition() + self._receiver_task = None + self._sender_task = None + + async def _receiver(self): + try: + af = dns.inet.af_for_address(self._address) + backend = dns.asyncbackend.get_backend("asyncio") + # Note that peer is a low-level address tuple, but make_socket() wants + # a high-level address tuple, so we convert. + self._socket = await backend.make_socket( + af, socket.SOCK_DGRAM, 0, self._source, (self._peer[0], self._peer[1]) + ) + self._socket_created.set() + async with self._socket: + while not self._done: + (datagram, address) = await self._socket.recvfrom( + QUIC_MAX_DATAGRAM, None + ) + if address[0] != self._peer[0] or address[1] != self._peer[1]: + continue + self._connection.receive_datagram( + datagram, self._peer[0], time.time() + ) + # Wake up the timer in case the sender is sleeping, as there may be + # stuff to send now. 
+ async with self._wake_timer: + self._wake_timer.notify_all() + except Exception: + pass + finally: + self._done = True + async with self._wake_timer: + self._wake_timer.notify_all() + self._handshake_complete.set() + + async def _wait_for_wake_timer(self): + async with self._wake_timer: + await self._wake_timer.wait() + + async def _sender(self): + await self._socket_created.wait() + while not self._done: + datagrams = self._connection.datagrams_to_send(time.time()) + for datagram, address in datagrams: + assert address == self._peer[0] + await self._socket.sendto(datagram, self._peer, None) + (expiration, interval) = self._get_timer_values() + try: + await asyncio.wait_for(self._wait_for_wake_timer(), interval) + except Exception: + pass + self._handle_timer(expiration) + await self._handle_events() + + async def _handle_events(self): + count = 0 + while True: + event = self._connection.next_event() + if event is None: + return + if isinstance(event, aioquic.quic.events.StreamDataReceived): + stream = self._streams.get(event.stream_id) + if stream: + await stream._add_input(event.data, event.end_stream) + elif isinstance(event, aioquic.quic.events.HandshakeCompleted): + self._handshake_complete.set() + elif isinstance( + event, aioquic.quic.events.ConnectionTerminated + ) or isinstance(event, aioquic.quic.events.StreamReset): + self._done = True + self._receiver_task.cancel() + count += 1 + if count > 10: + # yield + count = 0 + await asyncio.sleep(0) + + async def write(self, stream, data, is_end=False): + self._connection.send_stream_data(stream, data, is_end) + async with self._wake_timer: + self._wake_timer.notify_all() + + def run(self): + if self._closed: + return + self._receiver_task = asyncio.Task(self._receiver()) + self._sender_task = asyncio.Task(self._sender()) + + async def make_stream(self, timeout=None): + try: + await asyncio.wait_for(self._handshake_complete.wait(), timeout) + except TimeoutError: + raise dns.exception.Timeout + if self._done: + raise UnexpectedEOF + stream_id = self._connection.get_next_available_stream_id(False) + stream = AsyncioQuicStream(self, stream_id) + self._streams[stream_id] = stream + return stream + + async def close(self): + if not self._closed: + self._manager.closed(self._peer[0], self._peer[1]) + self._closed = True + self._connection.close() + # sender might be blocked on this, so set it + self._socket_created.set() + await self._socket.close() + async with self._wake_timer: + self._wake_timer.notify_all() + try: + await self._receiver_task + except asyncio.CancelledError: + pass + try: + await self._sender_task + except asyncio.CancelledError: + pass + + +class AsyncioQuicManager(AsyncQuicManager): + def __init__(self, conf=None, verify_mode=ssl.CERT_REQUIRED, server_name=None): + super().__init__(conf, verify_mode, AsyncioQuicConnection, server_name) + + def connect(self, address, port=853, source=None, source_port=0): + (connection, start) = self._connect(address, port, source, source_port) + if start: + connection.run() + return connection + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + # Copy the iterator into a list as exiting things will mutate the connections + # table. 
+ connections = list(self._connections.values()) + for connection in connections: + await connection.close() + return False diff --git a/backend/test/lib/python3.8/site-packages/dns/quic/_common.py b/backend/test/lib/python3.8/site-packages/dns/quic/_common.py new file mode 100644 index 0000000000000000000000000000000000000000..38ec103ff8c04b0fa6da4b5e67c56f4fec989b11 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/quic/_common.py @@ -0,0 +1,180 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import socket +import struct +import time +from typing import Any, Optional + +import aioquic.quic.configuration # type: ignore +import aioquic.quic.connection # type: ignore + +import dns.inet + +QUIC_MAX_DATAGRAM = 2048 + + +class UnexpectedEOF(Exception): + pass + + +class Buffer: + def __init__(self): + self._buffer = b"" + self._seen_end = False + + def put(self, data, is_end): + if self._seen_end: + return + self._buffer += data + if is_end: + self._seen_end = True + + def have(self, amount): + if len(self._buffer) >= amount: + return True + if self._seen_end: + raise UnexpectedEOF + return False + + def seen_end(self): + return self._seen_end + + def get(self, amount): + assert self.have(amount) + data = self._buffer[:amount] + self._buffer = self._buffer[amount:] + return data + + +class BaseQuicStream: + def __init__(self, connection, stream_id): + self._connection = connection + self._stream_id = stream_id + self._buffer = Buffer() + self._expecting = 0 + + def id(self): + return self._stream_id + + def _expiration_from_timeout(self, timeout): + if timeout is not None: + expiration = time.time() + timeout + else: + expiration = None + return expiration + + def _timeout_from_expiration(self, expiration): + if expiration is not None: + timeout = max(expiration - time.time(), 0.0) + else: + timeout = None + return timeout + + # Subclass must implement receive() as sync / async and which returns a message + # or raises UnexpectedEOF. + + def _encapsulate(self, datagram): + l = len(datagram) + return struct.pack("!H", l) + datagram + + def _common_add_input(self, data, is_end): + self._buffer.put(data, is_end) + return self._expecting > 0 and self._buffer.have(self._expecting) + + def _close(self): + self._connection.close_stream(self._stream_id) + self._buffer.put(b"", True) # send EOF in case we haven't seen it. 
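+
+
+# Framing note: a QUIC stream carries DNS messages with the same two-byte
+# length prefix used for DNS over TCP. For example, _encapsulate(b"hello")
+# yields b"\x00\x05hello", which is why receivers first wait for 2 bytes
+# (the length) and then for exactly that many payload bytes.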
+ + +class BaseQuicConnection: + def __init__( + self, connection, address, port, source=None, source_port=0, manager=None + ): + self._done = False + self._connection = connection + self._address = address + self._port = port + self._closed = False + self._manager = manager + self._streams = {} + self._af = dns.inet.af_for_address(address) + self._peer = dns.inet.low_level_address_tuple((address, port)) + if source is None and source_port != 0: + if self._af == socket.AF_INET: + source = "0.0.0.0" + elif self._af == socket.AF_INET6: + source = "::" + else: + raise NotImplementedError + if source: + self._source = (source, source_port) + else: + self._source = None + + def close_stream(self, stream_id): + del self._streams[stream_id] + + def _get_timer_values(self, closed_is_special=True): + now = time.time() + expiration = self._connection.get_timer() + if expiration is None: + expiration = now + 3600 # arbitrary "big" value + interval = max(expiration - now, 0) + if self._closed and closed_is_special: + # lower sleep interval to avoid a race in the closing process + # which can lead to higher latency closing due to sleeping when + # we have events. + interval = min(interval, 0.05) + return (expiration, interval) + + def _handle_timer(self, expiration): + now = time.time() + if expiration <= now: + self._connection.handle_timer(now) + + +class AsyncQuicConnection(BaseQuicConnection): + async def make_stream(self, timeout: Optional[float] = None) -> Any: + pass + + +class BaseQuicManager: + def __init__(self, conf, verify_mode, connection_factory, server_name=None): + self._connections = {} + self._connection_factory = connection_factory + if conf is None: + verify_path = None + if isinstance(verify_mode, str): + verify_path = verify_mode + verify_mode = True + conf = aioquic.quic.configuration.QuicConfiguration( + alpn_protocols=["doq", "doq-i03"], + verify_mode=verify_mode, + server_name=server_name, + ) + if verify_path is not None: + conf.load_verify_locations(verify_path) + self._conf = conf + + def _connect(self, address, port=853, source=None, source_port=0): + connection = self._connections.get((address, port)) + if connection is not None: + return (connection, False) + qconn = aioquic.quic.connection.QuicConnection(configuration=self._conf) + qconn.connect(address, time.time()) + connection = self._connection_factory( + qconn, address, port, source, source_port, self + ) + self._connections[(address, port)] = connection + return (connection, True) + + def closed(self, address, port): + try: + del self._connections[(address, port)] + except KeyError: + pass + + +class AsyncQuicManager(BaseQuicManager): + def connect(self, address, port=853, source=None, source_port=0): + raise NotImplementedError diff --git a/backend/test/lib/python3.8/site-packages/dns/quic/_sync.py b/backend/test/lib/python3.8/site-packages/dns/quic/_sync.py new file mode 100644 index 0000000000000000000000000000000000000000..e944784dee94ac3ac39ff27a48653d534b638068 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/quic/_sync.py @@ -0,0 +1,226 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import selectors +import socket +import ssl +import struct +import threading +import time + +import aioquic.quic.configuration # type: ignore +import aioquic.quic.connection # type: ignore +import aioquic.quic.events # type: ignore + +import dns.exception +import dns.inet +from dns.quic._common import ( + QUIC_MAX_DATAGRAM, + BaseQuicConnection, + BaseQuicManager, + 
BaseQuicStream, + UnexpectedEOF, +) + +# Avoid circularity with dns.query +if hasattr(selectors, "PollSelector"): + _selector_class = selectors.PollSelector # type: ignore +else: + _selector_class = selectors.SelectSelector # type: ignore + + +class SyncQuicStream(BaseQuicStream): + def __init__(self, connection, stream_id): + super().__init__(connection, stream_id) + self._wake_up = threading.Condition() + self._lock = threading.Lock() + + def wait_for(self, amount, expiration): + while True: + timeout = self._timeout_from_expiration(expiration) + with self._lock: + if self._buffer.have(amount): + return + self._expecting = amount + with self._wake_up: + if not self._wake_up.wait(timeout): + raise dns.exception.Timeout + self._expecting = 0 + + def receive(self, timeout=None): + expiration = self._expiration_from_timeout(timeout) + self.wait_for(2, expiration) + with self._lock: + (size,) = struct.unpack("!H", self._buffer.get(2)) + self.wait_for(size, expiration) + with self._lock: + return self._buffer.get(size) + + def send(self, datagram, is_end=False): + data = self._encapsulate(datagram) + self._connection.write(self._stream_id, data, is_end) + + def _add_input(self, data, is_end): + if self._common_add_input(data, is_end): + with self._wake_up: + self._wake_up.notify() + + def close(self): + with self._lock: + self._close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + with self._wake_up: + self._wake_up.notify() + return False + + +class SyncQuicConnection(BaseQuicConnection): + def __init__(self, connection, address, port, source, source_port, manager): + super().__init__(connection, address, port, source, source_port, manager) + self._socket = socket.socket(self._af, socket.SOCK_DGRAM, 0) + self._socket.connect(self._peer) + (self._send_wakeup, self._receive_wakeup) = socket.socketpair() + self._receive_wakeup.setblocking(False) + self._socket.setblocking(False) + if self._source is not None: + try: + self._socket.bind( + dns.inet.low_level_address_tuple(self._source, self._af) + ) + except Exception: + self._socket.close() + raise + self._handshake_complete = threading.Event() + self._worker_thread = None + self._lock = threading.Lock() + + def _read(self): + count = 0 + while count < 10: + count += 1 + try: + datagram = self._socket.recv(QUIC_MAX_DATAGRAM) + except BlockingIOError: + return + with self._lock: + self._connection.receive_datagram(datagram, self._peer[0], time.time()) + + def _drain_wakeup(self): + while True: + try: + self._receive_wakeup.recv(32) + except BlockingIOError: + return + + def _worker(self): + try: + sel = _selector_class() + sel.register(self._socket, selectors.EVENT_READ, self._read) + sel.register(self._receive_wakeup, selectors.EVENT_READ, self._drain_wakeup) + while not self._done: + (expiration, interval) = self._get_timer_values(False) + items = sel.select(interval) + for key, _ in items: + key.data() + with self._lock: + self._handle_timer(expiration) + datagrams = self._connection.datagrams_to_send(time.time()) + for datagram, _ in datagrams: + try: + self._socket.send(datagram) + except BlockingIOError: + # we let QUIC handle any lossage + pass + self._handle_events() + finally: + with self._lock: + self._done = True + # Ensure anyone waiting for this gets woken up. 
+ self._handshake_complete.set() + + def _handle_events(self): + while True: + with self._lock: + event = self._connection.next_event() + if event is None: + return + if isinstance(event, aioquic.quic.events.StreamDataReceived): + with self._lock: + stream = self._streams.get(event.stream_id) + if stream: + stream._add_input(event.data, event.end_stream) + elif isinstance(event, aioquic.quic.events.HandshakeCompleted): + self._handshake_complete.set() + elif isinstance( + event, aioquic.quic.events.ConnectionTerminated + ) or isinstance(event, aioquic.quic.events.StreamReset): + with self._lock: + self._done = True + + def write(self, stream, data, is_end=False): + with self._lock: + self._connection.send_stream_data(stream, data, is_end) + self._send_wakeup.send(b"\x01") + + def run(self): + if self._closed: + return + self._worker_thread = threading.Thread(target=self._worker) + self._worker_thread.start() + + def make_stream(self, timeout=None): + if not self._handshake_complete.wait(timeout): + raise dns.exception.Timeout + with self._lock: + if self._done: + raise UnexpectedEOF + stream_id = self._connection.get_next_available_stream_id(False) + stream = SyncQuicStream(self, stream_id) + self._streams[stream_id] = stream + return stream + + def close_stream(self, stream_id): + with self._lock: + super().close_stream(stream_id) + + def close(self): + with self._lock: + if self._closed: + return + self._manager.closed(self._peer[0], self._peer[1]) + self._closed = True + self._connection.close() + self._send_wakeup.send(b"\x01") + self._worker_thread.join() + + +class SyncQuicManager(BaseQuicManager): + def __init__(self, conf=None, verify_mode=ssl.CERT_REQUIRED, server_name=None): + super().__init__(conf, verify_mode, SyncQuicConnection, server_name) + self._lock = threading.Lock() + + def connect(self, address, port=853, source=None, source_port=0): + with self._lock: + (connection, start) = self._connect(address, port, source, source_port) + if start: + connection.run() + return connection + + def closed(self, address, port): + with self._lock: + super().closed(address, port) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + # Copy the iterator into a list as exiting things will mutate the connections + # table. 
+ connections = list(self._connections.values()) + for connection in connections: + connection.close() + return False diff --git a/backend/test/lib/python3.8/site-packages/dns/quic/_trio.py b/backend/test/lib/python3.8/site-packages/dns/quic/_trio.py new file mode 100644 index 0000000000000000000000000000000000000000..ee07e4f6e8808fd8a134dd33a2c8e9f9e8146fea --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/quic/_trio.py @@ -0,0 +1,189 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import socket +import ssl +import struct +import time + +import aioquic.quic.configuration # type: ignore +import aioquic.quic.connection # type: ignore +import aioquic.quic.events # type: ignore +import trio + +import dns.exception +import dns.inet +from dns._asyncbackend import NullContext +from dns.quic._common import ( + QUIC_MAX_DATAGRAM, + AsyncQuicConnection, + AsyncQuicManager, + BaseQuicStream, + UnexpectedEOF, +) + + +class TrioQuicStream(BaseQuicStream): + def __init__(self, connection, stream_id): + super().__init__(connection, stream_id) + self._wake_up = trio.Condition() + + async def wait_for(self, amount): + while True: + if self._buffer.have(amount): + return + self._expecting = amount + async with self._wake_up: + await self._wake_up.wait() + self._expecting = 0 + + async def receive(self, timeout=None): + if timeout is None: + context = NullContext(None) + else: + context = trio.move_on_after(timeout) + with context: + await self.wait_for(2) + (size,) = struct.unpack("!H", self._buffer.get(2)) + await self.wait_for(size) + return self._buffer.get(size) + raise dns.exception.Timeout + + async def send(self, datagram, is_end=False): + data = self._encapsulate(datagram) + await self._connection.write(self._stream_id, data, is_end) + + async def _add_input(self, data, is_end): + if self._common_add_input(data, is_end): + async with self._wake_up: + self._wake_up.notify() + + async def close(self): + self._close() + + # Streams are async context managers + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + await self.close() + async with self._wake_up: + self._wake_up.notify() + return False + + +class TrioQuicConnection(AsyncQuicConnection): + def __init__(self, connection, address, port, source, source_port, manager=None): + super().__init__(connection, address, port, source, source_port, manager) + self._socket = trio.socket.socket(self._af, socket.SOCK_DGRAM, 0) + if self._source: + trio.socket.bind(dns.inet.low_level_address_tuple(self._source, self._af)) + self._handshake_complete = trio.Event() + self._run_done = trio.Event() + self._worker_scope = None + + async def _worker(self): + try: + await self._socket.connect(self._peer) + while not self._done: + (expiration, interval) = self._get_timer_values(False) + with trio.CancelScope( + deadline=trio.current_time() + interval + ) as self._worker_scope: + datagram = await self._socket.recv(QUIC_MAX_DATAGRAM) + self._connection.receive_datagram( + datagram, self._peer[0], time.time() + ) + self._worker_scope = None + self._handle_timer(expiration) + datagrams = self._connection.datagrams_to_send(time.time()) + for datagram, _ in datagrams: + await self._socket.send(datagram) + await self._handle_events() + finally: + self._done = True + self._handshake_complete.set() + + async def _handle_events(self): + count = 0 + while True: + event = self._connection.next_event() + if event is None: + return + if isinstance(event, 
aioquic.quic.events.StreamDataReceived): + stream = self._streams.get(event.stream_id) + if stream: + await stream._add_input(event.data, event.end_stream) + elif isinstance(event, aioquic.quic.events.HandshakeCompleted): + self._handshake_complete.set() + elif isinstance( + event, aioquic.quic.events.ConnectionTerminated + ) or isinstance(event, aioquic.quic.events.StreamReset): + self._done = True + self._socket.close() + count += 1 + if count > 10: + # yield + count = 0 + await trio.sleep(0) + + async def write(self, stream, data, is_end=False): + self._connection.send_stream_data(stream, data, is_end) + if self._worker_scope is not None: + self._worker_scope.cancel() + + async def run(self): + if self._closed: + return + async with trio.open_nursery() as nursery: + nursery.start_soon(self._worker) + self._run_done.set() + + async def make_stream(self, timeout=None): + if timeout is None: + context = NullContext(None) + else: + context = trio.move_on_after(timeout) + with context: + await self._handshake_complete.wait() + if self._done: + raise UnexpectedEOF + stream_id = self._connection.get_next_available_stream_id(False) + stream = TrioQuicStream(self, stream_id) + self._streams[stream_id] = stream + return stream + raise dns.exception.Timeout + + async def close(self): + if not self._closed: + self._manager.closed(self._peer[0], self._peer[1]) + self._closed = True + self._connection.close() + if self._worker_scope is not None: + self._worker_scope.cancel() + await self._run_done.wait() + + +class TrioQuicManager(AsyncQuicManager): + def __init__( + self, nursery, conf=None, verify_mode=ssl.CERT_REQUIRED, server_name=None + ): + super().__init__(conf, verify_mode, TrioQuicConnection, server_name) + self._nursery = nursery + + def connect(self, address, port=853, source=None, source_port=0): + (connection, start) = self._connect(address, port, source, source_port) + if start: + self._nursery.start_soon(connection.run) + return connection + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + # Copy the iterator into a list as exiting things will mutate the connections + # table. + connections = list(self._connections.values()) + for connection in connections: + await connection.close() + return False diff --git a/backend/test/lib/python3.8/site-packages/dns/rcode.py b/backend/test/lib/python3.8/site-packages/dns/rcode.py new file mode 100644 index 0000000000000000000000000000000000000000..8e6386f828019b379bbe97a3950ce604c4778f7f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rcode.py @@ -0,0 +1,168 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+
+"""DNS Result Codes."""
+
+from typing import Tuple
+
+import dns.enum
+import dns.exception
+
+
+class Rcode(dns.enum.IntEnum):
+    #: No error
+    NOERROR = 0
+    #: Format error
+    FORMERR = 1
+    #: Server failure
+    SERVFAIL = 2
+    #: Name does not exist ("Name Error" in RFC 1035 terminology).
+    NXDOMAIN = 3
+    #: Not implemented
+    NOTIMP = 4
+    #: Refused
+    REFUSED = 5
+    #: Name exists.
+    YXDOMAIN = 6
+    #: RRset exists.
+    YXRRSET = 7
+    #: RRset does not exist.
+    NXRRSET = 8
+    #: Not authoritative.
+    NOTAUTH = 9
+    #: Name not in zone.
+    NOTZONE = 10
+    #: DSO-TYPE Not Implemented
+    DSOTYPENI = 11
+    #: Bad EDNS version.
+    BADVERS = 16
+    #: TSIG Signature Failure
+    BADSIG = 16
+    #: Key not recognized.
+    BADKEY = 17
+    #: Signature out of time window.
+    BADTIME = 18
+    #: Bad TKEY Mode.
+    BADMODE = 19
+    #: Duplicate key name.
+    BADNAME = 20
+    #: Algorithm not supported.
+    BADALG = 21
+    #: Bad Truncation
+    BADTRUNC = 22
+    #: Bad/missing Server Cookie
+    BADCOOKIE = 23
+
+    @classmethod
+    def _maximum(cls):
+        return 4095
+
+    @classmethod
+    def _unknown_exception_class(cls):
+        return UnknownRcode
+
+
+class UnknownRcode(dns.exception.DNSException):
+    """A DNS rcode is unknown."""
+
+
+def from_text(text: str) -> Rcode:
+    """Convert text into an rcode.
+
+    *text*, a ``str``, the textual rcode or an integer in textual form.
+
+    Raises ``dns.rcode.UnknownRcode`` if the rcode mnemonic is unknown.
+
+    Returns a ``dns.rcode.Rcode``.
+    """
+
+    return Rcode.from_text(text)
+
+
+def from_flags(flags: int, ednsflags: int) -> Rcode:
+    """Return the rcode value encoded by flags and ednsflags.
+
+    *flags*, an ``int``, the DNS flags field.
+
+    *ednsflags*, an ``int``, the EDNS flags field.
+
+    Raises ``ValueError`` if rcode is < 0 or > 4095.
+
+    Returns a ``dns.rcode.Rcode``.
+    """
+
+    value = (flags & 0x000F) | ((ednsflags >> 20) & 0xFF0)
+    return Rcode.make(value)
+
+
+def to_flags(value: Rcode) -> Tuple[int, int]:
+    """Return a (flags, ednsflags) tuple which encodes the rcode.
+
+    *value*, a ``dns.rcode.Rcode``, the rcode.
+
+    Raises ``ValueError`` if rcode is < 0 or > 4095.
+
+    Returns an ``(int, int)`` tuple.
+    """
+
+    if value < 0 or value > 4095:
+        raise ValueError("rcode must be >= 0 and <= 4095")
+    v = value & 0xF
+    ev = (value & 0xFF0) << 20
+    return (v, ev)
+
+
+def to_text(value: Rcode, tsig: bool = False) -> str:
+    """Convert rcode into text.
+
+    *value*, a ``dns.rcode.Rcode``, the rcode.
+
+    Raises ``ValueError`` if rcode is < 0 or > 4095.
+
+    Returns a ``str``.
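+
+    For example, ``to_text(Rcode.NXDOMAIN)`` returns ``'NXDOMAIN'``, while
+    ``to_text(Rcode.BADVERS, True)`` returns ``'BADSIG'``, since code 16
+    denotes BADSIG in a TSIG context.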
+ """ + + if tsig and value == Rcode.BADVERS: + return "BADSIG" + return Rcode.to_text(value) + + +### BEGIN generated Rcode constants + +NOERROR = Rcode.NOERROR +FORMERR = Rcode.FORMERR +SERVFAIL = Rcode.SERVFAIL +NXDOMAIN = Rcode.NXDOMAIN +NOTIMP = Rcode.NOTIMP +REFUSED = Rcode.REFUSED +YXDOMAIN = Rcode.YXDOMAIN +YXRRSET = Rcode.YXRRSET +NXRRSET = Rcode.NXRRSET +NOTAUTH = Rcode.NOTAUTH +NOTZONE = Rcode.NOTZONE +DSOTYPENI = Rcode.DSOTYPENI +BADVERS = Rcode.BADVERS +BADSIG = Rcode.BADSIG +BADKEY = Rcode.BADKEY +BADTIME = Rcode.BADTIME +BADMODE = Rcode.BADMODE +BADNAME = Rcode.BADNAME +BADALG = Rcode.BADALG +BADTRUNC = Rcode.BADTRUNC +BADCOOKIE = Rcode.BADCOOKIE + +### END generated Rcode constants diff --git a/backend/test/lib/python3.8/site-packages/dns/rdata.py b/backend/test/lib/python3.8/site-packages/dns/rdata.py new file mode 100644 index 0000000000000000000000000000000000000000..0d262e8d85b362a29ee4e34416afeb5108b0da45 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdata.py @@ -0,0 +1,889 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS rdata.""" + +import base64 +import binascii +import inspect +import io +import itertools +import random +from importlib import import_module +from typing import Any, Dict, Optional, Tuple, Union + +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.ipv6 +import dns.name +import dns.rdataclass +import dns.rdatatype +import dns.tokenizer +import dns.ttl +import dns.wire + +_chunksize = 32 + +# We currently allow comparisons for rdata with relative names for backwards +# compatibility, but in the future we will not, as these kinds of comparisons +# can lead to subtle bugs if code is not carefully written. +# +# This switch allows the future behavior to be turned on so code can be +# tested with it. +_allow_relative_comparisons = True + + +class NoRelativeRdataOrdering(dns.exception.DNSException): + """An attempt was made to do an ordered comparison of one or more + rdata with relative names. The only reliable way of sorting rdata + is to use non-relativized rdata. + + """ + + +def _wordbreak(data, chunksize=_chunksize, separator=b" "): + """Break a binary string into chunks of chunksize characters separated by + a space. + """ + + if not chunksize: + return data.decode() + return separator.join( + [data[i : i + chunksize] for i in range(0, len(data), chunksize)] + ).decode() + + +# pylint: disable=unused-argument + + +def _hexify(data, chunksize=_chunksize, separator=b" ", **kw): + """Convert a binary string into its hex encoding, broken up into chunks + of chunksize characters separated by a separator. 
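+
+    For example, ``_hexify(b"ABC")`` returns ``'414243'``.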
+ """ + + return _wordbreak(binascii.hexlify(data), chunksize, separator) + + +def _base64ify(data, chunksize=_chunksize, separator=b" ", **kw): + """Convert a binary string into its base64 encoding, broken up into chunks + of chunksize characters separated by a separator. + """ + + return _wordbreak(base64.b64encode(data), chunksize, separator) + + +# pylint: enable=unused-argument + +__escaped = b'"\\' + + +def _escapify(qstring): + """Escape the characters in a quoted string which need it.""" + + if isinstance(qstring, str): + qstring = qstring.encode() + if not isinstance(qstring, bytearray): + qstring = bytearray(qstring) + + text = "" + for c in qstring: + if c in __escaped: + text += "\\" + chr(c) + elif c >= 0x20 and c < 0x7F: + text += chr(c) + else: + text += "\\%03d" % c + return text + + +def _truncate_bitmap(what): + """Determine the index of greatest byte that isn't all zeros, and + return the bitmap that contains all the bytes less than that index. + """ + + for i in range(len(what) - 1, -1, -1): + if what[i] != 0: + return what[0 : i + 1] + return what[0:1] + + +# So we don't have to edit all the rdata classes... +_constify = dns.immutable.constify + + +@dns.immutable.immutable +class Rdata: + """Base class for all DNS rdata types.""" + + __slots__ = ["rdclass", "rdtype", "rdcomment"] + + def __init__(self, rdclass, rdtype): + """Initialize an rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. + """ + + self.rdclass = self._as_rdataclass(rdclass) + self.rdtype = self._as_rdatatype(rdtype) + self.rdcomment = None + + def _get_all_slots(self): + return itertools.chain.from_iterable( + getattr(cls, "__slots__", []) for cls in self.__class__.__mro__ + ) + + def __getstate__(self): + # We used to try to do a tuple of all slots here, but it + # doesn't work as self._all_slots isn't available at + # __setstate__() time. Before that we tried to store a tuple + # of __slots__, but that didn't work as it didn't store the + # slots defined by ancestors. This older way didn't fail + # outright, but ended up with partially broken objects, e.g. + # if you unpickled an A RR it wouldn't have rdclass and rdtype + # attributes, and would compare badly. + state = {} + for slot in self._get_all_slots(): + state[slot] = getattr(self, slot) + return state + + def __setstate__(self, state): + for slot, val in state.items(): + object.__setattr__(self, slot, val) + if not hasattr(self, "rdcomment"): + # Pickled rdata from 2.0.x might not have a rdcomment, so add + # it if needed. + object.__setattr__(self, "rdcomment", None) + + def covers(self) -> dns.rdatatype.RdataType: + """Return the type a Rdata covers. + + DNS SIG/RRSIG rdatas apply to a specific type; this type is + returned by the covers() function. If the rdata type is not + SIG or RRSIG, dns.rdatatype.NONE is returned. This is useful when + creating rdatasets, allowing the rdataset to contain only RRSIGs + of a particular type, e.g. RRSIG(NS). + + Returns a ``dns.rdatatype.RdataType``. + """ + + return dns.rdatatype.NONE + + def extended_rdatatype(self) -> int: + """Return a 32-bit type value, the least significant 16 bits of + which are the ordinary DNS type, and the upper 16 bits of which are + the "covered" type, if any. + + Returns an ``int``. + """ + + return self.covers() << 16 | self.rdtype + + def to_text( + self, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + **kw: Dict[str, Any] + ) -> str: + """Convert an rdata to text format. 
+ + Returns a ``str``. + """ + + raise NotImplementedError # pragma: no cover + + def _to_wire( + self, + file: Optional[Any], + compress: Optional[dns.name.CompressType] = None, + origin: Optional[dns.name.Name] = None, + canonicalize: bool = False, + ) -> bytes: + raise NotImplementedError # pragma: no cover + + def to_wire( + self, + file: Optional[Any] = None, + compress: Optional[dns.name.CompressType] = None, + origin: Optional[dns.name.Name] = None, + canonicalize: bool = False, + ) -> bytes: + """Convert an rdata to wire format. + + Returns a ``bytes`` or ``None``. + """ + + if file: + return self._to_wire(file, compress, origin, canonicalize) + else: + f = io.BytesIO() + self._to_wire(f, compress, origin, canonicalize) + return f.getvalue() + + def to_generic( + self, origin: Optional[dns.name.Name] = None + ) -> "dns.rdata.GenericRdata": + """Creates a dns.rdata.GenericRdata equivalent of this rdata. + + Returns a ``dns.rdata.GenericRdata``. + """ + return dns.rdata.GenericRdata( + self.rdclass, self.rdtype, self.to_wire(origin=origin) + ) + + def to_digestable(self, origin: Optional[dns.name.Name] = None) -> bytes: + """Convert rdata to a format suitable for digesting in hashes. This + is also the DNSSEC canonical form. + + Returns a ``bytes``. + """ + + return self.to_wire(origin=origin, canonicalize=True) + + def __repr__(self): + covers = self.covers() + if covers == dns.rdatatype.NONE: + ctext = "" + else: + ctext = "(" + dns.rdatatype.to_text(covers) + ")" + return ( + "<DNS " + + dns.rdataclass.to_text(self.rdclass) + + " " + + dns.rdatatype.to_text(self.rdtype) + + ctext + + " rdata: " + + str(self) + + ">" + ) + + def __str__(self): + return self.to_text() + + def _cmp(self, other): + """Compare an rdata with another rdata of the same rdtype and + rdclass. + + For rdata with only absolute names: + Return < 0 if self < other in the DNSSEC ordering, 0 if self + == other, and > 0 if self > other. + For rdata with at least one relative names: + The rdata sorts before any rdata with only absolute names. + When compared with another relative rdata, all names are + made absolute as if they were relative to the root, as the + proper origin is not available. While this creates a stable + ordering, it is NOT guaranteed to be the DNSSEC ordering. + In the future, all ordering comparisons for rdata with + relative names will be disallowed. + """ + try: + our = self.to_digestable() + our_relative = False + except dns.name.NeedAbsoluteNameOrOrigin: + if _allow_relative_comparisons: + our = self.to_digestable(dns.name.root) + our_relative = True + try: + their = other.to_digestable() + their_relative = False + except dns.name.NeedAbsoluteNameOrOrigin: + if _allow_relative_comparisons: + their = other.to_digestable(dns.name.root) + their_relative = True + if _allow_relative_comparisons: + if our_relative != their_relative: + # For the purpose of comparison, all rdata with at least one + # relative name is less than an rdata with only absolute names. 
+ if our_relative: + return -1 + else: + return 1 + elif our_relative or their_relative: + raise NoRelativeRdataOrdering + if our == their: + return 0 + elif our > their: + return 1 + else: + return -1 + + def __eq__(self, other): + if not isinstance(other, Rdata): + return False + if self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return False + our_relative = False + their_relative = False + try: + our = self.to_digestable() + except dns.name.NeedAbsoluteNameOrOrigin: + our = self.to_digestable(dns.name.root) + our_relative = True + try: + their = other.to_digestable() + except dns.name.NeedAbsoluteNameOrOrigin: + their = other.to_digestable(dns.name.root) + their_relative = True + if our_relative != their_relative: + return False + return our == their + + def __ne__(self, other): + if not isinstance(other, Rdata): + return True + if self.rdclass != other.rdclass or self.rdtype != other.rdtype: + return True + return not self.__eq__(other) + + def __lt__(self, other): + if ( + not isinstance(other, Rdata) + or self.rdclass != other.rdclass + or self.rdtype != other.rdtype + ): + return NotImplemented + return self._cmp(other) < 0 + + def __le__(self, other): + if ( + not isinstance(other, Rdata) + or self.rdclass != other.rdclass + or self.rdtype != other.rdtype + ): + return NotImplemented + return self._cmp(other) <= 0 + + def __ge__(self, other): + if ( + not isinstance(other, Rdata) + or self.rdclass != other.rdclass + or self.rdtype != other.rdtype + ): + return NotImplemented + return self._cmp(other) >= 0 + + def __gt__(self, other): + if ( + not isinstance(other, Rdata) + or self.rdclass != other.rdclass + or self.rdtype != other.rdtype + ): + return NotImplemented + return self._cmp(other) > 0 + + def __hash__(self): + return hash(self.to_digestable(dns.name.root)) + + @classmethod + def from_text( + cls, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + tok: dns.tokenizer.Tokenizer, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + relativize_to: Optional[dns.name.Name] = None, + ) -> "Rdata": + raise NotImplementedError # pragma: no cover + + @classmethod + def from_wire_parser( + cls, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + parser: dns.wire.Parser, + origin: Optional[dns.name.Name] = None, + ) -> "Rdata": + raise NotImplementedError # pragma: no cover + + def replace(self, **kwargs: Any) -> "Rdata": + """ + Create a new Rdata instance based on the instance replace was + invoked on. It is possible to pass different parameters to + override the corresponding properties of the base Rdata. + + Any field specific to the Rdata type can be replaced, but the + *rdtype* and *rdclass* fields cannot. + + Returns an instance of the same Rdata subclass as *self*. + """ + + # Get the constructor parameters. + parameters = inspect.signature(self.__init__).parameters # type: ignore + + # Ensure that all of the arguments correspond to valid fields. + # Don't allow rdclass or rdtype to be changed, though. + for key in kwargs: + if key == "rdcomment": + continue + if key not in parameters: + raise AttributeError( + "'{}' object has no attribute '{}'".format( + self.__class__.__name__, key + ) + ) + if key in ("rdclass", "rdtype"): + raise AttributeError( + "Cannot overwrite '{}' attribute '{}'".format( + self.__class__.__name__, key + ) + ) + + # Construct the parameter list. For each field, use the value in + # kwargs if present, and the current value otherwise. 
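+        # (Sketch: on an MX rdata, rd.replace(preference=10) re-invokes the
+        # constructor with the new preference and the existing exchange, so
+        # the usual __init__ validation still runs.)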
+ args = (kwargs.get(key, getattr(self, key)) for key in parameters) + + # Create, validate, and return the new object. + rd = self.__class__(*args) + # The comment is not set in the constructor, so give it special + # handling. + rdcomment = kwargs.get("rdcomment", self.rdcomment) + if rdcomment is not None: + object.__setattr__(rd, "rdcomment", rdcomment) + return rd + + # Type checking and conversion helpers. These are class methods as + # they don't touch object state and may be useful to others. + + @classmethod + def _as_rdataclass(cls, value): + return dns.rdataclass.RdataClass.make(value) + + @classmethod + def _as_rdatatype(cls, value): + return dns.rdatatype.RdataType.make(value) + + @classmethod + def _as_bytes( + cls, + value: Any, + encode: bool = False, + max_length: Optional[int] = None, + empty_ok: bool = True, + ) -> bytes: + if encode and isinstance(value, str): + bvalue = value.encode() + elif isinstance(value, bytearray): + bvalue = bytes(value) + elif isinstance(value, bytes): + bvalue = value + else: + raise ValueError("not bytes") + if max_length is not None and len(bvalue) > max_length: + raise ValueError("too long") + if not empty_ok and len(bvalue) == 0: + raise ValueError("empty bytes not allowed") + return bvalue + + @classmethod + def _as_name(cls, value): + # Note that proper name conversion (e.g. with origin and IDNA + # awareness) is expected to be done via from_text. This is just + # a simple thing for people invoking the constructor directly. + if isinstance(value, str): + return dns.name.from_text(value) + elif not isinstance(value, dns.name.Name): + raise ValueError("not a name") + return value + + @classmethod + def _as_uint8(cls, value): + if not isinstance(value, int): + raise ValueError("not an integer") + if value < 0 or value > 255: + raise ValueError("not a uint8") + return value + + @classmethod + def _as_uint16(cls, value): + if not isinstance(value, int): + raise ValueError("not an integer") + if value < 0 or value > 65535: + raise ValueError("not a uint16") + return value + + @classmethod + def _as_uint32(cls, value): + if not isinstance(value, int): + raise ValueError("not an integer") + if value < 0 or value > 4294967295: + raise ValueError("not a uint32") + return value + + @classmethod + def _as_uint48(cls, value): + if not isinstance(value, int): + raise ValueError("not an integer") + if value < 0 or value > 281474976710655: + raise ValueError("not a uint48") + return value + + @classmethod + def _as_int(cls, value, low=None, high=None): + if not isinstance(value, int): + raise ValueError("not an integer") + if low is not None and value < low: + raise ValueError("value too small") + if high is not None and value > high: + raise ValueError("value too large") + return value + + @classmethod + def _as_ipv4_address(cls, value): + if isinstance(value, str): + # call to check validity + dns.ipv4.inet_aton(value) + return value + elif isinstance(value, bytes): + return dns.ipv4.inet_ntoa(value) + else: + raise ValueError("not an IPv4 address") + + @classmethod + def _as_ipv6_address(cls, value): + if isinstance(value, str): + # call to check validity + dns.ipv6.inet_aton(value) + return value + elif isinstance(value, bytes): + return dns.ipv6.inet_ntoa(value) + else: + raise ValueError("not an IPv6 address") + + @classmethod + def _as_bool(cls, value): + if isinstance(value, bool): + return value + else: + raise ValueError("not a boolean") + + @classmethod + def _as_ttl(cls, value): + if isinstance(value, int): + return cls._as_int(value, 0, 
dns.ttl.MAX_TTL) + elif isinstance(value, str): + return dns.ttl.from_text(value) + else: + raise ValueError("not a TTL") + + @classmethod + def _as_tuple(cls, value, as_value): + try: + # For user convenience, if value is a singleton of the list + # element type, wrap it in a tuple. + return (as_value(value),) + except Exception: + # Otherwise, check each element of the iterable *value* + # against *as_value*. + return tuple(as_value(v) for v in value) + + # Processing order + + @classmethod + def _processing_order(cls, iterable): + items = list(iterable) + random.shuffle(items) + return items + + +@dns.immutable.immutable +class GenericRdata(Rdata): + + """Generic Rdata Class + + This class is used for rdata types for which we have no better + implementation. It implements the DNS "unknown RRs" scheme. + """ + + __slots__ = ["data"] + + def __init__(self, rdclass, rdtype, data): + super().__init__(rdclass, rdtype) + self.data = data + + def to_text( + self, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + **kw: Dict[str, Any] + ) -> str: + return r"\# %d " % len(self.data) + _hexify(self.data, **kw) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + token = tok.get() + if not token.is_identifier() or token.value != r"\#": + raise dns.exception.SyntaxError(r"generic rdata does not start with \#") + length = tok.get_int() + hex = tok.concatenate_remaining_identifiers(True).encode() + data = binascii.unhexlify(hex) + if len(data) != length: + raise dns.exception.SyntaxError("generic rdata hex data has wrong length") + return cls(rdclass, rdtype, data) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.data) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + return cls(rdclass, rdtype, parser.get_remaining()) + + +_rdata_classes: Dict[ + Tuple[dns.rdataclass.RdataClass, dns.rdatatype.RdataType], Any +] = {} +_module_prefix = "dns.rdtypes" + + +def get_rdata_class(rdclass, rdtype): + cls = _rdata_classes.get((rdclass, rdtype)) + if not cls: + cls = _rdata_classes.get((dns.rdatatype.ANY, rdtype)) + if not cls: + rdclass_text = dns.rdataclass.to_text(rdclass) + rdtype_text = dns.rdatatype.to_text(rdtype) + rdtype_text = rdtype_text.replace("-", "_") + try: + mod = import_module( + ".".join([_module_prefix, rdclass_text, rdtype_text]) + ) + cls = getattr(mod, rdtype_text) + _rdata_classes[(rdclass, rdtype)] = cls + except ImportError: + try: + mod = import_module(".".join([_module_prefix, "ANY", rdtype_text])) + cls = getattr(mod, rdtype_text) + _rdata_classes[(dns.rdataclass.ANY, rdtype)] = cls + _rdata_classes[(rdclass, rdtype)] = cls + except ImportError: + pass + if not cls: + cls = GenericRdata + _rdata_classes[(rdclass, rdtype)] = cls + return cls + + +def from_text( + rdclass: Union[dns.rdataclass.RdataClass, str], + rdtype: Union[dns.rdatatype.RdataType, str], + tok: Union[dns.tokenizer.Tokenizer, str], + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + relativize_to: Optional[dns.name.Name] = None, + idna_codec: Optional[dns.name.IDNACodec] = None, +) -> Rdata: + """Build an rdata object from text format. + + This function attempts to dynamically load a class which + implements the specified rdata class and type. If there is no + class-and-type-specific implementation, the GenericRdata class + is used. 
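+
+    (For instance, a type with no type-specific implementation, such as one
+    in the private-use range, falls back to ``GenericRdata`` and the
+    RFC 3597 ``\#`` generic syntax.)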
+ + Once a class is chosen, its from_text() class method is called + with the parameters to this function. + + If *tok* is a ``str``, then a tokenizer is created and the string + is used as its input. + + *rdclass*, a ``dns.rdataclass.RdataClass`` or ``str``, the rdataclass. + + *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdatatype. + + *tok*, a ``dns.tokenizer.Tokenizer`` or a ``str``. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use if a tokenizer needs to be created. If + ``None``, the default IDNA 2003 encoder/decoder is used. If a + tokenizer is not created, then the codec associated with the tokenizer + is the one that is used. + + Returns an instance of the chosen Rdata subclass. + + """ + if isinstance(tok, str): + tok = dns.tokenizer.Tokenizer(tok, idna_codec=idna_codec) + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + cls = get_rdata_class(rdclass, rdtype) + with dns.exception.ExceptionWrapper(dns.exception.SyntaxError): + rdata = None + if cls != GenericRdata: + # peek at first token + token = tok.get() + tok.unget(token) + if token.is_identifier() and token.value == r"\#": + # + # Known type using the generic syntax. Extract the + # wire form from the generic syntax, and then run + # from_wire on it. + # + grdata = GenericRdata.from_text( + rdclass, rdtype, tok, origin, relativize, relativize_to + ) + rdata = from_wire( + rdclass, rdtype, grdata.data, 0, len(grdata.data), origin + ) + # + # If this comparison isn't equal, then there must have been + # compressed names in the wire format, which is an error, + # there being no reasonable context to decompress with. + # + rwire = rdata.to_wire() + if rwire != grdata.data: + raise dns.exception.SyntaxError( + "compressed data in " + "generic syntax form " + "of known rdatatype" + ) + if rdata is None: + rdata = cls.from_text( + rdclass, rdtype, tok, origin, relativize, relativize_to + ) + token = tok.get_eol_as_token() + if token.comment is not None: + object.__setattr__(rdata, "rdcomment", token.comment) + return rdata + + +def from_wire_parser( + rdclass: Union[dns.rdataclass.RdataClass, str], + rdtype: Union[dns.rdatatype.RdataType, str], + parser: dns.wire.Parser, + origin: Optional[dns.name.Name] = None, +) -> Rdata: + """Build an rdata object from wire format + + This function attempts to dynamically load a class which + implements the specified rdata class and type. If there is no + class-and-type-specific implementation, the GenericRdata class + is used. + + Once a class is chosen, its from_wire() class method is called + with the parameters to this function. + + *rdclass*, a ``dns.rdataclass.RdataClass`` or ``str``, the rdataclass. + + *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdatatype. + + *parser*, a ``dns.wire.Parser``, the parser, which should be + restricted to the rdata length. + + *origin*, a ``dns.name.Name`` (or ``None``). If not ``None``, + then names will be relativized to this origin. + + Returns an instance of the chosen Rdata subclass. 
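+
+    A minimal usage sketch (class, type, and address are illustrative)::
+
+        import dns.rdata
+        rdata = dns.rdata.from_text('IN', 'A', '192.0.2.1')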
+ """ + + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + cls = get_rdata_class(rdclass, rdtype) + with dns.exception.ExceptionWrapper(dns.exception.FormError): + return cls.from_wire_parser(rdclass, rdtype, parser, origin) + + +def from_wire( + rdclass: Union[dns.rdataclass.RdataClass, str], + rdtype: Union[dns.rdatatype.RdataType, str], + wire: bytes, + current: int, + rdlen: int, + origin: Optional[dns.name.Name] = None, +) -> Rdata: + """Build an rdata object from wire format + + This function attempts to dynamically load a class which + implements the specified rdata class and type. If there is no + class-and-type-specific implementation, the GenericRdata class + is used. + + Once a class is chosen, its from_wire() class method is called + with the parameters to this function. + + *rdclass*, an ``int``, the rdataclass. + + *rdtype*, an ``int``, the rdatatype. + + *wire*, a ``bytes``, the wire-format message. + + *current*, an ``int``, the offset in wire of the beginning of + the rdata. + + *rdlen*, an ``int``, the length of the wire-format rdata + + *origin*, a ``dns.name.Name`` (or ``None``). If not ``None``, + then names will be relativized to this origin. + + Returns an instance of the chosen Rdata subclass. + """ + parser = dns.wire.Parser(wire, current) + with parser.restrict_to(rdlen): + return from_wire_parser(rdclass, rdtype, parser, origin) + + +class RdatatypeExists(dns.exception.DNSException): + """DNS rdatatype already exists.""" + + supp_kwargs = {"rdclass", "rdtype"} + fmt = ( + "The rdata type with class {rdclass:d} and rdtype {rdtype:d} " + + "already exists." + ) + + +def register_type( + implementation: Any, + rdtype: int, + rdtype_text: str, + is_singleton: bool = False, + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, +) -> None: + """Dynamically register a module to handle an rdatatype. + + *implementation*, a module implementing the type in the usual dnspython + way. + + *rdtype*, an ``int``, the rdatatype to register. + + *rdtype_text*, a ``str``, the textual form of the rdatatype. + + *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. + RRsets of the type can have only one member.) + + *rdclass*, the rdataclass of the type, or ``dns.rdataclass.ANY`` if + it applies to all classes. + """ + + rdtype = dns.rdatatype.RdataType.make(rdtype) + existing_cls = get_rdata_class(rdclass, rdtype) + if existing_cls != GenericRdata or dns.rdatatype.is_metatype(rdtype): + raise RdatatypeExists(rdclass=rdclass, rdtype=rdtype) + _rdata_classes[(rdclass, rdtype)] = getattr( + implementation, rdtype_text.replace("-", "_") + ) + dns.rdatatype.register_type(rdtype, rdtype_text, is_singleton) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdataclass.py b/backend/test/lib/python3.8/site-packages/dns/rdataclass.py new file mode 100644 index 0000000000000000000000000000000000000000..89b85a79c27ca8eb40bd85d65a17b6280fc50a43 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdataclass.py @@ -0,0 +1,118 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Rdata Classes.""" + +import dns.enum +import dns.exception + + +class RdataClass(dns.enum.IntEnum): + """DNS Rdata Class""" + + RESERVED0 = 0 + IN = 1 + INTERNET = IN + CH = 3 + CHAOS = CH + HS = 4 + HESIOD = HS + NONE = 254 + ANY = 255 + + @classmethod + def _maximum(cls): + return 65535 + + @classmethod + def _short_name(cls): + return "class" + + @classmethod + def _prefix(cls): + return "CLASS" + + @classmethod + def _unknown_exception_class(cls): + return UnknownRdataclass + + +_metaclasses = {RdataClass.NONE, RdataClass.ANY} + + +class UnknownRdataclass(dns.exception.DNSException): + """A DNS class is unknown.""" + + +def from_text(text: str) -> RdataClass: + """Convert text into a DNS rdata class value. + + The input text can be a defined DNS RR class mnemonic or + instance of the DNS generic class syntax. + + For example, "IN" and "CLASS1" will both result in a value of 1. + + Raises ``dns.rdatatype.UnknownRdataclass`` if the class is unknown. + + Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. + + Returns a ``dns.rdataclass.RdataClass``. + """ + + return RdataClass.from_text(text) + + +def to_text(value: RdataClass) -> str: + """Convert a DNS rdata class value to text. + + If the value has a known mnemonic, it will be used, otherwise the + DNS generic class syntax will be used. + + Raises ``ValueError`` if the rdata class value is not >= 0 and <= 65535. + + Returns a ``str``. + """ + + return RdataClass.to_text(value) + + +def is_metaclass(rdclass: RdataClass) -> bool: + """True if the specified class is a metaclass. + + The currently defined metaclasses are ANY and NONE. + + *rdclass* is a ``dns.rdataclass.RdataClass``. + """ + + if rdclass in _metaclasses: + return True + return False + + +### BEGIN generated RdataClass constants + +RESERVED0 = RdataClass.RESERVED0 +IN = RdataClass.IN +INTERNET = RdataClass.INTERNET +CH = RdataClass.CH +CHAOS = RdataClass.CHAOS +HS = RdataClass.HS +HESIOD = RdataClass.HESIOD +NONE = RdataClass.NONE +ANY = RdataClass.ANY + +### END generated RdataClass constants diff --git a/backend/test/lib/python3.8/site-packages/dns/rdataset.py b/backend/test/lib/python3.8/site-packages/dns/rdataset.py new file mode 100644 index 0000000000000000000000000000000000000000..31124afcc46f013cdaa8ac1bebda62813dab6b14 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdataset.py @@ -0,0 +1,526 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS rdatasets (an rdataset is a set of rdatas of a given type and class)""" + +import io +import random +import struct +from typing import Any, Collection, Dict, List, Optional, Union, cast + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdataclass +import dns.rdatatype +import dns.set +import dns.ttl + +# define SimpleSet here for backwards compatibility +SimpleSet = dns.set.Set + + +class DifferingCovers(dns.exception.DNSException): + """An attempt was made to add a DNS SIG/RRSIG whose covered type + is not the same as that of the other rdatas in the rdataset.""" + + +class IncompatibleTypes(dns.exception.DNSException): + """An attempt was made to add DNS RR data of an incompatible type.""" + + +class Rdataset(dns.set.Set): + + """A DNS rdataset.""" + + __slots__ = ["rdclass", "rdtype", "covers", "ttl"] + + def __init__( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + ttl: int = 0, + ): + """Create a new rdataset of the specified class and type. + + *rdclass*, a ``dns.rdataclass.RdataClass``, the rdataclass. + + *rdtype*, an ``dns.rdatatype.RdataType``, the rdatatype. + + *covers*, an ``dns.rdatatype.RdataType``, the covered rdatatype. + + *ttl*, an ``int``, the TTL. + """ + + super().__init__() + self.rdclass = rdclass + self.rdtype: dns.rdatatype.RdataType = rdtype + self.covers: dns.rdatatype.RdataType = covers + self.ttl = ttl + + def _clone(self): + obj = super()._clone() + obj.rdclass = self.rdclass + obj.rdtype = self.rdtype + obj.covers = self.covers + obj.ttl = self.ttl + return obj + + def update_ttl(self, ttl: int) -> None: + """Perform TTL minimization. + + Set the TTL of the rdataset to be the lesser of the set's current + TTL or the specified TTL. If the set contains no rdatas, set the TTL + to the specified TTL. + + *ttl*, an ``int`` or ``str``. + """ + ttl = dns.ttl.make(ttl) + if len(self) == 0: + self.ttl = ttl + elif ttl < self.ttl: + self.ttl = ttl + + def add( # pylint: disable=arguments-differ,arguments-renamed + self, rd: dns.rdata.Rdata, ttl: Optional[int] = None + ) -> None: + """Add the specified rdata to the rdataset. + + If the optional *ttl* parameter is supplied, then + ``self.update_ttl(ttl)`` will be called prior to adding the rdata. + + *rd*, a ``dns.rdata.Rdata``, the rdata + + *ttl*, an ``int``, the TTL. + + Raises ``dns.rdataset.IncompatibleTypes`` if the type and class + do not match the type and class of the rdataset. + + Raises ``dns.rdataset.DifferingCovers`` if the type is a signature + type and the covered type does not match that of the rdataset. + """ + + # + # If we're adding a signature, do some special handling to + # check that the signature covers the same type as the + # other rdatas in this rdataset. If this is the first rdata + # in the set, initialize the covers field. 
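+        # (Example: once an RRSIG(NS) rdata is present, adding an RRSIG(A)
+        # raises DifferingCovers, while another RRSIG(NS) is accepted.)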
+ # + if self.rdclass != rd.rdclass or self.rdtype != rd.rdtype: + raise IncompatibleTypes + if ttl is not None: + self.update_ttl(ttl) + if self.rdtype == dns.rdatatype.RRSIG or self.rdtype == dns.rdatatype.SIG: + covers = rd.covers() + if len(self) == 0 and self.covers == dns.rdatatype.NONE: + self.covers = covers + elif self.covers != covers: + raise DifferingCovers + if dns.rdatatype.is_singleton(rd.rdtype) and len(self) > 0: + self.clear() + super().add(rd) + + def union_update(self, other): + self.update_ttl(other.ttl) + super().union_update(other) + + def intersection_update(self, other): + self.update_ttl(other.ttl) + super().intersection_update(other) + + def update(self, other): + """Add all rdatas in other to self. + + *other*, a ``dns.rdataset.Rdataset``, the rdataset from which + to update. + """ + + self.update_ttl(other.ttl) + super().update(other) + + def _rdata_repr(self): + def maybe_truncate(s): + if len(s) > 100: + return s[:100] + "..." + return s + + return "[%s]" % ", ".join("<%s>" % maybe_truncate(str(rr)) for rr in self) + + def __repr__(self): + if self.covers == 0: + ctext = "" + else: + ctext = "(" + dns.rdatatype.to_text(self.covers) + ")" + return ( + "<DNS " + + dns.rdataclass.to_text(self.rdclass) + + " " + + dns.rdatatype.to_text(self.rdtype) + + ctext + + " rdataset: " + + self._rdata_repr() + + ">" + ) + + def __str__(self): + return self.to_text() + + def __eq__(self, other): + if not isinstance(other, Rdataset): + return False + if ( + self.rdclass != other.rdclass + or self.rdtype != other.rdtype + or self.covers != other.covers + ): + return False + return super().__eq__(other) + + def __ne__(self, other): + return not self.__eq__(other) + + def to_text( + self, + name: Optional[dns.name.Name] = None, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + override_rdclass: Optional[dns.rdataclass.RdataClass] = None, + want_comments: bool = False, + **kw: Dict[str, Any], + ) -> str: + """Convert the rdataset into DNS zone file format. + + See ``dns.name.Name.choose_relativity`` for more information + on how *origin* and *relativize* determine the way names + are emitted. + + Any additional keyword arguments are passed on to the rdata + ``to_text()`` method. + + *name*, a ``dns.name.Name``. If name is not ``None``, emit RRs with + *name* as the owner name. + + *origin*, a ``dns.name.Name`` or ``None``, the origin for relative + names. + + *relativize*, a ``bool``. If ``True``, names will be relativized + to *origin*. + + *override_rdclass*, a ``dns.rdataclass.RdataClass`` or ``None``. + If not ``None``, use this class instead of the Rdataset's class. + + *want_comments*, a ``bool``. If ``True``, emit comments for rdata + which have them. The default is ``False``. + """ + + if name is not None: + name = name.choose_relativity(origin, relativize) + ntext = str(name) + pad = " " + else: + ntext = "" + pad = "" + s = io.StringIO() + if override_rdclass is not None: + rdclass = override_rdclass + else: + rdclass = self.rdclass + if len(self) == 0: + # + # Empty rdatasets are used for the question section, and in + # some dynamic updates, so we don't need to print out the TTL + # (which is meaningless anyway). 
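+            # (E.g. a question-section entry renders as "name. IN A" with
+            # no TTL column; "name." stands in for the owner name.)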
+ # + s.write( + "{}{}{} {}\n".format( + ntext, + pad, + dns.rdataclass.to_text(rdclass), + dns.rdatatype.to_text(self.rdtype), + ) + ) + else: + for rd in self: + extra = "" + if want_comments: + if rd.rdcomment: + extra = f" ;{rd.rdcomment}" + s.write( + "%s%s%d %s %s %s%s\n" + % ( + ntext, + pad, + self.ttl, + dns.rdataclass.to_text(rdclass), + dns.rdatatype.to_text(self.rdtype), + rd.to_text(origin=origin, relativize=relativize, **kw), + extra, + ) + ) + # + # We strip off the final \n for the caller's convenience in printing + # + return s.getvalue()[:-1] + + def to_wire( + self, + name: dns.name.Name, + file: Any, + compress: Optional[dns.name.CompressType] = None, + origin: Optional[dns.name.Name] = None, + override_rdclass: Optional[dns.rdataclass.RdataClass] = None, + want_shuffle: bool = True, + ) -> int: + """Convert the rdataset to wire format. + + *name*, a ``dns.name.Name`` is the owner name to use. + + *file* is the file where the name is emitted (typically a + BytesIO file). + + *compress*, a ``dict``, is the compression table to use. If + ``None`` (the default), names will not be compressed. + + *origin* is a ``dns.name.Name`` or ``None``. If the name is + relative and origin is not ``None``, then *origin* will be appended + to it. + + *override_rdclass*, an ``int``, is used as the class instead of the + class of the rdataset. This is useful when rendering rdatasets + associated with dynamic updates. + + *want_shuffle*, a ``bool``. If ``True``, then the order of the + Rdatas within the Rdataset will be shuffled before rendering. + + Returns an ``int``, the number of records emitted. + """ + + if override_rdclass is not None: + rdclass = override_rdclass + want_shuffle = False + else: + rdclass = self.rdclass + file.seek(0, io.SEEK_END) + if len(self) == 0: + name.to_wire(file, compress, origin) + stuff = struct.pack("!HHIH", self.rdtype, rdclass, 0, 0) + file.write(stuff) + return 1 + else: + l: Union[Rdataset, List[dns.rdata.Rdata]] + if want_shuffle: + l = list(self) + random.shuffle(l) + else: + l = self + for rd in l: + name.to_wire(file, compress, origin) + stuff = struct.pack("!HHIH", self.rdtype, rdclass, self.ttl, 0) + file.write(stuff) + start = file.tell() + rd.to_wire(file, compress, origin) + end = file.tell() + assert end - start < 65536 + file.seek(start - 2) + stuff = struct.pack("!H", end - start) + file.write(stuff) + file.seek(0, io.SEEK_END) + return len(self) + + def match( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType, + ) -> bool: + """Returns ``True`` if this rdataset matches the specified class, + type, and covers. + """ + if self.rdclass == rdclass and self.rdtype == rdtype and self.covers == covers: + return True + return False + + def processing_order(self) -> List[dns.rdata.Rdata]: + """Return rdatas in a valid processing order according to the type's + specification. For example, MX records are in preference order from + lowest to highest preferences, with items of the same preference + shuffled. + + For types that do not define a processing order, the rdatas are + simply shuffled. 
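+
+        (Illustration: an MX rdataset with preferences 5, 10, and 10 yields
+        the preference-5 rdata first, then the two preference-10 rdatas in
+        random order.)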
+ """ + if len(self) == 0: + return [] + else: + return self[0]._processing_order(iter(self)) + + +@dns.immutable.immutable +class ImmutableRdataset(Rdataset): # lgtm[py/missing-equals] + + """An immutable DNS rdataset.""" + + _clone_class = Rdataset + + def __init__(self, rdataset: Rdataset): + """Create an immutable rdataset from the specified rdataset.""" + + super().__init__( + rdataset.rdclass, rdataset.rdtype, rdataset.covers, rdataset.ttl + ) + self.items = dns.immutable.Dict(rdataset.items) + + def update_ttl(self, ttl): + raise TypeError("immutable") + + def add(self, rd, ttl=None): + raise TypeError("immutable") + + def union_update(self, other): + raise TypeError("immutable") + + def intersection_update(self, other): + raise TypeError("immutable") + + def update(self, other): + raise TypeError("immutable") + + def __delitem__(self, i): + raise TypeError("immutable") + + # lgtm complains about these not raising ArithmeticError, but there is + # precedent for overrides of these methods in other classes to raise + # TypeError, and it seems like the better exception. + + def __ior__(self, other): # lgtm[py/unexpected-raise-in-special-method] + raise TypeError("immutable") + + def __iand__(self, other): # lgtm[py/unexpected-raise-in-special-method] + raise TypeError("immutable") + + def __iadd__(self, other): # lgtm[py/unexpected-raise-in-special-method] + raise TypeError("immutable") + + def __isub__(self, other): # lgtm[py/unexpected-raise-in-special-method] + raise TypeError("immutable") + + def clear(self): + raise TypeError("immutable") + + def __copy__(self): + return ImmutableRdataset(super().copy()) + + def copy(self): + return ImmutableRdataset(super().copy()) + + def union(self, other): + return ImmutableRdataset(super().union(other)) + + def intersection(self, other): + return ImmutableRdataset(super().intersection(other)) + + def difference(self, other): + return ImmutableRdataset(super().difference(other)) + + def symmetric_difference(self, other): + return ImmutableRdataset(super().symmetric_difference(other)) + + +def from_text_list( + rdclass: Union[dns.rdataclass.RdataClass, str], + rdtype: Union[dns.rdatatype.RdataType, str], + ttl: int, + text_rdatas: Collection[str], + idna_codec: Optional[dns.name.IDNACodec] = None, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + relativize_to: Optional[dns.name.Name] = None, +) -> Rdataset: + """Create an rdataset with the specified class, type, and TTL, and with + the specified list of rdatas in text format. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use; if ``None``, the default IDNA 2003 + encoder/decoder is used. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + Returns a ``dns.rdataset.Rdataset`` object. 
+ """ + + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + r = Rdataset(rdclass, rdtype) + r.update_ttl(ttl) + for t in text_rdatas: + rd = dns.rdata.from_text( + r.rdclass, r.rdtype, t, origin, relativize, relativize_to, idna_codec + ) + r.add(rd) + return r + + +def from_text( + rdclass: Union[dns.rdataclass.RdataClass, str], + rdtype: Union[dns.rdatatype.RdataType, str], + ttl: int, + *text_rdatas: Any, +) -> Rdataset: + """Create an rdataset with the specified class, type, and TTL, and with + the specified rdatas in text format. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + return from_text_list(rdclass, rdtype, ttl, cast(Collection[str], text_rdatas)) + + +def from_rdata_list(ttl: int, rdatas: Collection[dns.rdata.Rdata]) -> Rdataset: + """Create an rdataset with the specified TTL, and with + the specified list of rdata objects. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + if len(rdatas) == 0: + raise ValueError("rdata list must not be empty") + r = None + for rd in rdatas: + if r is None: + r = Rdataset(rd.rdclass, rd.rdtype) + r.update_ttl(ttl) + r.add(rd) + assert r is not None + return r + + +def from_rdata(ttl: int, *rdatas: Any) -> Rdataset: + """Create an rdataset with the specified TTL, and with + the specified rdata objects. + + Returns a ``dns.rdataset.Rdataset`` object. + """ + + return from_rdata_list(ttl, cast(Collection[dns.rdata.Rdata], rdatas)) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdatatype.py b/backend/test/lib/python3.8/site-packages/dns/rdatatype.py new file mode 100644 index 0000000000000000000000000000000000000000..e6c581867bcc7cd7e806ea92c3dab28f0d021d3e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdatatype.py @@ -0,0 +1,332 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""DNS Rdata Types.""" + +from typing import Dict + +import dns.enum +import dns.exception + + +class RdataType(dns.enum.IntEnum): + """DNS Rdata Type""" + + TYPE0 = 0 + NONE = 0 + A = 1 + NS = 2 + MD = 3 + MF = 4 + CNAME = 5 + SOA = 6 + MB = 7 + MG = 8 + MR = 9 + NULL = 10 + WKS = 11 + PTR = 12 + HINFO = 13 + MINFO = 14 + MX = 15 + TXT = 16 + RP = 17 + AFSDB = 18 + X25 = 19 + ISDN = 20 + RT = 21 + NSAP = 22 + NSAP_PTR = 23 + SIG = 24 + KEY = 25 + PX = 26 + GPOS = 27 + AAAA = 28 + LOC = 29 + NXT = 30 + SRV = 33 + NAPTR = 35 + KX = 36 + CERT = 37 + A6 = 38 + DNAME = 39 + OPT = 41 + APL = 42 + DS = 43 + SSHFP = 44 + IPSECKEY = 45 + RRSIG = 46 + NSEC = 47 + DNSKEY = 48 + DHCID = 49 + NSEC3 = 50 + NSEC3PARAM = 51 + TLSA = 52 + SMIMEA = 53 + HIP = 55 + NINFO = 56 + CDS = 59 + CDNSKEY = 60 + OPENPGPKEY = 61 + CSYNC = 62 + ZONEMD = 63 + SVCB = 64 + HTTPS = 65 + SPF = 99 + UNSPEC = 103 + NID = 104 + L32 = 105 + L64 = 106 + LP = 107 + EUI48 = 108 + EUI64 = 109 + TKEY = 249 + TSIG = 250 + IXFR = 251 + AXFR = 252 + MAILB = 253 + MAILA = 254 + ANY = 255 + URI = 256 + CAA = 257 + AVC = 258 + AMTRELAY = 260 + TA = 32768 + DLV = 32769 + + @classmethod + def _maximum(cls): + return 65535 + + @classmethod + def _short_name(cls): + return "type" + + @classmethod + def _prefix(cls): + return "TYPE" + + @classmethod + def _extra_from_text(cls, text): + if text.find("-") >= 0: + try: + return cls[text.replace("-", "_")] + except KeyError: + pass + return _registered_by_text.get(text) + + @classmethod + def _extra_to_text(cls, value, current_text): + if current_text is None: + return _registered_by_value.get(value) + if current_text.find("_") >= 0: + return current_text.replace("_", "-") + return current_text + + @classmethod + def _unknown_exception_class(cls): + return UnknownRdatatype + + +_registered_by_text: Dict[str, RdataType] = {} +_registered_by_value: Dict[RdataType, str] = {} + +_metatypes = {RdataType.OPT} + +_singletons = { + RdataType.SOA, + RdataType.NXT, + RdataType.DNAME, + RdataType.NSEC, + RdataType.CNAME, +} + + +class UnknownRdatatype(dns.exception.DNSException): + """DNS resource record type is unknown.""" + + +def from_text(text: str) -> RdataType: + """Convert text into a DNS rdata type value. + + The input text can be a defined DNS RR type mnemonic or + instance of the DNS generic type syntax. + + For example, "NS" and "TYPE2" will both result in a value of 2. + + Raises ``dns.rdatatype.UnknownRdatatype`` if the type is unknown. + + Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. + + Returns a ``dns.rdatatype.RdataType``. + """ + + return RdataType.from_text(text) + + +def to_text(value: RdataType) -> str: + """Convert a DNS rdata type value to text. + + If the value has a known mnemonic, it will be used, otherwise the + DNS generic type syntax will be used. + + Raises ``ValueError`` if the rdata type value is not >= 0 and <= 65535. + + Returns a ``str``. + """ + + return RdataType.to_text(value) + + +def is_metatype(rdtype: RdataType) -> bool: + """True if the specified type is a metatype. + + *rdtype* is a ``dns.rdatatype.RdataType``. + + The currently defined metatypes are TKEY, TSIG, IXFR, AXFR, MAILA, + MAILB, ANY, and OPT. + + Returns a ``bool``. + """ + + return (256 > rdtype >= 128) or rdtype in _metatypes + + +def is_singleton(rdtype: RdataType) -> bool: + """Is the specified type a singleton type? + + Singleton types can only have a single rdata in an rdataset, or a single + RR in an RRset. 
+ + The currently defined singleton types are CNAME, DNAME, NSEC, NXT, and + SOA. + + *rdtype* is an ``int``. + + Returns a ``bool``. + """ + + if rdtype in _singletons: + return True + return False + + +# pylint: disable=redefined-outer-name +def register_type( + rdtype: RdataType, rdtype_text: str, is_singleton: bool = False +) -> None: + """Dynamically register an rdatatype. + + *rdtype*, a ``dns.rdatatype.RdataType``, the rdatatype to register. + + *rdtype_text*, a ``str``, the textual form of the rdatatype. + + *is_singleton*, a ``bool``, indicating if the type is a singleton (i.e. + RRsets of the type can have only one member.) + """ + + _registered_by_text[rdtype_text] = rdtype + _registered_by_value[rdtype] = rdtype_text + if is_singleton: + _singletons.add(rdtype) + + +### BEGIN generated RdataType constants + +TYPE0 = RdataType.TYPE0 +NONE = RdataType.NONE +A = RdataType.A +NS = RdataType.NS +MD = RdataType.MD +MF = RdataType.MF +CNAME = RdataType.CNAME +SOA = RdataType.SOA +MB = RdataType.MB +MG = RdataType.MG +MR = RdataType.MR +NULL = RdataType.NULL +WKS = RdataType.WKS +PTR = RdataType.PTR +HINFO = RdataType.HINFO +MINFO = RdataType.MINFO +MX = RdataType.MX +TXT = RdataType.TXT +RP = RdataType.RP +AFSDB = RdataType.AFSDB +X25 = RdataType.X25 +ISDN = RdataType.ISDN +RT = RdataType.RT +NSAP = RdataType.NSAP +NSAP_PTR = RdataType.NSAP_PTR +SIG = RdataType.SIG +KEY = RdataType.KEY +PX = RdataType.PX +GPOS = RdataType.GPOS +AAAA = RdataType.AAAA +LOC = RdataType.LOC +NXT = RdataType.NXT +SRV = RdataType.SRV +NAPTR = RdataType.NAPTR +KX = RdataType.KX +CERT = RdataType.CERT +A6 = RdataType.A6 +DNAME = RdataType.DNAME +OPT = RdataType.OPT +APL = RdataType.APL +DS = RdataType.DS +SSHFP = RdataType.SSHFP +IPSECKEY = RdataType.IPSECKEY +RRSIG = RdataType.RRSIG +NSEC = RdataType.NSEC +DNSKEY = RdataType.DNSKEY +DHCID = RdataType.DHCID +NSEC3 = RdataType.NSEC3 +NSEC3PARAM = RdataType.NSEC3PARAM +TLSA = RdataType.TLSA +SMIMEA = RdataType.SMIMEA +HIP = RdataType.HIP +NINFO = RdataType.NINFO +CDS = RdataType.CDS +CDNSKEY = RdataType.CDNSKEY +OPENPGPKEY = RdataType.OPENPGPKEY +CSYNC = RdataType.CSYNC +ZONEMD = RdataType.ZONEMD +SVCB = RdataType.SVCB +HTTPS = RdataType.HTTPS +SPF = RdataType.SPF +UNSPEC = RdataType.UNSPEC +NID = RdataType.NID +L32 = RdataType.L32 +L64 = RdataType.L64 +LP = RdataType.LP +EUI48 = RdataType.EUI48 +EUI64 = RdataType.EUI64 +TKEY = RdataType.TKEY +TSIG = RdataType.TSIG +IXFR = RdataType.IXFR +AXFR = RdataType.AXFR +MAILB = RdataType.MAILB +MAILA = RdataType.MAILA +ANY = RdataType.ANY +URI = RdataType.URI +CAA = RdataType.CAA +AVC = RdataType.AVC +AMTRELAY = RdataType.AMTRELAY +TA = RdataType.TA +DLV = RdataType.DLV + +### END generated RdataType constants diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/AFSDB.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/AFSDB.py new file mode 100644 index 0000000000000000000000000000000000000000..3d287f6e02e57731db9884eb26441774da8cde06 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/AFSDB.py @@ -0,0 +1,46 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class AFSDB(dns.rdtypes.mxbase.UncompressedDowncasingMX): + + """AFSDB record""" + + # Use the property mechanism to make "subtype" an alias for the + # "preference" attribute, and "hostname" an alias for the "exchange" + # attribute. + # + # This lets us inherit the UncompressedMX implementation but lets + # the caller use appropriate attribute names for the rdata type. + # + # We probably lose some performance vs. a cut-and-paste + # implementation, but this way we don't copy code, and that's + # good. + + @property + def subtype(self): + "the AFSDB subtype" + return self.preference + + @property + def hostname(self): + "the AFSDB hostname" + return self.exchange diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/AMTRELAY.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/AMTRELAY.py new file mode 100644 index 0000000000000000000000000000000000000000..dfe7abc3e5b62b27f8dad64d7211a0a14032da63 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/AMTRELAY.py @@ -0,0 +1,92 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdtypes.util + + +class Relay(dns.rdtypes.util.Gateway): + name = "AMTRELAY relay" + + @property + def relay(self): + return self.gateway + + +@dns.immutable.immutable +class AMTRELAY(dns.rdata.Rdata): + + """AMTRELAY record""" + + # see: RFC 8777 + + __slots__ = ["precedence", "discovery_optional", "relay_type", "relay"] + + def __init__( + self, rdclass, rdtype, precedence, discovery_optional, relay_type, relay + ): + super().__init__(rdclass, rdtype) + relay = Relay(relay_type, relay) + self.precedence = self._as_uint8(precedence) + self.discovery_optional = self._as_bool(discovery_optional) + self.relay_type = relay.type + self.relay = relay.relay + + def to_text(self, origin=None, relativize=True, **kw): + relay = Relay(self.relay_type, self.relay).to_text(origin, relativize) + return "%d %d %d %s" % ( + self.precedence, + self.discovery_optional, + self.relay_type, + relay, + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + precedence = tok.get_uint8() + discovery_optional = tok.get_uint8() + if discovery_optional > 1: + raise dns.exception.SyntaxError("expecting 0 or 1") + discovery_optional = bool(discovery_optional) + relay_type = tok.get_uint8() + if relay_type > 0x7F: + raise dns.exception.SyntaxError("expecting an integer <= 127") + relay = Relay.from_text(relay_type, tok, origin, relativize, relativize_to) + return cls( + rdclass, rdtype, precedence, discovery_optional, relay_type, relay.relay + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + relay_type = self.relay_type | (self.discovery_optional << 7) + header = struct.pack("!BB", self.precedence, relay_type) + file.write(header) + Relay(self.relay_type, self.relay).to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (precedence, relay_type) = parser.get_struct("!BB") + discovery_optional = bool(relay_type >> 7) + relay_type &= 0x7F + relay = Relay.from_wire_parser(relay_type, parser, origin) + return cls( + rdclass, rdtype, precedence, discovery_optional, relay_type, relay.relay + ) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/AVC.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/AVC.py new file mode 100644 index 0000000000000000000000000000000000000000..766d5e2d7edd74d5d7effe16bc9c6c458c0a83ce --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/AVC.py @@ -0,0 +1,27 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2016 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class AVC(dns.rdtypes.txtbase.TXTBase): + + """AVC record""" + + # See: IANA dns parameters for AVC diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CAA.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CAA.py new file mode 100644 index 0000000000000000000000000000000000000000..8afb538c0505b5468a6e376c6c76ae4c98744c6f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CAA.py @@ -0,0 +1,72 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class CAA(dns.rdata.Rdata): + + """CAA (Certification Authority Authorization) record""" + + # see: RFC 6844 + + __slots__ = ["flags", "tag", "value"] + + def __init__(self, rdclass, rdtype, flags, tag, value): + super().__init__(rdclass, rdtype) + self.flags = self._as_uint8(flags) + self.tag = self._as_bytes(tag, True, 255) + if not tag.isalnum(): + raise ValueError("tag is not alphanumeric") + self.value = self._as_bytes(value) + + def to_text(self, origin=None, relativize=True, **kw): + return '%u %s "%s"' % ( + self.flags, + dns.rdata._escapify(self.tag), + dns.rdata._escapify(self.value), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + flags = tok.get_uint8() + tag = tok.get_string().encode() + value = tok.get_string().encode() + return cls(rdclass, rdtype, flags, tag, value) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!B", self.flags)) + l = len(self.tag) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.tag) + file.write(self.value) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + flags = parser.get_uint8() + tag = parser.get_counted_bytes() + value = parser.get_remaining() + return cls(rdclass, rdtype, flags, tag, value) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CDNSKEY.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CDNSKEY.py new file mode 100644 index 0000000000000000000000000000000000000000..38b8a8da1497c3519a538fb29bb139aa7faaf0bc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CDNSKEY.py @@ -0,0 +1,34 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.dnskeybase # lgtm[py/import-and-import-from] + +# pylint: disable=unused-import +from dns.rdtypes.dnskeybase import ( # noqa: F401 lgtm[py/unused-import] + REVOKE, + SEP, + ZONE, +) + +# pylint: enable=unused-import + + +@dns.immutable.immutable +class CDNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): + + """CDNSKEY record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CDS.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CDS.py new file mode 100644 index 0000000000000000000000000000000000000000..2ff42d9a1a2774dc0c1a629d8dbad4902258ad45 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CDS.py @@ -0,0 +1,30 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.dsbase + + +@dns.immutable.immutable +class CDS(dns.rdtypes.dsbase.DSBase): + + """CDS record""" + + _digest_length_by_type = { + **dns.rdtypes.dsbase.DSBase._digest_length_by_type, + 0: 1, # delete, RFC 8078 Sec. 4 (including Errata ID 5049) + } diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CERT.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CERT.py new file mode 100644 index 0000000000000000000000000000000000000000..30fe863f4fa9cf8af21ffb780b749138668bbc42 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CERT.py @@ -0,0 +1,117 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import struct + +import dns.dnssectypes +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + +_ctype_by_value = { + 1: "PKIX", + 2: "SPKI", + 3: "PGP", + 4: "IPKIX", + 5: "ISPKI", + 6: "IPGP", + 7: "ACPKIX", + 8: "IACPKIX", + 253: "URI", + 254: "OID", +} + +_ctype_by_name = { + "PKIX": 1, + "SPKI": 2, + "PGP": 3, + "IPKIX": 4, + "ISPKI": 5, + "IPGP": 6, + "ACPKIX": 7, + "IACPKIX": 8, + "URI": 253, + "OID": 254, +} + + +def _ctype_from_text(what): + v = _ctype_by_name.get(what) + if v is not None: + return v + return int(what) + + +def _ctype_to_text(what): + v = _ctype_by_value.get(what) + if v is not None: + return v + return str(what) + + +@dns.immutable.immutable +class CERT(dns.rdata.Rdata): + + """CERT record""" + + # see RFC 4398 + + __slots__ = ["certificate_type", "key_tag", "algorithm", "certificate"] + + def __init__( + self, rdclass, rdtype, certificate_type, key_tag, algorithm, certificate + ): + super().__init__(rdclass, rdtype) + self.certificate_type = self._as_uint16(certificate_type) + self.key_tag = self._as_uint16(key_tag) + self.algorithm = self._as_uint8(algorithm) + self.certificate = self._as_bytes(certificate) + + def to_text(self, origin=None, relativize=True, **kw): + certificate_type = _ctype_to_text(self.certificate_type) + return "%s %d %s %s" % ( + certificate_type, + self.key_tag, + dns.dnssectypes.Algorithm.to_text(self.algorithm), + dns.rdata._base64ify(self.certificate, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + certificate_type = _ctype_from_text(tok.get_string()) + key_tag = tok.get_uint16() + algorithm = dns.dnssectypes.Algorithm.from_text(tok.get_string()) + b64 = tok.concatenate_remaining_identifiers().encode() + certificate = base64.b64decode(b64) + return cls(rdclass, rdtype, certificate_type, key_tag, algorithm, certificate) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + prefix = struct.pack( + "!HHB", self.certificate_type, self.key_tag, self.algorithm + ) + file.write(prefix) + file.write(self.certificate) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (certificate_type, key_tag, algorithm) = parser.get_struct("!HHB") + certificate = parser.get_remaining() + return cls(rdclass, rdtype, certificate_type, key_tag, algorithm, certificate) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CNAME.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CNAME.py new file mode 100644 index 0000000000000000000000000000000000000000..759adb906d28f4e091b1d3d3576189f2c7b0033e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CNAME.py @@ -0,0 +1,29 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class CNAME(dns.rdtypes.nsbase.NSBase): + + """CNAME record + + Note: although CNAME is officially a singleton type, dnspython allows + non-singleton CNAME rdatasets because such sets have been commonly + used by BIND and other nameservers for load balancing.""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CSYNC.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CSYNC.py new file mode 100644 index 0000000000000000000000000000000000000000..315da9ffc77ce493e413b9faa3329f602383f508 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/CSYNC.py @@ -0,0 +1,69 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011, 2016 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
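The non-singleton behaviour described in the CNAME docstring above is directly observable. A quick sketch, assuming a dnspython install that matches this vendored copy:

import dns.rrset

# A strict singleton check would reject two CNAMEs at one owner name;
# dnspython accepts the set, matching the BIND usage the docstring cites.
rrs = dns.rrset.from_text(
    "www.example.", 300, "IN", "CNAME", "a.example.", "b.example."
)
print(len(rrs))  # 2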
+ +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdatatype +import dns.rdtypes.util + + +@dns.immutable.immutable +class Bitmap(dns.rdtypes.util.Bitmap): + type_name = "CSYNC" + + +@dns.immutable.immutable +class CSYNC(dns.rdata.Rdata): + + """CSYNC record""" + + __slots__ = ["serial", "flags", "windows"] + + def __init__(self, rdclass, rdtype, serial, flags, windows): + super().__init__(rdclass, rdtype) + self.serial = self._as_uint32(serial) + self.flags = self._as_uint16(flags) + if not isinstance(windows, Bitmap): + windows = Bitmap(windows) + self.windows = tuple(windows.windows) + + def to_text(self, origin=None, relativize=True, **kw): + text = Bitmap(self.windows).to_text() + return "%d %d%s" % (self.serial, self.flags, text) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + serial = tok.get_uint32() + flags = tok.get_uint16() + bitmap = Bitmap.from_text(tok) + return cls(rdclass, rdtype, serial, flags, bitmap) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!IH", self.serial, self.flags)) + Bitmap(self.windows).to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (serial, flags) = parser.get_struct("!IH") + bitmap = Bitmap.from_wire_parser(parser) + return cls(rdclass, rdtype, serial, flags, bitmap) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/DLV.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/DLV.py new file mode 100644 index 0000000000000000000000000000000000000000..632e90f8d38a7fb2fef5e069b04dca6ef1f9df64 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/DLV.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.dsbase + + +@dns.immutable.immutable +class DLV(dns.rdtypes.dsbase.DSBase): + + """DLV record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/DNAME.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/DNAME.py new file mode 100644 index 0000000000000000000000000000000000000000..556bff59e3de793c9321415897bad4a321e321d8 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/DNAME.py @@ -0,0 +1,28 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
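CSYNC above serializes as `<serial> <flags><type bitmap>`, so a record asking the parent to synchronize A, NS, and AAAA with both RFC 7477 flag bits set reads `66 3 A NS AAAA` (all values here are made up). A round-trip sketch, assuming a dnspython install that matches this vendored copy:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

csync = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.CSYNC, "66 3 A NS AAAA")
print(csync.serial, csync.flags)  # 66 3
print(csync.to_text())            # 66 3 A NS AAAA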
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class DNAME(dns.rdtypes.nsbase.UncompressedNS): + + """DNAME record""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.target.to_wire(file, None, origin, canonicalize) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/DNSKEY.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/DNSKEY.py new file mode 100644 index 0000000000000000000000000000000000000000..f1a63062f3b84d578af8e704bf43190dbb056424 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/DNSKEY.py @@ -0,0 +1,34 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.dnskeybase # lgtm[py/import-and-import-from] + +# pylint: disable=unused-import +from dns.rdtypes.dnskeybase import ( # noqa: F401 lgtm[py/unused-import] + REVOKE, + SEP, + ZONE, +) + +# pylint: enable=unused-import + + +@dns.immutable.immutable +class DNSKEY(dns.rdtypes.dnskeybase.DNSKEYBase): + + """DNSKEY record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/DS.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/DS.py new file mode 100644 index 0000000000000000000000000000000000000000..097ecfa0e1a3a375765aba427fd288448b692c44 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/DS.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
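The REVOKE, SEP, and ZONE constants re-exported by DNSKEY above are the RFC 4034/5011 flag bits defined in dnskeybase (ZONE = 256, REVOKE = 128, SEP = 1). A small sketch of how they combine:

from dns.rdtypes.dnskeybase import REVOKE, SEP, ZONE

# A key-signing key conventionally carries ZONE|SEP (flags 257);
# a zone-signing key carries just ZONE (flags 256).
ksk_flags = ZONE | SEP
print(int(ksk_flags))            # 257
print(bool(ksk_flags & REVOKE))  # False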
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.dsbase + + +@dns.immutable.immutable +class DS(dns.rdtypes.dsbase.DSBase): + + """DS record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/EUI48.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/EUI48.py new file mode 100644 index 0000000000000000000000000000000000000000..7e4e1ff3fcf4102854c14afde6e768f41ff6f559 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/EUI48.py @@ -0,0 +1,31 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2015 Red Hat, Inc. +# Author: Petr Spacek <pspacek@redhat.com> +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.euibase + + +@dns.immutable.immutable +class EUI48(dns.rdtypes.euibase.EUIBase): + + """EUI48 record""" + + # see: rfc7043.txt + + byte_len = 6 # 0123456789ab (in hex) + text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/EUI64.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/EUI64.py new file mode 100644 index 0000000000000000000000000000000000000000..68b5820f4fc02caa98f860ca44ebd8775275605c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/EUI64.py @@ -0,0 +1,31 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2015 Red Hat, Inc. +# Author: Petr Spacek <pspacek@redhat.com> +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
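The `text_len = byte_len * 3 - 1` arithmetic in EUI48 above (and in EUI64 below) follows from the hyphenated format: two hex digits plus one dash per byte, minus the trailing dash. A stdlib-only check:

addr = bytes.fromhex("0123456789ab")    # the six-byte value from the comment above
text = "-".join(f"{b:02x}" for b in addr)
print(text)                              # 01-23-45-67-89-ab
print(len(text) == len(addr) * 3 - 1)    # True: 17 == 6 * 3 - 1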
+ +import dns.immutable +import dns.rdtypes.euibase + + +@dns.immutable.immutable +class EUI64(dns.rdtypes.euibase.EUIBase): + + """EUI64 record""" + + # see: rfc7043.txt + + byte_len = 8 # 0123456789abcdef (in hex) + text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab-cd-ef diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/GPOS.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/GPOS.py new file mode 100644 index 0000000000000000000000000000000000000000..30aab321ad73495006997f2fde4c8aae17cbbd0f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/GPOS.py @@ -0,0 +1,126 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +def _validate_float_string(what): + if len(what) == 0: + raise dns.exception.FormError + if what[0] == b"-"[0] or what[0] == b"+"[0]: + what = what[1:] + if what.isdigit(): + return + try: + (left, right) = what.split(b".") + except ValueError: + raise dns.exception.FormError + if left == b"" and right == b"": + raise dns.exception.FormError + if not left == b"" and not left.decode().isdigit(): + raise dns.exception.FormError + if not right == b"" and not right.decode().isdigit(): + raise dns.exception.FormError + + +@dns.immutable.immutable +class GPOS(dns.rdata.Rdata): + + """GPOS record""" + + # see: RFC 1712 + + __slots__ = ["latitude", "longitude", "altitude"] + + def __init__(self, rdclass, rdtype, latitude, longitude, altitude): + super().__init__(rdclass, rdtype) + if isinstance(latitude, float) or isinstance(latitude, int): + latitude = str(latitude) + if isinstance(longitude, float) or isinstance(longitude, int): + longitude = str(longitude) + if isinstance(altitude, float) or isinstance(altitude, int): + altitude = str(altitude) + latitude = self._as_bytes(latitude, True, 255) + longitude = self._as_bytes(longitude, True, 255) + altitude = self._as_bytes(altitude, True, 255) + _validate_float_string(latitude) + _validate_float_string(longitude) + _validate_float_string(altitude) + self.latitude = latitude + self.longitude = longitude + self.altitude = altitude + flat = self.float_latitude + if flat < -90.0 or flat > 90.0: + raise dns.exception.FormError("bad latitude") + flong = self.float_longitude + if flong < -180.0 or flong > 180.0: + raise dns.exception.FormError("bad longitude") + + def to_text(self, origin=None, relativize=True, **kw): + return "{} {} {}".format( + self.latitude.decode(), self.longitude.decode(), self.altitude.decode() + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + latitude = tok.get_string() + 
longitude = tok.get_string() + altitude = tok.get_string() + return cls(rdclass, rdtype, latitude, longitude, altitude) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.latitude) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.latitude) + l = len(self.longitude) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.longitude) + l = len(self.altitude) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.altitude) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + latitude = parser.get_counted_bytes() + longitude = parser.get_counted_bytes() + altitude = parser.get_counted_bytes() + return cls(rdclass, rdtype, latitude, longitude, altitude) + + @property + def float_latitude(self): + "latitude as a floating point value" + return float(self.latitude) + + @property + def float_longitude(self): + "longitude as a floating point value" + return float(self.longitude) + + @property + def float_altitude(self): + "altitude as a floating point value" + return float(self.altitude) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/HINFO.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/HINFO.py new file mode 100644 index 0000000000000000000000000000000000000000..513c155a3260d69c9a35a902249f27746966c6ac --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/HINFO.py @@ -0,0 +1,67 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
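GPOS's `_to_wire` above writes each coordinate as a one-byte length followed by the raw ASCII float, i.e. a counted string. A stdlib-only mirror of that framing, with hypothetical coordinates:

import struct

def counted(value: bytes) -> bytes:
    # One length byte, then the data: the layout _to_wire emits above.
    assert len(value) < 256
    return struct.pack("!B", len(value)) + value

wire = counted(b"37.236693") + counted(b"-115.804069") + counted(b"1360.0")
print(wire[:10])  # b'\t37.236693': a 0x09 length byte, then nine ASCII bytes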
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class HINFO(dns.rdata.Rdata): + + """HINFO record""" + + # see: RFC 1035 + + __slots__ = ["cpu", "os"] + + def __init__(self, rdclass, rdtype, cpu, os): + super().__init__(rdclass, rdtype) + self.cpu = self._as_bytes(cpu, True, 255) + self.os = self._as_bytes(os, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + return '"{}" "{}"'.format( + dns.rdata._escapify(self.cpu), dns.rdata._escapify(self.os) + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + cpu = tok.get_string(max_length=255) + os = tok.get_string(max_length=255) + return cls(rdclass, rdtype, cpu, os) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.cpu) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.cpu) + l = len(self.os) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.os) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + cpu = parser.get_counted_bytes() + os = parser.get_counted_bytes() + return cls(rdclass, rdtype, cpu, os) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/HIP.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/HIP.py new file mode 100644 index 0000000000000000000000000000000000000000..a20aa1e515303bf7fcbc13148322b4cde9ff312d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/HIP.py @@ -0,0 +1,86 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2010, 2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
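HINFO above is just two such counted strings, `cpu` and `os`, each capped at 255 bytes, so its text form is two quoted strings. A round-trip sketch, assuming a dnspython install that matches this vendored copy:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

hinfo = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.HINFO, '"x86_64" "Linux"')
print(hinfo.cpu, hinfo.os)  # b'x86_64' b'Linux'
print(hinfo.to_text())      # "x86_64" "Linux"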
+ +import base64 +import binascii +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdatatype + + +@dns.immutable.immutable +class HIP(dns.rdata.Rdata): + + """HIP record""" + + # see: RFC 5205 + + __slots__ = ["hit", "algorithm", "key", "servers"] + + def __init__(self, rdclass, rdtype, hit, algorithm, key, servers): + super().__init__(rdclass, rdtype) + self.hit = self._as_bytes(hit, True, 255) + self.algorithm = self._as_uint8(algorithm) + self.key = self._as_bytes(key, True) + self.servers = self._as_tuple(servers, self._as_name) + + def to_text(self, origin=None, relativize=True, **kw): + hit = binascii.hexlify(self.hit).decode() + key = base64.b64encode(self.key).replace(b"\n", b"").decode() + text = "" + servers = [] + for server in self.servers: + servers.append(server.choose_relativity(origin, relativize)) + if len(servers) > 0: + text += " " + " ".join((x.to_unicode() for x in servers)) + return "%u %s %s%s" % (self.algorithm, hit, key, text) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_uint8() + hit = binascii.unhexlify(tok.get_string().encode()) + key = base64.b64decode(tok.get_string().encode()) + servers = [] + for token in tok.get_remaining(): + server = tok.as_name(token, origin, relativize, relativize_to) + servers.append(server) + return cls(rdclass, rdtype, hit, algorithm, key, servers) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + lh = len(self.hit) + lk = len(self.key) + file.write(struct.pack("!BBH", lh, self.algorithm, lk)) + file.write(self.hit) + file.write(self.key) + for server in self.servers: + server.to_wire(file, None, origin, False) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (lh, algorithm, lk) = parser.get_struct("!BBH") + hit = parser.get_bytes(lh) + key = parser.get_bytes(lk) + servers = [] + while parser.remaining() > 0: + server = parser.get_name(origin) + servers.append(server) + return cls(rdclass, rdtype, hit, algorithm, key, servers) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/ISDN.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/ISDN.py new file mode 100644 index 0000000000000000000000000000000000000000..536a35d61e841352d33431818364a8913ae5f5d6 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/ISDN.py @@ -0,0 +1,78 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
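HIP's `_to_wire` above packs a `!BBH` header (HIT length, algorithm, key length) ahead of the variable-length HIT, key, and rendezvous servers. A stdlib-only sketch of that framing with placeholder values:

import struct

hit = bytes.fromhex("200100107b1a74df365639cc39f1d578")  # hypothetical 16-byte HIT
algorithm = 2                                            # hypothetical algorithm number
key = b"\x01\x02\x03"                                    # placeholder public-key bytes

wire = struct.pack("!BBH", len(hit), algorithm, len(key)) + hit + key
lh, alg, lk = struct.unpack("!BBH", wire[:4])
assert wire[4:4 + lh] == hit
assert wire[4 + lh:4 + lh + lk] == key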
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class ISDN(dns.rdata.Rdata): + + """ISDN record""" + + # see: RFC 1183 + + __slots__ = ["address", "subaddress"] + + def __init__(self, rdclass, rdtype, address, subaddress): + super().__init__(rdclass, rdtype) + self.address = self._as_bytes(address, True, 255) + self.subaddress = self._as_bytes(subaddress, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + if self.subaddress: + return '"{}" "{}"'.format( + dns.rdata._escapify(self.address), dns.rdata._escapify(self.subaddress) + ) + else: + return '"%s"' % dns.rdata._escapify(self.address) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_string() + tokens = tok.get_remaining(max_tokens=1) + if len(tokens) >= 1: + subaddress = tokens[0].unescape().value + else: + subaddress = "" + return cls(rdclass, rdtype, address, subaddress) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.address) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.address) + l = len(self.subaddress) + if l > 0: + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.subaddress) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_counted_bytes() + if parser.remaining() > 0: + subaddress = parser.get_counted_bytes() + else: + subaddress = b"" + return cls(rdclass, rdtype, address, subaddress) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/L32.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/L32.py new file mode 100644 index 0000000000000000000000000000000000000000..14be01f9d6ab9e57700954249093f866e98bdce4 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/L32.py @@ -0,0 +1,42 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class L32(dns.rdata.Rdata): + + """L32 record""" + + # see: rfc6742.txt + + __slots__ = ["preference", "locator32"] + + def __init__(self, rdclass, rdtype, preference, locator32): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.locator32 = self._as_ipv4_address(locator32) + + def to_text(self, origin=None, relativize=True, **kw): + return f"{self.preference} {self.locator32}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + nodeid = tok.get_identifier() + return cls(rdclass, rdtype, preference, nodeid) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.preference)) + file.write(dns.ipv4.inet_aton(self.locator32)) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + locator32 = parser.get_remaining() + return cls(rdclass, rdtype, preference, locator32) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/L64.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/L64.py new file mode 100644 index 0000000000000000000000000000000000000000..d083d4036c84eef23b99544781560e99f2469b0b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/L64.py @@ -0,0 +1,48 @@ +# Copyright (C) Dnspython 
Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable +import dns.rdtypes.util + + +@dns.immutable.immutable +class L64(dns.rdata.Rdata): + + """L64 record""" + + # see: rfc6742.txt + + __slots__ = ["preference", "locator64"] + + def __init__(self, rdclass, rdtype, preference, locator64): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + if isinstance(locator64, bytes): + if len(locator64) != 8: + raise ValueError("invalid locator64") + self.locator64 = dns.rdata._hexify(locator64, 4, b":") + else: + dns.rdtypes.util.parse_formatted_hex(locator64, 4, 4, ":") + self.locator64 = locator64 + + def to_text(self, origin=None, relativize=True, **kw): + return f"{self.preference} {self.locator64}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + locator64 = tok.get_identifier() + return cls(rdclass, rdtype, preference, locator64) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.preference)) + file.write(dns.rdtypes.util.parse_formatted_hex(self.locator64, 4, 4, ":")) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + locator64 = parser.get_remaining() + return cls(rdclass, rdtype, preference, locator64) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/LOC.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/LOC.py new file mode 100644 index 0000000000000000000000000000000000000000..783d54af84c292400481f1cebbdcd6e187ca8bcb --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/LOC.py @@ -0,0 +1,355 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
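L64 above presents its 8-byte locator as four colon-separated groups of four hex digits (that is what `dns.rdata._hexify(locator64, 4, b":")` produces). A stdlib-only mirror, using a locator of the kind RFC 6742's examples show:

locator = bytes.fromhex("00144fffff20ee64")
digits = locator.hex()
text = ":".join(digits[i:i + 4] for i in range(0, len(digits), 4))
print(text)  # 0014:4fff:ff20:ee64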
+ +import struct + +import dns.exception +import dns.immutable +import dns.rdata + +_pows = tuple(10**i for i in range(0, 11)) + +# default values are in centimeters +_default_size = 100.0 +_default_hprec = 1000000.0 +_default_vprec = 1000.0 + +# for use by from_wire() +_MAX_LATITUDE = 0x80000000 + 90 * 3600000 +_MIN_LATITUDE = 0x80000000 - 90 * 3600000 +_MAX_LONGITUDE = 0x80000000 + 180 * 3600000 +_MIN_LONGITUDE = 0x80000000 - 180 * 3600000 + + +def _exponent_of(what, desc): + if what == 0: + return 0 + exp = None + for i, pow in enumerate(_pows): + if what < pow: + exp = i - 1 + break + if exp is None or exp < 0: + raise dns.exception.SyntaxError("%s value out of bounds" % desc) + return exp + + +def _float_to_tuple(what): + if what < 0: + sign = -1 + what *= -1 + else: + sign = 1 + what = round(what * 3600000) + degrees = int(what // 3600000) + what -= degrees * 3600000 + minutes = int(what // 60000) + what -= minutes * 60000 + seconds = int(what // 1000) + what -= int(seconds * 1000) + what = int(what) + return (degrees, minutes, seconds, what, sign) + + +def _tuple_to_float(what): + value = float(what[0]) + value += float(what[1]) / 60.0 + value += float(what[2]) / 3600.0 + value += float(what[3]) / 3600000.0 + return float(what[4]) * value + + +def _encode_size(what, desc): + what = int(what) + exponent = _exponent_of(what, desc) & 0xF + base = what // pow(10, exponent) & 0xF + return base * 16 + exponent + + +def _decode_size(what, desc): + exponent = what & 0x0F + if exponent > 9: + raise dns.exception.FormError("bad %s exponent" % desc) + base = (what & 0xF0) >> 4 + if base > 9: + raise dns.exception.FormError("bad %s base" % desc) + return base * pow(10, exponent) + + +def _check_coordinate_list(value, low, high): + if value[0] < low or value[0] > high: + raise ValueError(f"not in range [{low}, {high}]") + if value[1] < 0 or value[1] > 59: + raise ValueError("bad minutes value") + if value[2] < 0 or value[2] > 59: + raise ValueError("bad seconds value") + if value[3] < 0 or value[3] > 999: + raise ValueError("bad milliseconds value") + if value[4] != 1 and value[4] != -1: + raise ValueError("bad hemisphere value") + + +@dns.immutable.immutable +class LOC(dns.rdata.Rdata): + + """LOC record""" + + # see: RFC 1876 + + __slots__ = [ + "latitude", + "longitude", + "altitude", + "size", + "horizontal_precision", + "vertical_precision", + ] + + def __init__( + self, + rdclass, + rdtype, + latitude, + longitude, + altitude, + size=_default_size, + hprec=_default_hprec, + vprec=_default_vprec, + ): + """Initialize a LOC record instance. + + The parameters I{latitude} and I{longitude} may be either a 4-tuple + of integers specifying (degrees, minutes, seconds, milliseconds), + or they may be floating point values specifying the number of + degrees. The other parameters are floats. 
Size, horizontal precision, + and vertical precision are specified in centimeters.""" + + super().__init__(rdclass, rdtype) + if isinstance(latitude, int): + latitude = float(latitude) + if isinstance(latitude, float): + latitude = _float_to_tuple(latitude) + _check_coordinate_list(latitude, -90, 90) + self.latitude = tuple(latitude) + if isinstance(longitude, int): + longitude = float(longitude) + if isinstance(longitude, float): + longitude = _float_to_tuple(longitude) + _check_coordinate_list(longitude, -180, 180) + self.longitude = tuple(longitude) + self.altitude = float(altitude) + self.size = float(size) + self.horizontal_precision = float(hprec) + self.vertical_precision = float(vprec) + + def to_text(self, origin=None, relativize=True, **kw): + if self.latitude[4] > 0: + lat_hemisphere = "N" + else: + lat_hemisphere = "S" + if self.longitude[4] > 0: + long_hemisphere = "E" + else: + long_hemisphere = "W" + text = "%d %d %d.%03d %s %d %d %d.%03d %s %0.2fm" % ( + self.latitude[0], + self.latitude[1], + self.latitude[2], + self.latitude[3], + lat_hemisphere, + self.longitude[0], + self.longitude[1], + self.longitude[2], + self.longitude[3], + long_hemisphere, + self.altitude / 100.0, + ) + + # do not print default values + if ( + self.size != _default_size + or self.horizontal_precision != _default_hprec + or self.vertical_precision != _default_vprec + ): + text += " {:0.2f}m {:0.2f}m {:0.2f}m".format( + self.size / 100.0, + self.horizontal_precision / 100.0, + self.vertical_precision / 100.0, + ) + return text + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + latitude = [0, 0, 0, 0, 1] + longitude = [0, 0, 0, 0, 1] + size = _default_size + hprec = _default_hprec + vprec = _default_vprec + + latitude[0] = tok.get_int() + t = tok.get_string() + if t.isdigit(): + latitude[1] = int(t) + t = tok.get_string() + if "." in t: + (seconds, milliseconds) = t.split(".") + if not seconds.isdigit(): + raise dns.exception.SyntaxError("bad latitude seconds value") + latitude[2] = int(seconds) + l = len(milliseconds) + if l == 0 or l > 3 or not milliseconds.isdigit(): + raise dns.exception.SyntaxError("bad latitude milliseconds value") + if l == 1: + m = 100 + elif l == 2: + m = 10 + else: + m = 1 + latitude[3] = m * int(milliseconds) + t = tok.get_string() + elif t.isdigit(): + latitude[2] = int(t) + t = tok.get_string() + if t == "S": + latitude[4] = -1 + elif t != "N": + raise dns.exception.SyntaxError("bad latitude hemisphere value") + + longitude[0] = tok.get_int() + t = tok.get_string() + if t.isdigit(): + longitude[1] = int(t) + t = tok.get_string() + if "." 
in t: + (seconds, milliseconds) = t.split(".") + if not seconds.isdigit(): + raise dns.exception.SyntaxError("bad longitude seconds value") + longitude[2] = int(seconds) + l = len(milliseconds) + if l == 0 or l > 3 or not milliseconds.isdigit(): + raise dns.exception.SyntaxError("bad longitude milliseconds value") + if l == 1: + m = 100 + elif l == 2: + m = 10 + else: + m = 1 + longitude[3] = m * int(milliseconds) + t = tok.get_string() + elif t.isdigit(): + longitude[2] = int(t) + t = tok.get_string() + if t == "W": + longitude[4] = -1 + elif t != "E": + raise dns.exception.SyntaxError("bad longitude hemisphere value") + + t = tok.get_string() + if t[-1] == "m": + t = t[0:-1] + altitude = float(t) * 100.0 # m -> cm + + tokens = tok.get_remaining(max_tokens=3) + if len(tokens) >= 1: + value = tokens[0].unescape().value + if value[-1] == "m": + value = value[0:-1] + size = float(value) * 100.0 # m -> cm + if len(tokens) >= 2: + value = tokens[1].unescape().value + if value[-1] == "m": + value = value[0:-1] + hprec = float(value) * 100.0 # m -> cm + if len(tokens) >= 3: + value = tokens[2].unescape().value + if value[-1] == "m": + value = value[0:-1] + vprec = float(value) * 100.0 # m -> cm + + # Try encoding these now so we raise if they are bad + _encode_size(size, "size") + _encode_size(hprec, "horizontal precision") + _encode_size(vprec, "vertical precision") + + return cls(rdclass, rdtype, latitude, longitude, altitude, size, hprec, vprec) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + milliseconds = ( + self.latitude[0] * 3600000 + + self.latitude[1] * 60000 + + self.latitude[2] * 1000 + + self.latitude[3] + ) * self.latitude[4] + latitude = 0x80000000 + milliseconds + milliseconds = ( + self.longitude[0] * 3600000 + + self.longitude[1] * 60000 + + self.longitude[2] * 1000 + + self.longitude[3] + ) * self.longitude[4] + longitude = 0x80000000 + milliseconds + altitude = int(self.altitude) + 10000000 + size = _encode_size(self.size, "size") + hprec = _encode_size(self.horizontal_precision, "horizontal precision") + vprec = _encode_size(self.vertical_precision, "vertical precision") + wire = struct.pack( + "!BBBBIII", 0, size, hprec, vprec, latitude, longitude, altitude + ) + file.write(wire) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + ( + version, + size, + hprec, + vprec, + latitude, + longitude, + altitude, + ) = parser.get_struct("!BBBBIII") + if version != 0: + raise dns.exception.FormError("LOC version not zero") + if latitude < _MIN_LATITUDE or latitude > _MAX_LATITUDE: + raise dns.exception.FormError("bad latitude") + if latitude > 0x80000000: + latitude = (latitude - 0x80000000) / 3600000 + else: + latitude = -1 * (0x80000000 - latitude) / 3600000 + if longitude < _MIN_LONGITUDE or longitude > _MAX_LONGITUDE: + raise dns.exception.FormError("bad longitude") + if longitude > 0x80000000: + longitude = (longitude - 0x80000000) / 3600000 + else: + longitude = -1 * (0x80000000 - longitude) / 3600000 + altitude = float(altitude) - 10000000.0 + size = _decode_size(size, "size") + hprec = _decode_size(hprec, "horizontal precision") + vprec = _decode_size(vprec, "vertical precision") + return cls(rdclass, rdtype, latitude, longitude, altitude, size, hprec, vprec) + + @property + def float_latitude(self): + "latitude as a floating point value" + return _tuple_to_float(self.latitude) + + @property + def float_longitude(self): + "longitude as a floating point value" + return _tuple_to_float(self.longitude) diff --git 
a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/LP.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/LP.py new file mode 100644 index 0000000000000000000000000000000000000000..8a7c5125e0a8ffbf263f3ac4c2a4b4d455913722 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/LP.py @@ -0,0 +1,43 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class LP(dns.rdata.Rdata): + + """LP record""" + + # see: rfc6742.txt + + __slots__ = ["preference", "fqdn"] + + def __init__(self, rdclass, rdtype, preference, fqdn): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.fqdn = self._as_name(fqdn) + + def to_text(self, origin=None, relativize=True, **kw): + fqdn = self.fqdn.choose_relativity(origin, relativize) + return "%d %s" % (self.preference, fqdn) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + fqdn = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, preference, fqdn) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.preference)) + self.fqdn.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + fqdn = parser.get_name(origin) + return cls(rdclass, rdtype, preference, fqdn) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/MX.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/MX.py new file mode 100644 index 0000000000000000000000000000000000000000..1f9df21f15e2ea3d971f385fdb42f6bd961a0091 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/MX.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
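LP above is deliberately MX-shaped: a 16-bit preference followed by a domain name. The familiar MX case makes the shared text form easy to see; a sketch assuming a dnspython install that matches this vendored copy:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

mx = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.MX, "10 mail.example.")
print(mx.preference, mx.exchange)  # 10 mail.example.
print(mx.to_text())                # 10 mail.example.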
+ +import dns.immutable +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class MX(dns.rdtypes.mxbase.MXBase): + + """MX record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NID.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NID.py new file mode 100644 index 0000000000000000000000000000000000000000..ad54aca3ea1352fd44c0a676b9f20154ae424aa3 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NID.py @@ -0,0 +1,48 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import struct + +import dns.immutable +import dns.rdtypes.util + + +@dns.immutable.immutable +class NID(dns.rdata.Rdata): + + """NID record""" + + # see: rfc6742.txt + + __slots__ = ["preference", "nodeid"] + + def __init__(self, rdclass, rdtype, preference, nodeid): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + if isinstance(nodeid, bytes): + if len(nodeid) != 8: + raise ValueError("invalid nodeid") + self.nodeid = dns.rdata._hexify(nodeid, 4, b":") + else: + dns.rdtypes.util.parse_formatted_hex(nodeid, 4, 4, ":") + self.nodeid = nodeid + + def to_text(self, origin=None, relativize=True, **kw): + return f"{self.preference} {self.nodeid}" + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + nodeid = tok.get_identifier() + return cls(rdclass, rdtype, preference, nodeid) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.preference)) + file.write(dns.rdtypes.util.parse_formatted_hex(self.nodeid, 4, 4, ":")) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + nodeid = parser.get_remaining() + return cls(rdclass, rdtype, preference, nodeid) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NINFO.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NINFO.py new file mode 100644 index 0000000000000000000000000000000000000000..55bc56149d04080c8fd7f46857f2439533dbe2d9 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NINFO.py @@ -0,0 +1,27 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class NINFO(dns.rdtypes.txtbase.TXTBase): + + """NINFO record""" + + # see: draft-reid-dnsext-zs-01 diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NS.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NS.py new file mode 100644 index 0000000000000000000000000000000000000000..fe453f0d7df241523eede96f4386c5e40ab09733 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NS.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class NS(dns.rdtypes.nsbase.NSBase): + + """NS record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NSEC.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NSEC.py new file mode 100644 index 0000000000000000000000000000000000000000..a2d98fa7ca9832bd0ae88e167a5ac3b76227041f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NSEC.py @@ -0,0 +1,68 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdatatype +import dns.rdtypes.util + + +@dns.immutable.immutable +class Bitmap(dns.rdtypes.util.Bitmap): + type_name = "NSEC" + + +@dns.immutable.immutable +class NSEC(dns.rdata.Rdata): + + """NSEC record""" + + __slots__ = ["next", "windows"] + + def __init__(self, rdclass, rdtype, next, windows): + super().__init__(rdclass, rdtype) + self.next = self._as_name(next) + if not isinstance(windows, Bitmap): + windows = Bitmap(windows) + self.windows = tuple(windows.windows) + + def to_text(self, origin=None, relativize=True, **kw): + next = self.next.choose_relativity(origin, relativize) + text = Bitmap(self.windows).to_text() + return "{}{}".format(next, text) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + next = tok.get_name(origin, relativize, relativize_to) + windows = Bitmap.from_text(tok) + return cls(rdclass, rdtype, next, windows) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + # Note that NSEC downcasing, originally mandated by RFC 4034 + # section 6.2 was removed by RFC 6840 section 5.1. + self.next.to_wire(file, None, origin, False) + Bitmap(self.windows).to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + next = parser.get_name(origin) + bitmap = Bitmap.from_wire_parser(parser) + return cls(rdclass, rdtype, next, bitmap) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NSEC3.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NSEC3.py new file mode 100644 index 0000000000000000000000000000000000000000..d32fe169fe92dd5b2869e6edb2e3864a1eb09b53 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NSEC3.py @@ -0,0 +1,120 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
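NSEC above is the next owner name plus a type bitmap, and per the `_to_wire` comment the name is written without downcasing (RFC 6840 Section 5.1 dropped the RFC 4034 Section 6.2 rule). A parse sketch with made-up data, assuming a dnspython install that matches this vendored copy:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

nsec = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.NSEC, "host.example. A MX RRSIG NSEC"
)
print(nsec.next)       # host.example.
print(nsec.to_text())  # host.example. A MX RRSIG NSEC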
+ +import base64 +import binascii +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdatatype +import dns.rdtypes.util + +b32_hex_to_normal = bytes.maketrans( + b"0123456789ABCDEFGHIJKLMNOPQRSTUV", b"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567" +) +b32_normal_to_hex = bytes.maketrans( + b"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567", b"0123456789ABCDEFGHIJKLMNOPQRSTUV" +) + +# hash algorithm constants +SHA1 = 1 + +# flag constants +OPTOUT = 1 + + +@dns.immutable.immutable +class Bitmap(dns.rdtypes.util.Bitmap): + type_name = "NSEC3" + + +@dns.immutable.immutable +class NSEC3(dns.rdata.Rdata): + + """NSEC3 record""" + + __slots__ = ["algorithm", "flags", "iterations", "salt", "next", "windows"] + + def __init__( + self, rdclass, rdtype, algorithm, flags, iterations, salt, next, windows + ): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_uint8(algorithm) + self.flags = self._as_uint8(flags) + self.iterations = self._as_uint16(iterations) + self.salt = self._as_bytes(salt, True, 255) + self.next = self._as_bytes(next, True, 255) + if not isinstance(windows, Bitmap): + windows = Bitmap(windows) + self.windows = tuple(windows.windows) + + def to_text(self, origin=None, relativize=True, **kw): + next = base64.b32encode(self.next).translate(b32_normal_to_hex).lower().decode() + next = next.rstrip("=") + if self.salt == b"": + salt = "-" + else: + salt = binascii.hexlify(self.salt).decode() + text = Bitmap(self.windows).to_text() + return "%u %u %u %s %s%s" % ( + self.algorithm, + self.flags, + self.iterations, + salt, + next, + text, + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_uint8() + flags = tok.get_uint8() + iterations = tok.get_uint16() + salt = tok.get_string() + if salt == "-": + salt = b"" + else: + salt = binascii.unhexlify(salt.encode("ascii")) + next = tok.get_string().encode("ascii").upper().translate(b32_hex_to_normal) + if next.endswith(b"="): + raise binascii.Error("Incorrect padding") + if len(next) % 8 != 0: + next += b"=" * (8 - len(next) % 8) + next = base64.b32decode(next) + bitmap = Bitmap.from_text(tok) + return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, bitmap) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.salt) + file.write(struct.pack("!BBHB", self.algorithm, self.flags, self.iterations, l)) + file.write(self.salt) + l = len(self.next) + file.write(struct.pack("!B", l)) + file.write(self.next) + Bitmap(self.windows).to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (algorithm, flags, iterations) = parser.get_struct("!BBH") + salt = parser.get_counted_bytes() + next = parser.get_counted_bytes() + bitmap = Bitmap.from_wire_parser(parser) + return cls(rdclass, rdtype, algorithm, flags, iterations, salt, next, bitmap) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py new file mode 100644 index 0000000000000000000000000000000000000000..1a0c0e0849f5f8d82e7850400e32a4ad69308389 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/NSEC3PARAM.py @@ -0,0 +1,70 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. 
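The two `bytes.maketrans` tables above let NSEC3 reuse `base64.b32encode`/`b32decode` for the base32hex alphabet its hashed owner names use: encode with the standard alphabet, then translate character-for-character. A stdlib-only sketch of the encode direction:

import base64

b32_normal_to_hex = bytes.maketrans(
    b"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567", b"0123456789ABCDEFGHIJKLMNOPQRSTUV"
)

digest = bytes.fromhex("123456")  # placeholder hash bytes
b32 = base64.b32encode(digest)    # standard base32: b'CI2FM==='
print(b32.translate(b32_normal_to_hex).lower().decode().rstrip("="))  # 28q5c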
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii +import struct + +import dns.exception +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class NSEC3PARAM(dns.rdata.Rdata): + + """NSEC3PARAM record""" + + __slots__ = ["algorithm", "flags", "iterations", "salt"] + + def __init__(self, rdclass, rdtype, algorithm, flags, iterations, salt): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_uint8(algorithm) + self.flags = self._as_uint8(flags) + self.iterations = self._as_uint16(iterations) + self.salt = self._as_bytes(salt, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + if self.salt == b"": + salt = "-" + else: + salt = binascii.hexlify(self.salt).decode() + return "%u %u %u %s" % (self.algorithm, self.flags, self.iterations, salt) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_uint8() + flags = tok.get_uint8() + iterations = tok.get_uint16() + salt = tok.get_string() + if salt == "-": + salt = "" + else: + salt = binascii.unhexlify(salt.encode()) + return cls(rdclass, rdtype, algorithm, flags, iterations, salt) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.salt) + file.write(struct.pack("!BBHB", self.algorithm, self.flags, self.iterations, l)) + file.write(self.salt) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (algorithm, flags, iterations) = parser.get_struct("!BBH") + salt = parser.get_counted_bytes() + return cls(rdclass, rdtype, algorithm, flags, iterations, salt) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py new file mode 100644 index 0000000000000000000000000000000000000000..e5e2572751cb12b434a4d385bd99d262d48296b1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/OPENPGPKEY.py @@ -0,0 +1,54 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2016 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
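NSEC3PARAM above uses `-` as the text form of an empty salt (the `salt == b""` branch in `to_text`). A round-trip sketch using the commonly recommended `1 0 0 -` parameters (RFC 9276), assuming a dnspython install that matches this vendored copy:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

p = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.NSEC3PARAM, "1 0 0 -")
print(p.salt)       # b'': the "-" parses to an empty salt
print(p.to_text())  # 1 0 0 -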
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class OPENPGPKEY(dns.rdata.Rdata): + + """OPENPGPKEY record""" + + # see: RFC 7929 + + def __init__(self, rdclass, rdtype, key): + super().__init__(rdclass, rdtype) + self.key = self._as_bytes(key) + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._base64ify(self.key, chunksize=None, **kw) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + b64 = tok.concatenate_remaining_identifiers().encode() + key = base64.b64decode(b64) + return cls(rdclass, rdtype, key) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.key) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + key = parser.get_remaining() + return cls(rdclass, rdtype, key) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/OPT.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/OPT.py new file mode 100644 index 0000000000000000000000000000000000000000..d70e5373d35bfaeec124e32226d0dec42077cdc5 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/OPT.py @@ -0,0 +1,78 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.edns +import dns.exception +import dns.immutable +import dns.rdata + +# We don't implement from_text, and that's ok. +# pylint: disable=abstract-method + + +@dns.immutable.immutable +class OPT(dns.rdata.Rdata): + + """OPT record""" + + __slots__ = ["options"] + + def __init__(self, rdclass, rdtype, options): + """Initialize an OPT rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata, + which is also the payload size. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. 
+ + *options*, a tuple of ``dns.edns.Option`` objects + """ + + super().__init__(rdclass, rdtype) + + def as_option(option): + if not isinstance(option, dns.edns.Option): + raise ValueError("option is not a dns.edns.option") + return option + + self.options = self._as_tuple(options, as_option) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + for opt in self.options: + owire = opt.to_wire() + file.write(struct.pack("!HH", opt.otype, len(owire))) + file.write(owire) + + def to_text(self, origin=None, relativize=True, **kw): + return " ".join(opt.to_text() for opt in self.options) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + options = [] + while parser.remaining() > 0: + (otype, olen) = parser.get_struct("!HH") + with parser.restrict_to(olen): + opt = dns.edns.option_from_wire_parser(otype, parser) + options.append(opt) + return cls(rdclass, rdtype, options) + + @property + def payload(self): + "payload size" + return self.rdclass diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/PTR.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/PTR.py new file mode 100644 index 0000000000000000000000000000000000000000..7fd5547d4521bd2774e548693f73b882b415c911 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/PTR.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class PTR(dns.rdtypes.nsbase.NSBase): + + """PTR record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/RP.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/RP.py new file mode 100644 index 0000000000000000000000000000000000000000..9c64c6e2283766dd37fcd3f344adae9a524bae28 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/RP.py @@ -0,0 +1,59 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS.
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata + + +@dns.immutable.immutable +class RP(dns.rdata.Rdata): + + """RP record""" + + # see: RFC 1183 + + __slots__ = ["mbox", "txt"] + + def __init__(self, rdclass, rdtype, mbox, txt): + super().__init__(rdclass, rdtype) + self.mbox = self._as_name(mbox) + self.txt = self._as_name(txt) + + def to_text(self, origin=None, relativize=True, **kw): + mbox = self.mbox.choose_relativity(origin, relativize) + txt = self.txt.choose_relativity(origin, relativize) + return "{} {}".format(str(mbox), str(txt)) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + mbox = tok.get_name(origin, relativize, relativize_to) + txt = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, mbox, txt) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.mbox.to_wire(file, None, origin, canonicalize) + self.txt.to_wire(file, None, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + mbox = parser.get_name(origin) + txt = parser.get_name(origin) + return cls(rdclass, rdtype, mbox, txt) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/RRSIG.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/RRSIG.py new file mode 100644 index 0000000000000000000000000000000000000000..116050266ea661a101b3dd27e3a1caf6fd603955 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/RRSIG.py @@ -0,0 +1,159 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
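The RRSIG implementation that follows stores expiration and inception as 32-bit POSIX timestamps and renders them in the RFC 4034 YYYYMMDDHHMMSS text form. A minimal round-trip sketch of that convention using only the standard library (the helper names are illustrative, not dnspython's; unlike the sigtime_to_posixtime below, this simplified version handles only the 14-digit form):

import calendar
import time

def sigtime_to_posix(s):
    # RFC 4034 section 3.2: signature times are YYYYMMDDHHMMSS in UTC.
    return calendar.timegm(time.strptime(s, "%Y%m%d%H%M%S"))

def posix_to_sigtime(t):
    return time.strftime("%Y%m%d%H%M%S", time.gmtime(t))

assert posix_to_sigtime(sigtime_to_posix("20230101000000")) == "20230101000000"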
+ +import base64 +import calendar +import struct +import time + +import dns.dnssectypes +import dns.exception +import dns.immutable +import dns.rdata +import dns.rdatatype + + +class BadSigTime(dns.exception.DNSException): + + """Time in DNS SIG or RRSIG resource record cannot be parsed.""" + + +def sigtime_to_posixtime(what): + if len(what) <= 10 and what.isdigit(): + return int(what) + if len(what) != 14: + raise BadSigTime + year = int(what[0:4]) + month = int(what[4:6]) + day = int(what[6:8]) + hour = int(what[8:10]) + minute = int(what[10:12]) + second = int(what[12:14]) + return calendar.timegm((year, month, day, hour, minute, second, 0, 0, 0)) + + +def posixtime_to_sigtime(what): + return time.strftime("%Y%m%d%H%M%S", time.gmtime(what)) + + +@dns.immutable.immutable +class RRSIG(dns.rdata.Rdata): + + """RRSIG record""" + + __slots__ = [ + "type_covered", + "algorithm", + "labels", + "original_ttl", + "expiration", + "inception", + "key_tag", + "signer", + "signature", + ] + + def __init__( + self, + rdclass, + rdtype, + type_covered, + algorithm, + labels, + original_ttl, + expiration, + inception, + key_tag, + signer, + signature, + ): + super().__init__(rdclass, rdtype) + self.type_covered = self._as_rdatatype(type_covered) + self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) + self.labels = self._as_uint8(labels) + self.original_ttl = self._as_ttl(original_ttl) + self.expiration = self._as_uint32(expiration) + self.inception = self._as_uint32(inception) + self.key_tag = self._as_uint16(key_tag) + self.signer = self._as_name(signer) + self.signature = self._as_bytes(signature) + + def covers(self): + return self.type_covered + + def to_text(self, origin=None, relativize=True, **kw): + return "%s %d %d %d %s %s %d %s %s" % ( + dns.rdatatype.to_text(self.type_covered), + self.algorithm, + self.labels, + self.original_ttl, + posixtime_to_sigtime(self.expiration), + posixtime_to_sigtime(self.inception), + self.key_tag, + self.signer.choose_relativity(origin, relativize), + dns.rdata._base64ify(self.signature, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + type_covered = dns.rdatatype.from_text(tok.get_string()) + algorithm = dns.dnssectypes.Algorithm.from_text(tok.get_string()) + labels = tok.get_int() + original_ttl = tok.get_ttl() + expiration = sigtime_to_posixtime(tok.get_string()) + inception = sigtime_to_posixtime(tok.get_string()) + key_tag = tok.get_int() + signer = tok.get_name(origin, relativize, relativize_to) + b64 = tok.concatenate_remaining_identifiers().encode() + signature = base64.b64decode(b64) + return cls( + rdclass, + rdtype, + type_covered, + algorithm, + labels, + original_ttl, + expiration, + inception, + key_tag, + signer, + signature, + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack( + "!HBBIIIH", + self.type_covered, + self.algorithm, + self.labels, + self.original_ttl, + self.expiration, + self.inception, + self.key_tag, + ) + file.write(header) + self.signer.to_wire(file, None, origin, canonicalize) + file.write(self.signature) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!HBBIIIH") + signer = parser.get_name(origin) + signature = parser.get_remaining() + return cls(rdclass, rdtype, *header, signer, signature) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/RT.py 
b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/RT.py new file mode 100644 index 0000000000000000000000000000000000000000..950f2a066fb898df5bcd34a11df953c7d6b54228 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/RT.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class RT(dns.rdtypes.mxbase.UncompressedDowncasingMX): + + """RT record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/SMIMEA.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/SMIMEA.py new file mode 100644 index 0000000000000000000000000000000000000000..55d87bf85cbe9d9f98bfddf53e2646db789742ca --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/SMIMEA.py @@ -0,0 +1,9 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.tlsabase + + +@dns.immutable.immutable +class SMIMEA(dns.rdtypes.tlsabase.TLSABase): + """SMIMEA record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/SOA.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/SOA.py new file mode 100644 index 0000000000000000000000000000000000000000..bde55e15fa53ccecc33f6fcabef589aef293d18f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/SOA.py @@ -0,0 +1,87 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
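The SOA rdata defined next writes mname and rname as (possibly compressed) domain names and then packs serial, refresh, retry, expire, and minimum as five big-endian unsigned 32-bit integers, per RFC 1035 section 3.3.13. A small sketch of that fixed-size tail, with made-up example values:

import struct

# serial, refresh, retry, expire, minimum -- all uint32, network byte order
tail = struct.pack("!IIIII", 2023120501, 7200, 3600, 1209600, 300)
assert len(tail) == 20  # five 4-byte integers
assert struct.unpack("!IIIII", tail) == (2023120501, 7200, 3600, 1209600, 300)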
+ +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata + + +@dns.immutable.immutable +class SOA(dns.rdata.Rdata): + + """SOA record""" + + # see: RFC 1035 + + __slots__ = ["mname", "rname", "serial", "refresh", "retry", "expire", "minimum"] + + def __init__( + self, rdclass, rdtype, mname, rname, serial, refresh, retry, expire, minimum + ): + super().__init__(rdclass, rdtype) + self.mname = self._as_name(mname) + self.rname = self._as_name(rname) + self.serial = self._as_uint32(serial) + self.refresh = self._as_ttl(refresh) + self.retry = self._as_ttl(retry) + self.expire = self._as_ttl(expire) + self.minimum = self._as_ttl(minimum) + + def to_text(self, origin=None, relativize=True, **kw): + mname = self.mname.choose_relativity(origin, relativize) + rname = self.rname.choose_relativity(origin, relativize) + return "%s %s %d %d %d %d %d" % ( + mname, + rname, + self.serial, + self.refresh, + self.retry, + self.expire, + self.minimum, + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + mname = tok.get_name(origin, relativize, relativize_to) + rname = tok.get_name(origin, relativize, relativize_to) + serial = tok.get_uint32() + refresh = tok.get_ttl() + retry = tok.get_ttl() + expire = tok.get_ttl() + minimum = tok.get_ttl() + return cls( + rdclass, rdtype, mname, rname, serial, refresh, retry, expire, minimum + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.mname.to_wire(file, compress, origin, canonicalize) + self.rname.to_wire(file, compress, origin, canonicalize) + five_ints = struct.pack( + "!IIIII", self.serial, self.refresh, self.retry, self.expire, self.minimum + ) + file.write(five_ints) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + mname = parser.get_name(origin) + rname = parser.get_name(origin) + return cls(rdclass, rdtype, mname, rname, *parser.get_struct("!IIIII")) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/SPF.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/SPF.py new file mode 100644 index 0000000000000000000000000000000000000000..c403589a214cde2426492f015a629ddee732d59f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/SPF.py @@ -0,0 +1,27 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
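SPF (and TXT, later in this diff) simply subclass dns.rdtypes.txtbase.TXTBase, which models the rdata as a sequence of DNS character-strings. Each character-string carries at most 255 bytes (RFC 1035 section 3.3), so longer payloads are split into consecutive strings; a sketch of that chunking under those assumptions (the helper name is illustrative, not dnspython's):

def to_character_strings(data: bytes):
    # Split a payload into <=255-byte chunks, one per DNS character-string.
    return [data[i : i + 255] for i in range(0, len(data), 255)]

assert [len(c) for c in to_character_strings(b"v=spf1 " + b"x" * 600)] == [255, 255, 97]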
+ +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class SPF(dns.rdtypes.txtbase.TXTBase): + + """SPF record""" + + # see: RFC 4408 diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/SSHFP.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/SSHFP.py new file mode 100644 index 0000000000000000000000000000000000000000..67805452d6b7e17cf40dcc792130972f1b2b98b2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/SSHFP.py @@ -0,0 +1,69 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii +import struct + +import dns.immutable +import dns.rdata +import dns.rdatatype + + +@dns.immutable.immutable +class SSHFP(dns.rdata.Rdata): + + """SSHFP record""" + + # See RFC 4255 + + __slots__ = ["algorithm", "fp_type", "fingerprint"] + + def __init__(self, rdclass, rdtype, algorithm, fp_type, fingerprint): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_uint8(algorithm) + self.fp_type = self._as_uint8(fp_type) + self.fingerprint = self._as_bytes(fingerprint, True) + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop("chunksize", 128) + return "%d %d %s" % ( + self.algorithm, + self.fp_type, + dns.rdata._hexify(self.fingerprint, chunksize=chunksize, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_uint8() + fp_type = tok.get_uint8() + fingerprint = tok.concatenate_remaining_identifiers().encode() + fingerprint = binascii.unhexlify(fingerprint) + return cls(rdclass, rdtype, algorithm, fp_type, fingerprint) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!BB", self.algorithm, self.fp_type) + file.write(header) + file.write(self.fingerprint) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("BB") + fingerprint = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], fingerprint) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/TKEY.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/TKEY.py new file mode 100644 index 0000000000000000000000000000000000000000..d5f5fc4581e62eb29865a26f0c0f9c84056ab903 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/TKEY.py @@ -0,0 +1,143 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import struct + +import dns.exception +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class TKEY(dns.rdata.Rdata): + + """TKEY Record""" + + __slots__ = [ + "algorithm", + "inception", + "expiration", + "mode", + "error", + "key", + "other", + ] + + def __init__( + self, + rdclass, + rdtype, + algorithm, + inception, + expiration, + mode, + error, + key, + other=b"", + ): + super().__init__(rdclass, rdtype) + self.algorithm = self._as_name(algorithm) + self.inception = self._as_uint32(inception) + self.expiration = self._as_uint32(expiration) + self.mode = self._as_uint16(mode) + self.error = self._as_uint16(error) + self.key = self._as_bytes(key) + self.other = self._as_bytes(other) + + def to_text(self, origin=None, relativize=True, **kw): + _algorithm = self.algorithm.choose_relativity(origin, relativize) + text = "%s %u %u %u %u %s" % ( + str(_algorithm), + self.inception, + self.expiration, + self.mode, + self.error, + dns.rdata._base64ify(self.key, 0), + ) + if len(self.other) > 0: + text += " %s" % (dns.rdata._base64ify(self.other, 0)) + + return text + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_name(relativize=False) + inception = tok.get_uint32() + expiration = tok.get_uint32() + mode = tok.get_uint16() + error = tok.get_uint16() + key_b64 = tok.get_string().encode() + key = base64.b64decode(key_b64) + other_b64 = tok.concatenate_remaining_identifiers(True).encode() + other = base64.b64decode(other_b64) + + return cls( + rdclass, rdtype, algorithm, inception, expiration, mode, error, key, other + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.algorithm.to_wire(file, compress, origin) + file.write( + struct.pack("!IIHH", self.inception, self.expiration, self.mode, self.error) + ) + file.write(struct.pack("!H", len(self.key))) + file.write(self.key) + file.write(struct.pack("!H", len(self.other))) + if len(self.other) > 0: + file.write(self.other) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + algorithm = parser.get_name(origin) + inception, expiration, mode, error = parser.get_struct("!IIHH") + key = parser.get_counted_bytes(2) + other = parser.get_counted_bytes(2) + + return cls( + rdclass, rdtype, algorithm, inception, expiration, mode, error, key, other + ) + + # Constants for the mode field - from RFC 2930: + # 2.5 The Mode Field + # + # The mode field specifies the general scheme for key agreement or + # the purpose of the TKEY DNS message. Servers and resolvers + # supporting this specification MUST implement the Diffie-Hellman key + # agreement mode and the key deletion mode for queries. All other + # modes are OPTIONAL. 
A server supporting TKEY that receives a TKEY +# request with a mode it does not support returns the BADMODE error. +# The following values of the Mode octet are defined, available, or +# reserved: +# +# Value Description +# ----- ----------- +# 0 - reserved, see section 7 +# 1 server assignment +# 2 Diffie-Hellman exchange +# 3 GSS-API negotiation +# 4 resolver assignment +# 5 key deletion +# 6-65534 - available, see section 7 +# 65535 - reserved, see section 7 + SERVER_ASSIGNMENT = 1 + DIFFIE_HELLMAN_EXCHANGE = 2 + GSSAPI_NEGOTIATION = 3 + RESOLVER_ASSIGNMENT = 4 + KEY_DELETION = 5 diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/TLSA.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/TLSA.py new file mode 100644 index 0000000000000000000000000000000000000000..c9ba199112f76f3c2cc797299ba6fe70e9550e31 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/TLSA.py @@ -0,0 +1,10 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.tlsabase + + +@dns.immutable.immutable +class TLSA(dns.rdtypes.tlsabase.TLSABase): + + """TLSA record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/TSIG.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/TSIG.py new file mode 100644 index 0000000000000000000000000000000000000000..1ae87ebe61de9fd42fa5b04bbcef412343e7cdfb --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/TSIG.py @@ -0,0 +1,161 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import base64 +import struct + +import dns.exception +import dns.immutable +import dns.rcode +import dns.rdata + + +@dns.immutable.immutable +class TSIG(dns.rdata.Rdata): + + """TSIG record""" + + __slots__ = [ + "algorithm", + "time_signed", + "fudge", + "mac", + "original_id", + "error", + "other", + ] + + def __init__( + self, + rdclass, + rdtype, + algorithm, + time_signed, + fudge, + mac, + original_id, + error, + other, + ): + """Initialize a TSIG rdata. + + *rdclass*, an ``int`` is the rdataclass of the Rdata. + + *rdtype*, an ``int`` is the rdatatype of the Rdata. + + *algorithm*, a ``dns.name.Name``. + + *time_signed*, an ``int``. + + *fudge*, an ``int``.
+ + *mac*, a ``bytes`` + + *original_id*, an ``int`` + + *error*, an ``int`` + + *other*, a ``bytes`` + """ + + super().__init__(rdclass, rdtype) + self.algorithm = self._as_name(algorithm) + self.time_signed = self._as_uint48(time_signed) + self.fudge = self._as_uint16(fudge) + self.mac = self._as_bytes(mac) + self.original_id = self._as_uint16(original_id) + self.error = dns.rcode.Rcode.make(error) + self.other = self._as_bytes(other) + + def to_text(self, origin=None, relativize=True, **kw): + algorithm = self.algorithm.choose_relativity(origin, relativize) + error = dns.rcode.to_text(self.error, True) + text = ( + f"{algorithm} {self.time_signed} {self.fudge} " + + f"{len(self.mac)} {dns.rdata._base64ify(self.mac, 0)} " + + f"{self.original_id} {error} {len(self.other)}" + ) + if self.other: + text += f" {dns.rdata._base64ify(self.other, 0)}" + return text + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + algorithm = tok.get_name(relativize=False) + time_signed = tok.get_uint48() + fudge = tok.get_uint16() + mac_len = tok.get_uint16() + mac = base64.b64decode(tok.get_string()) + if len(mac) != mac_len: + raise SyntaxError("invalid MAC") + original_id = tok.get_uint16() + error = dns.rcode.from_text(tok.get_string()) + other_len = tok.get_uint16() + if other_len > 0: + other = base64.b64decode(tok.get_string()) + if len(other) != other_len: + raise SyntaxError("invalid other data") + else: + other = b"" + return cls( + rdclass, + rdtype, + algorithm, + time_signed, + fudge, + mac, + original_id, + error, + other, + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.algorithm.to_wire(file, None, origin, False) + file.write( + struct.pack( + "!HIHH", + (self.time_signed >> 32) & 0xFFFF, + self.time_signed & 0xFFFFFFFF, + self.fudge, + len(self.mac), + ) + ) + file.write(self.mac) + file.write(struct.pack("!HHH", self.original_id, self.error, len(self.other))) + file.write(self.other) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + algorithm = parser.get_name() + time_signed = parser.get_uint48() + fudge = parser.get_uint16() + mac = parser.get_counted_bytes(2) + (original_id, error) = parser.get_struct("!HH") + other = parser.get_counted_bytes(2) + return cls( + rdclass, + rdtype, + algorithm, + time_signed, + fudge, + mac, + original_id, + error, + other, + ) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/TXT.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/TXT.py new file mode 100644 index 0000000000000000000000000000000000000000..f4e619300f574f6059f4e20210e120f01098a4ba --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/TXT.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.txtbase + + +@dns.immutable.immutable +class TXT(dns.rdtypes.txtbase.TXTBase): + + """TXT record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/URI.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/URI.py new file mode 100644 index 0000000000000000000000000000000000000000..7463e277dc19db1f71f66fbd89abcaa29c2f8e2b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/URI.py @@ -0,0 +1,80 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# Copyright (C) 2015 Red Hat, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +@dns.immutable.immutable +class URI(dns.rdata.Rdata): + + """URI record""" + + # see RFC 7553 + + __slots__ = ["priority", "weight", "target"] + + def __init__(self, rdclass, rdtype, priority, weight, target): + super().__init__(rdclass, rdtype) + self.priority = self._as_uint16(priority) + self.weight = self._as_uint16(weight) + self.target = self._as_bytes(target, True) + if len(self.target) == 0: + raise dns.exception.SyntaxError("URI target cannot be empty") + + def to_text(self, origin=None, relativize=True, **kw): + return '%d %d "%s"' % (self.priority, self.weight, self.target.decode()) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + priority = tok.get_uint16() + weight = tok.get_uint16() + target = tok.get().unescape() + if not (target.is_quoted_string() or target.is_identifier()): + raise dns.exception.SyntaxError("URI target must be a string") + return cls(rdclass, rdtype, priority, weight, target.value) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + two_ints = struct.pack("!HH", self.priority, self.weight) + file.write(two_ints) + file.write(self.target) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (priority, weight) = parser.get_struct("!HH") + target = parser.get_remaining() + if len(target) == 0: + raise dns.exception.FormError("URI target may not be empty") + return cls(rdclass, rdtype, priority, weight, target) + + def _processing_priority(self): + return self.priority + + def _processing_weight(self): + return self.weight + + @classmethod + def _processing_order(cls, iterable): + return 
dns.rdtypes.util.weighted_processing_order(iterable) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/X25.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/X25.py new file mode 100644 index 0000000000000000000000000000000000000000..06c14534543664abcc73fbdeb8fbac7aff6e4aee --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/X25.py @@ -0,0 +1,58 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class X25(dns.rdata.Rdata): + + """X25 record""" + + # see RFC 1183 + + __slots__ = ["address"] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_bytes(address, True, 255) + + def to_text(self, origin=None, relativize=True, **kw): + return '"%s"' % dns.rdata._escapify(self.address) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_string() + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + l = len(self.address) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(self.address) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_counted_bytes() + return cls(rdclass, rdtype, address) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/ZONEMD.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/ZONEMD.py new file mode 100644 index 0000000000000000000000000000000000000000..3062843b6342fec269aa635088bef9a5eacb2b20 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/ZONEMD.py @@ -0,0 +1,67 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import binascii +import struct + +import dns.immutable +import dns.rdata +import dns.rdatatype +import dns.zonetypes + + +@dns.immutable.immutable +class ZONEMD(dns.rdata.Rdata): + + """ZONEMD record""" + + # See RFC 8976 + + __slots__ = ["serial", "scheme", "hash_algorithm", "digest"] + + def __init__(self, rdclass, rdtype, serial, scheme, hash_algorithm, digest): + super().__init__(rdclass, rdtype) + self.serial = self._as_uint32(serial) + self.scheme = dns.zonetypes.DigestScheme.make(scheme) + self.hash_algorithm = dns.zonetypes.DigestHashAlgorithm.make(hash_algorithm) + self.digest = self._as_bytes(digest) + + if self.scheme == 0: # reserved, RFC 8976 Sec. 5.2 + raise ValueError("scheme 0 is reserved") + if self.hash_algorithm == 0: # reserved, RFC 8976 Sec. 
5.3 + raise ValueError("hash_algorithm 0 is reserved") + + hasher = dns.zonetypes._digest_hashers.get(self.hash_algorithm) + if hasher and hasher().digest_size != len(self.digest): + raise ValueError("digest length inconsistent with hash algorithm") + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop("chunksize", 128) + return "%d %d %d %s" % ( + self.serial, + self.scheme, + self.hash_algorithm, + dns.rdata._hexify(self.digest, chunksize=chunksize, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + serial = tok.get_uint32() + scheme = tok.get_uint8() + hash_algorithm = tok.get_uint8() + digest = tok.concatenate_remaining_identifiers().encode() + digest = binascii.unhexlify(digest) + return cls(rdclass, rdtype, serial, scheme, hash_algorithm, digest) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!IBB", self.serial, self.scheme, self.hash_algorithm) + file.write(header) + file.write(self.digest) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!IBB") + digest = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], digest) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__init__.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3824a0a09ebcef2a5102a48348d5a9e21f50b383 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__init__.py @@ -0,0 +1,68 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
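The ZONEMD constructor above rejects the reserved scheme and hash-algorithm value 0 and cross-checks the digest length against the declared hash algorithm. The same length check can be sketched with hashlib, assuming RFC 8976's hash-algorithm numbers (1 = SHA-384, 2 = SHA-512); the helper names here are illustrative, not dnspython's:

import hashlib

_hashers = {1: hashlib.sha384, 2: hashlib.sha512}  # RFC 8976 section 5.3

def digest_length_ok(hash_algorithm, digest):
    hasher = _hashers.get(hash_algorithm)
    # Unknown algorithms pass, mirroring the permissive check above.
    return hasher is None or hasher().digest_size == len(digest)

assert digest_length_ok(1, bytes(48)) and not digest_length_ok(2, bytes(48))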
+ +"""Class ANY (generic) rdata type classes.""" + +__all__ = [ + "AFSDB", + "AMTRELAY", + "AVC", + "CAA", + "CDNSKEY", + "CDS", + "CERT", + "CNAME", + "CSYNC", + "DLV", + "DNAME", + "DNSKEY", + "DS", + "EUI48", + "EUI64", + "GPOS", + "HINFO", + "HIP", + "ISDN", + "L32", + "L64", + "LOC", + "LP", + "MX", + "NID", + "NINFO", + "NS", + "NSEC", + "NSEC3", + "NSEC3PARAM", + "OPENPGPKEY", + "OPT", + "PTR", + "RP", + "RRSIG", + "RT", + "SMIMEA", + "SOA", + "SPF", + "SSHFP", + "TKEY", + "TLSA", + "TSIG", + "TXT", + "URI", + "X25", + "ZONEMD", +] diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6d4184bf0064d5dddf7d0fad90bd163b39758ff7 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/AFSDB.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8c2769ae87557f5656b6f12679e61d9c26d17fd7 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b66c9ffe39ab62a7b67a33ad7e24404968d8bd3f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/AVC.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f4154219394098e130b55855bf542ac91b19017f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CAA.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2a7c76079a42a7e3732773c35f7e95e64b119ae7 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2dfa7fb433d0f72427b8c9a04b205b017bb10110 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CDS.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..003c4acf4e2616d1c29e873190fdac83c0be337a Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CERT.cpython-38.pyc 
differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8014bd0a0589eed30105e9113565dc3b569dff48 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CNAME.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CSYNC.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CSYNC.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..738a64cc70ceb8abe2c8b3538f4fcd9102386a1f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/CSYNC.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/DLV.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/DLV.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aa877dbc14b39e1fb92fbe8346c876c43a9ea20d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/DLV.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/DNAME.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/DNAME.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..680f1cb4d34d03e96d4323cf59aae958c7ca53cb Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/DNAME.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3ade5ea04f3c0ee50d49a90ddd7c587ecee767d5 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/DS.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/DS.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3262be813bafc034088219c7b147cbe45b3c5ef8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/DS.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/EUI48.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/EUI48.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2154f5dc7951fbe2e36a388a7ef0f7a439ed8173 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/EUI48.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/EUI64.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/EUI64.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aa51bd8113a53537a0bd31de038ff9b76035a78d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/EUI64.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/GPOS.cpython-38.pyc 
b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/GPOS.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..412dd71ebbc434302ef995aa42bae3c1258a89d4 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/GPOS.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/HINFO.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/HINFO.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7accff61f31628f41cc010db7b10ba99a814818b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/HINFO.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/HIP.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/HIP.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c65ce86a247fdeff0eba14d21ce7e3b319c03fa3 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/HIP.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/ISDN.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/ISDN.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..81104a06b6c93f736e8fbf645bb908e4beeff23d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/ISDN.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/L32.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/L32.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d2850a56615c6fde9b003c0b70a3c9cccaf8cc62 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/L32.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/L64.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/L64.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cb4c034777e7868093a2d49b1d521998b12c2521 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/L64.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/LOC.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/LOC.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e598f3c52e0bdc314773506291b5d9f25b2d2ba3 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/LOC.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/LP.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/LP.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b31321cd8baee50eb992f065d055ba5bbaf2d107 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/LP.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/MX.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/MX.cpython-38.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..2e5893971d7f1dbc701cbad9dda7d8371ff8d165 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/MX.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NID.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NID.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..348d20938526adbd4287c85e4a7715081c7d5ce8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NID.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NINFO.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NINFO.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..252ac7b55d869deb446d9c015d07f7acb42ba634 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NINFO.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NS.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NS.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..59011cbd9dbe7d6e3edb44855f28a9ea9e6b1dc3 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NS.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NSEC.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NSEC.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..68bb1d5fa9851f41ad9831ad2355e6399e58b1b0 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NSEC.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..617e0d94dbc6d574b777760e41b83c38859e4f23 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2fa04409dacf3bc9065164f5b3f70a383d2d1db3 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e3cd22f3b1375e25001bc243ebb66f94022fa896 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/OPT.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/OPT.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c33a6a4b94d91ae21422a4d000eedecfa31093f4 Binary files 
/dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/OPT.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/PTR.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/PTR.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c882da75e53c6d4d7ff158908b8311d8d92ec615 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/PTR.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/RP.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/RP.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3359a4f6a652755d656f4025f0d0372098651871 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/RP.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/RRSIG.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/RRSIG.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ee01717dd2695b799d08909eca406b48e7af4307 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/RRSIG.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/RT.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/RT.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ce0f47d0b1b3c62316e3a87adfca1bc7a3e032ee Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/RT.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c98041bb3558e2440484d3e9100035d635a5aee8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/SOA.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/SOA.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..30b9b9db101e77b1ac1228f0b72b759c78eb424e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/SOA.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/SPF.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/SPF.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2585f6280e525bd0a1c1d779139c937286ec6be1 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/SPF.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/SSHFP.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/SSHFP.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a451ddf632c966454bb177171aa0b841eb92aea0 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/SSHFP.cpython-38.pyc differ diff --git 
a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/TKEY.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/TKEY.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..06d2a24bbeb13c5524a9a24bc0c02c06fe6f083f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/TKEY.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/TLSA.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/TLSA.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bf69d632abc3c3007be384cf428eebf06ed64abe Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/TLSA.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/TSIG.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/TSIG.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5d3d34c3c46083f6c9656be8c8f8c2cc8a463d1e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/TSIG.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/TXT.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/TXT.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..02515a8bceb542eed5dba0e1a732127741978e6d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/TXT.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/URI.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/URI.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5ea5f31a021aed8314b34d41c708a2c129fa1dc4 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/URI.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/X25.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/X25.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ee24b9b0b4106c4e5e27b0eb3ae8bce92470fd72 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/X25.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ae332cc3da6241e1975f35b631e6d30c6f98a670 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..78c5f0801c0a3ee0aaed69919ba038167396ce78 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/ANY/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/CH/A.py 
b/backend/test/lib/python3.8/site-packages/dns/rdtypes/CH/A.py new file mode 100644 index 0000000000000000000000000000000000000000..e457f38a08caaefb443397bbd70db40d4efdd488 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/CH/A.py @@ -0,0 +1,60 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.immutable +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class A(dns.rdata.Rdata): + + """A record for Chaosnet""" + + # domain: the domain of the address + # address: the 16-bit address + + __slots__ = ["domain", "address"] + + def __init__(self, rdclass, rdtype, domain, address): + super().__init__(rdclass, rdtype) + self.domain = self._as_name(domain) + self.address = self._as_uint16(address) + + def to_text(self, origin=None, relativize=True, **kw): + domain = self.domain.choose_relativity(origin, relativize) + return "%s %o" % (domain, self.address) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + domain = tok.get_name(origin, relativize, relativize_to) + address = tok.get_uint16(base=8) + return cls(rdclass, rdtype, domain, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.domain.to_wire(file, compress, origin, canonicalize) + pref = struct.pack("!H", self.address) + file.write(pref) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + domain = parser.get_name(origin) + address = parser.get_uint16() + return cls(rdclass, rdtype, domain, address) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/CH/__init__.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/CH/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0760c26c2c4c98be5615793d67e9480c982077bf --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/CH/__init__.py @@ -0,0 +1,22 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
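For reference: the Chaosnet A record added above stores a 16-bit address that from_text() reads in octal (get_uint16(base=8)) and to_text() renders with "%o". A minimal sketch of exercising it through dnspython's generic rdata API; the domain and address values are illustrative, not part of this commit:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

# Class-CH A text form is "<domain> <16-bit address in octal>".
rd = dns.rdata.from_text(dns.rdataclass.CH, dns.rdatatype.A, "addr.example. 0600")
print(rd.domain)     # addr.example.
print(rd.address)    # 384, i.e. 0o600
print(rd.to_text())  # "addr.example. 600" -- the address prints in octal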
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Class CH rdata type classes.""" + +__all__ = [ + "A", +] diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/CH/__pycache__/A.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/CH/__pycache__/A.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0330a7dfeb955fb683b6b9eb862eea43e8b2658d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/CH/__pycache__/A.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/CH/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/CH/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..da6f0f69c749853a5aae471aa3439d9ec6064670 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/CH/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/A.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/A.py new file mode 100644 index 0000000000000000000000000000000000000000..713d5eea653ccd285b38d9cd1cbe145d8d6fa49e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/A.py @@ -0,0 +1,52 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class A(dns.rdata.Rdata): + + """A record.""" + + __slots__ = ["address"] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_ipv4_address(address) + + def to_text(self, origin=None, relativize=True, **kw): + return self.address + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_identifier() + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(dns.ipv4.inet_aton(self.address)) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_remaining() + return cls(rdclass, rdtype, address) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/AAAA.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/AAAA.py new file mode 100644 index 0000000000000000000000000000000000000000..f8237b447a8ecec66c1d59e18a5f0ccda0086288 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/AAAA.py @@ -0,0 +1,52 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.exception +import dns.immutable +import dns.ipv6 +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class AAAA(dns.rdata.Rdata): + + """AAAA record.""" + + __slots__ = ["address"] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_ipv6_address(address) + + def to_text(self, origin=None, relativize=True, **kw): + return self.address + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_identifier() + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(dns.ipv6.inet_aton(self.address)) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_remaining() + return cls(rdclass, rdtype, address) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/APL.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/APL.py new file mode 100644 index 0000000000000000000000000000000000000000..f1bb01db199f8e46266f8c128aa8376903d4f337 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/APL.py @@ -0,0 +1,152 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. 
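The class-IN A and AAAA implementations above validate their addresses at construction time and write them as raw 4- or 16-byte strings on the wire. A hedged round-trip sketch against the public API; the addresses are documentation prefixes, not values from this commit:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

a = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A, "192.0.2.1")
wire = a.to_wire()  # b'\xc0\x00\x02\x01': the 4 raw address bytes
back = dns.rdata.from_wire(dns.rdataclass.IN, dns.rdatatype.A, wire, 0, len(wire))
assert back == a and back.to_text() == "192.0.2.1"

aaaa = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.AAAA, "2001:db8::1")
assert len(aaaa.to_wire()) == 16  # AAAA rdata is a raw 16-byte address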
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii +import codecs +import struct + +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.ipv6 +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class APLItem: + + """An APL list item.""" + + __slots__ = ["family", "negation", "address", "prefix"] + + def __init__(self, family, negation, address, prefix): + self.family = dns.rdata.Rdata._as_uint16(family) + self.negation = dns.rdata.Rdata._as_bool(negation) + if self.family == 1: + self.address = dns.rdata.Rdata._as_ipv4_address(address) + self.prefix = dns.rdata.Rdata._as_int(prefix, 0, 32) + elif self.family == 2: + self.address = dns.rdata.Rdata._as_ipv6_address(address) + self.prefix = dns.rdata.Rdata._as_int(prefix, 0, 128) + else: + self.address = dns.rdata.Rdata._as_bytes(address, max_length=127) + self.prefix = dns.rdata.Rdata._as_uint8(prefix) + + def __str__(self): + if self.negation: + return "!%d:%s/%s" % (self.family, self.address, self.prefix) + else: + return "%d:%s/%s" % (self.family, self.address, self.prefix) + + def to_wire(self, file): + if self.family == 1: + address = dns.ipv4.inet_aton(self.address) + elif self.family == 2: + address = dns.ipv6.inet_aton(self.address) + else: + address = binascii.unhexlify(self.address) + # + # Truncate least significant zero bytes. 
+ # + last = 0 + for i in range(len(address) - 1, -1, -1): + if address[i] != 0: + last = i + 1 + break + address = address[0:last] + l = len(address) + assert l < 128 + if self.negation: + l |= 0x80 + header = struct.pack("!HBB", self.family, self.prefix, l) + file.write(header) + file.write(address) + + +@dns.immutable.immutable +class APL(dns.rdata.Rdata): + + """APL record.""" + + # see: RFC 3123 + + __slots__ = ["items"] + + def __init__(self, rdclass, rdtype, items): + super().__init__(rdclass, rdtype) + for item in items: + if not isinstance(item, APLItem): + raise ValueError("item not an APLItem") + self.items = tuple(items) + + def to_text(self, origin=None, relativize=True, **kw): + return " ".join(map(str, self.items)) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + items = [] + for token in tok.get_remaining(): + item = token.unescape().value + if item[0] == "!": + negation = True + item = item[1:] + else: + negation = False + (family, rest) = item.split(":", 1) + family = int(family) + (address, prefix) = rest.split("/", 1) + prefix = int(prefix) + item = APLItem(family, negation, address, prefix) + items.append(item) + + return cls(rdclass, rdtype, items) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + for item in self.items: + item.to_wire(file) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + items = [] + while parser.remaining() > 0: + header = parser.get_struct("!HBB") + afdlen = header[2] + if afdlen > 127: + negation = True + afdlen -= 128 + else: + negation = False + address = parser.get_bytes(afdlen) + l = len(address) + if header[0] == 1: + if l < 4: + address += b"\x00" * (4 - l) + elif header[0] == 2: + if l < 16: + address += b"\x00" * (16 - l) + else: + # + # This isn't really right according to the RFC, but it + # seems better than throwing an exception + # + address = codecs.encode(address, "hex_codec") + item = APLItem(header[0], negation, address, header[1]) + items.append(item) + return cls(rdclass, rdtype, items) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/DHCID.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/DHCID.py new file mode 100644 index 0000000000000000000000000000000000000000..65f858977c248f025cb5116b8b29163583da92c5 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/DHCID.py @@ -0,0 +1,55 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
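The APL record completed above (RFC 3123) encodes each item as a family, prefix length, negation bit, and an address with trailing zero bytes truncated. A sketch of the text form, assuming the vendored dnspython 2.x API; the prefixes are illustrative:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

# A leading "!" negates an item; families 1 and 2 are IPv4 and IPv6.
apl = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.APL,
    "1:192.0.2.0/24 !1:192.0.2.64/28 2:2001:db8::/32",
)
for item in apl.items:
    print(item.family, item.negation, item.address, item.prefix)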
+ +import base64 + +import dns.exception +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class DHCID(dns.rdata.Rdata): + + """DHCID record""" + + # see: RFC 4701 + + __slots__ = ["data"] + + def __init__(self, rdclass, rdtype, data): + super().__init__(rdclass, rdtype) + self.data = self._as_bytes(data) + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._base64ify(self.data, **kw) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + b64 = tok.concatenate_remaining_identifiers().encode() + data = base64.b64decode(b64) + return cls(rdclass, rdtype, data) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.data) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + data = parser.get_remaining() + return cls(rdclass, rdtype, data) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/HTTPS.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/HTTPS.py new file mode 100644 index 0000000000000000000000000000000000000000..15464cbda7f387d8b73d15605bfebc49d1402c27 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/HTTPS.py @@ -0,0 +1,9 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.svcbbase + + +@dns.immutable.immutable +class HTTPS(dns.rdtypes.svcbbase.SVCBBase): + """HTTPS record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/IPSECKEY.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/IPSECKEY.py new file mode 100644 index 0000000000000000000000000000000000000000..8bb2bcb6b6915d059bbe1d8693b535f021e909a8 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/IPSECKEY.py @@ -0,0 +1,92 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
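DHCID above carries an opaque base64 payload that dnspython stores but does not interpret (RFC 4701), while the one-line HTTPS class delegates all parsing to dns.rdtypes.svcbbase.SVCBBase. A sketch; the base64 blob is the RFC 4701 example digest and is illustrative only:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

dhcid = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.DHCID,
    "AAIBY2/AuCccgoJbsaxcQc9TUapptP69lOjxfNuVAA2kjEA=",
)
print(len(dhcid.data))  # decoded byte count; contents are opaque to dnspython

# HTTPS/SVCB behavior lives entirely in the shared SVCBBase class:
https = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.HTTPS, "1 .")
print(https.priority, https.target)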
+ +import base64 +import struct + +import dns.exception +import dns.immutable +import dns.rdtypes.util + + +class Gateway(dns.rdtypes.util.Gateway): + name = "IPSECKEY gateway" + + +@dns.immutable.immutable +class IPSECKEY(dns.rdata.Rdata): + + """IPSECKEY record""" + + # see: RFC 4025 + + __slots__ = ["precedence", "gateway_type", "algorithm", "gateway", "key"] + + def __init__( + self, rdclass, rdtype, precedence, gateway_type, algorithm, gateway, key + ): + super().__init__(rdclass, rdtype) + gateway = Gateway(gateway_type, gateway) + self.precedence = self._as_uint8(precedence) + self.gateway_type = gateway.type + self.algorithm = self._as_uint8(algorithm) + self.gateway = gateway.gateway + self.key = self._as_bytes(key) + + def to_text(self, origin=None, relativize=True, **kw): + gateway = Gateway(self.gateway_type, self.gateway).to_text(origin, relativize) + return "%d %d %d %s %s" % ( + self.precedence, + self.gateway_type, + self.algorithm, + gateway, + dns.rdata._base64ify(self.key, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + precedence = tok.get_uint8() + gateway_type = tok.get_uint8() + algorithm = tok.get_uint8() + gateway = Gateway.from_text( + gateway_type, tok, origin, relativize, relativize_to + ) + b64 = tok.concatenate_remaining_identifiers().encode() + key = base64.b64decode(b64) + return cls( + rdclass, rdtype, precedence, gateway_type, algorithm, gateway.gateway, key + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!BBB", self.precedence, self.gateway_type, self.algorithm) + file.write(header) + Gateway(self.gateway_type, self.gateway).to_wire( + file, compress, origin, canonicalize + ) + file.write(self.key) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!BBB") + gateway_type = header[1] + gateway = Gateway.from_wire_parser(gateway_type, parser, origin) + key = parser.get_remaining() + return cls( + rdclass, rdtype, header[0], gateway_type, header[2], gateway.gateway, key + ) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/KX.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/KX.py new file mode 100644 index 0000000000000000000000000000000000000000..a03d1d51496bf2d9e1f412add76a755f64b99f4e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/KX.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
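The IPSECKEY rdata above ties the gateway's wire representation to gateway_type through the shared dns.rdtypes.util.Gateway helper. A sketch using the RFC 4025 example values, included here purely as an illustration:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

# precedence=10, gateway_type=1 (IPv4 gateway), algorithm=2, gateway, key.
rd = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.IPSECKEY,
    "10 1 2 192.0.2.38 AQNRU3mG7TVTO2BkR47usntb102uFJtugbo6BSGvgqt4AQ==",
)
print(rd.gateway_type, rd.gateway)  # 1 192.0.2.38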
+ +import dns.immutable +import dns.rdtypes.mxbase + + +@dns.immutable.immutable +class KX(dns.rdtypes.mxbase.UncompressedDowncasingMX): + + """KX record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/NAPTR.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/NAPTR.py new file mode 100644 index 0000000000000000000000000000000000000000..1f1f5a12678af763ab0a458c141fc6d05f887615 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/NAPTR.py @@ -0,0 +1,111 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +def _write_string(file, s): + l = len(s) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(s) + + +@dns.immutable.immutable +class NAPTR(dns.rdata.Rdata): + + """NAPTR record""" + + # see: RFC 3403 + + __slots__ = ["order", "preference", "flags", "service", "regexp", "replacement"] + + def __init__( + self, rdclass, rdtype, order, preference, flags, service, regexp, replacement + ): + super().__init__(rdclass, rdtype) + self.flags = self._as_bytes(flags, True, 255) + self.service = self._as_bytes(service, True, 255) + self.regexp = self._as_bytes(regexp, True, 255) + self.order = self._as_uint16(order) + self.preference = self._as_uint16(preference) + self.replacement = self._as_name(replacement) + + def to_text(self, origin=None, relativize=True, **kw): + replacement = self.replacement.choose_relativity(origin, relativize) + return '%d %d "%s" "%s" "%s" %s' % ( + self.order, + self.preference, + dns.rdata._escapify(self.flags), + dns.rdata._escapify(self.service), + dns.rdata._escapify(self.regexp), + replacement, + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + order = tok.get_uint16() + preference = tok.get_uint16() + flags = tok.get_string() + service = tok.get_string() + regexp = tok.get_string() + replacement = tok.get_name(origin, relativize, relativize_to) + return cls( + rdclass, rdtype, order, preference, flags, service, regexp, replacement + ) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + two_ints = struct.pack("!HH", self.order, self.preference) + file.write(two_ints) + _write_string(file, self.flags) + _write_string(file, self.service) + _write_string(file, self.regexp) + self.replacement.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (order, preference) = parser.get_struct("!HH") + strings = [] + for _ in range(3): + s = parser.get_counted_bytes() + strings.append(s) + 
replacement = parser.get_name(origin) + return cls( + rdclass, + rdtype, + order, + preference, + strings[0], + strings[1], + strings[2], + replacement, + ) + + def _processing_priority(self): + return (self.order, self.preference) + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/NSAP.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/NSAP.py new file mode 100644 index 0000000000000000000000000000000000000000..be8581e67454a90949624456e8ce10c385f15663 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/NSAP.py @@ -0,0 +1,61 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class NSAP(dns.rdata.Rdata): + + """NSAP record.""" + + # see: RFC 1706 + + __slots__ = ["address"] + + def __init__(self, rdclass, rdtype, address): + super().__init__(rdclass, rdtype) + self.address = self._as_bytes(address) + + def to_text(self, origin=None, relativize=True, **kw): + return "0x%s" % binascii.hexlify(self.address).decode() + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_string() + if address[0:2] != "0x": + raise dns.exception.SyntaxError("string does not start with 0x") + address = address[2:].replace(".", "") + if len(address) % 2 != 0: + raise dns.exception.SyntaxError("hexstring has odd length") + address = binascii.unhexlify(address.encode()) + return cls(rdclass, rdtype, address) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.address) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_remaining() + return cls(rdclass, rdtype, address) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/NSAP_PTR.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/NSAP_PTR.py new file mode 100644 index 0000000000000000000000000000000000000000..0a18fdceb4ce34d30ba55113d6017ba375bc93c7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/NSAP_PTR.py @@ -0,0 +1,25 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
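NAPTR above keeps its three character-strings as length-prefixed bytes and sorts rrsets by (order, preference) via _processing_priority(); NSAP is an opaque blob whose text form must carry a 0x prefix. A combined sketch; the NAPTR fields are illustrative and the NSAP literal is the RFC 1706 example:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

naptr = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.NAPTR,
    '100 10 "s" "SIP+D2T" "" _sip._tcp.example.com.',
)
print(naptr.order, naptr.preference, naptr.flags)  # 100 10 b's'

nsap = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.NSAP,
    "0x47000580005a0000000001e133ffffff00016100",
)
print(nsap.to_text())  # round-trips with the 0x prefix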
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import dns.immutable +import dns.rdtypes.nsbase + + +@dns.immutable.immutable +class NSAP_PTR(dns.rdtypes.nsbase.UncompressedNS): + + """NSAP-PTR record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/PX.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/PX.py new file mode 100644 index 0000000000000000000000000000000000000000..5c0aa81ee682a8eda969460856dc7fcebf298681 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/PX.py @@ -0,0 +1,74 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +@dns.immutable.immutable +class PX(dns.rdata.Rdata): + + """PX record.""" + + # see: RFC 2163 + + __slots__ = ["preference", "map822", "mapx400"] + + def __init__(self, rdclass, rdtype, preference, map822, mapx400): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.map822 = self._as_name(map822) + self.mapx400 = self._as_name(mapx400) + + def to_text(self, origin=None, relativize=True, **kw): + map822 = self.map822.choose_relativity(origin, relativize) + mapx400 = self.mapx400.choose_relativity(origin, relativize) + return "%d %s %s" % (self.preference, map822, mapx400) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + map822 = tok.get_name(origin, relativize, relativize_to) + mapx400 = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, preference, map822, mapx400) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + pref = struct.pack("!H", self.preference) + file.write(pref) + self.map822.to_wire(file, None, origin, canonicalize) + self.mapx400.to_wire(file, None, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + map822 = parser.get_name(origin) + mapx400 = parser.get_name(origin) + return cls(rdclass, rdtype, preference, map822, mapx400) + + def _processing_priority(self): + return self.preference + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/SRV.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/SRV.py new file mode 100644 index 0000000000000000000000000000000000000000..84c5400728661ca94ff5a5dde4884a9a60771e35 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/SRV.py @@ -0,0 +1,76 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
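The PX record that follows this license block (RFC 2163) carries a preference and two names, and its _to_wire() deliberately passes compress=None so that neither name is compressed. A sketch, with placeholder names:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

px = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.PX,
    "10 map822.example. mapx400.example.",
)
print(px.preference, px.map822, px.mapx400)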
+ +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +@dns.immutable.immutable +class SRV(dns.rdata.Rdata): + + """SRV record""" + + # see: RFC 2782 + + __slots__ = ["priority", "weight", "port", "target"] + + def __init__(self, rdclass, rdtype, priority, weight, port, target): + super().__init__(rdclass, rdtype) + self.priority = self._as_uint16(priority) + self.weight = self._as_uint16(weight) + self.port = self._as_uint16(port) + self.target = self._as_name(target) + + def to_text(self, origin=None, relativize=True, **kw): + target = self.target.choose_relativity(origin, relativize) + return "%d %d %d %s" % (self.priority, self.weight, self.port, target) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + priority = tok.get_uint16() + weight = tok.get_uint16() + port = tok.get_uint16() + target = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, priority, weight, port, target) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + three_ints = struct.pack("!HHH", self.priority, self.weight, self.port) + file.write(three_ints) + self.target.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + (priority, weight, port) = parser.get_struct("!HHH") + target = parser.get_name(origin) + return cls(rdclass, rdtype, priority, weight, port, target) + + def _processing_priority(self): + return self.priority + + def _processing_weight(self): + return self.weight + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.weighted_processing_order(iterable) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/SVCB.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/SVCB.py new file mode 100644 index 0000000000000000000000000000000000000000..ff3e9327775faf5f8293bbfa5dd8a0fc645bd0c3 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/SVCB.py @@ -0,0 +1,9 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import dns.immutable +import dns.rdtypes.svcbbase + + +@dns.immutable.immutable +class SVCB(dns.rdtypes.svcbbase.SVCBBase): + """SVCB record""" diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/WKS.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/WKS.py new file mode 100644 index 0000000000000000000000000000000000000000..26d287a3c10ba4c667a9365e355bb836767e06c8 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/WKS.py @@ -0,0 +1,101 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
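The SRV implementation above exposes both _processing_priority() and _processing_weight(), which is what lets dns.rdtypes.util.weighted_processing_order implement RFC 2782's weighted server selection. A sketch with illustrative values:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

srv = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.SRV,
    "0 5 5060 sipserver.example.com.",
)
print(srv.priority, srv.weight, srv.port, srv.target)
# Wire format is three uint16 fields followed by the uncompressed target:
assert len(srv.to_wire()) == 6 + len(srv.target.to_wire())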
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import socket +import struct + +import dns.immutable +import dns.ipv4 +import dns.rdata + +try: + _proto_tcp = socket.getprotobyname("tcp") + _proto_udp = socket.getprotobyname("udp") +except OSError: + # Fall back to defaults in case /etc/protocols is unavailable. + _proto_tcp = 6 + _proto_udp = 17 + + +@dns.immutable.immutable +class WKS(dns.rdata.Rdata): + + """WKS record""" + + # see: RFC 1035 + + __slots__ = ["address", "protocol", "bitmap"] + + def __init__(self, rdclass, rdtype, address, protocol, bitmap): + super().__init__(rdclass, rdtype) + self.address = self._as_ipv4_address(address) + self.protocol = self._as_uint8(protocol) + self.bitmap = self._as_bytes(bitmap) + + def to_text(self, origin=None, relativize=True, **kw): + bits = [] + for i, byte in enumerate(self.bitmap): + for j in range(0, 8): + if byte & (0x80 >> j): + bits.append(str(i * 8 + j)) + text = " ".join(bits) + return "%s %d %s" % (self.address, self.protocol, text) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + address = tok.get_string() + protocol = tok.get_string() + if protocol.isdigit(): + protocol = int(protocol) + else: + protocol = socket.getprotobyname(protocol) + bitmap = bytearray() + for token in tok.get_remaining(): + value = token.unescape().value + if value.isdigit(): + serv = int(value) + else: + if protocol != _proto_udp and protocol != _proto_tcp: + raise NotImplementedError("protocol must be TCP or UDP") + if protocol == _proto_udp: + protocol_text = "udp" + else: + protocol_text = "tcp" + serv = socket.getservbyname(value, protocol_text) + i = serv // 8 + l = len(bitmap) + if l < i + 1: + for _ in range(l, i + 1): + bitmap.append(0) + bitmap[i] = bitmap[i] | (0x80 >> (serv % 8)) + bitmap = dns.rdata._truncate_bitmap(bitmap) + return cls(rdclass, rdtype, address, protocol, bitmap) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(dns.ipv4.inet_aton(self.address)) + protocol = struct.pack("!B", self.protocol) + file.write(protocol) + file.write(self.bitmap) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + address = parser.get_bytes(4) + protocol = parser.get_uint8() + bitmap = parser.get_remaining() + return cls(rdclass, rdtype, address, protocol, bitmap) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__init__.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..dcec4dd24d49ee16a4b2cda0fdb5806b6c13695c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__init__.py @@ -0,0 +1,35 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. 
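WKS above builds a variable-length bitmap in which bit N set means port N is listed; textual service names are resolved through socket.getservbyname and only for TCP/UDP. A sketch that gives the protocol and ports numerically so it does not depend on /etc/services:

import dns.rdata
import dns.rdataclass
import dns.rdatatype

wks = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.WKS,
    "192.0.2.1 6 25 80",
)
print(wks.protocol)   # 6 (TCP)
print(wks.to_text())  # "192.0.2.1 6 25 80" -- bitmap decoded back to ports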
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Class IN rdata type classes.""" + +__all__ = [ + "A", + "AAAA", + "APL", + "DHCID", + "HTTPS", + "IPSECKEY", + "KX", + "NAPTR", + "NSAP", + "NSAP_PTR", + "PX", + "SRV", + "SVCB", + "WKS", +] diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..149b23557d11f487accaeb0a6bf4343346acdbed Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/A.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7e2e4923573bae4ec9e66843e12a72c5516b343c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/AAAA.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d37c6a3563b989e17c478f0591edb9da193d8efa Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/APL.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f83dfc4a73c8ee0c62c5cfd3f9d8e7da257aa061 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/DHCID.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b51593c22d7296cceaf2fdac7d56a4941955cb37 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/HTTPS.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0c6a58d6b737e6c44ef34bf328f9cdb0081e8467 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dd9935c0064e9a3591af91dd8eaca6c13d1f1313 
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/KX.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5eec047ba0611d2331daa721f31573fa5504d15c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/NAPTR.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..143a7e2cf69dd9ff5d02175265fb7aaa00ff4811 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/NSAP.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d020ca0c30facbb307b97c72146b7b6ad172c63d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/PX.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/PX.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..644bb1108ab7d2db4373996c07c20bf4d02c1471 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/PX.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/SRV.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/SRV.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aa149f094cd7acdcc557bceebad3baf8564b8059 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/SRV.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/SVCB.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/SVCB.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..125f51c1626f8dc2fd0c5ec9f9bc55743d2107ce Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/SVCB.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/WKS.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/WKS.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f37087c744da0c015c73232b497f6aa7a781f1f8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/WKS.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f8f20a83f1d404013dd3c287b1fc5c1496a3161e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/IN/__pycache__/__init__.cpython-38.pyc differ diff --git 
a/backend/test/lib/python3.8/site-packages/dns/rdtypes/__init__.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3997f84c3dd9a8a5e2b20d0740dbf9a379cb6967 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__init__.py @@ -0,0 +1,33 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS rdata type classes""" + +__all__ = [ + "ANY", + "IN", + "CH", + "dnskeybase", + "dsbase", + "euibase", + "mxbase", + "nsbase", + "svcbbase", + "tlsabase", + "txtbase", + "util", +] diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9cabe2b6a374998395f48a64755dbb4d2c9c382b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6f7bfa667e586eee129e244af08ba82a74365d5d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/dnskeybase.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8a640a6e0185df7333b7fbd4b82f777d2abc1306 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/dsbase.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/euibase.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/euibase.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..33219fe3d7e8fcb279892993c711167f48dc76bf Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/euibase.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6379ed360a1bd232eca8895d80f231bdd96070f0 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/mxbase.cpython-38.pyc differ diff 
--git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3dc10330646920058148e4b8b12517ab5f1b7486 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/nsbase.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bcb85684208a0f9c1bdec710fec57fc0f32b67f6 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/svcbbase.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b6920d2e4d3817c5d0aefdabb57bc9516b08d5af Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/tlsabase.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..73ed37aa10be4770274a35c194709b881f1ce186 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/txtbase.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/util.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/util.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c12c523a995b7c273f377295d54b3e244fc4b85b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/dns/rdtypes/__pycache__/util.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/dnskeybase.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/dnskeybase.py new file mode 100644 index 0000000000000000000000000000000000000000..3bfcf860d44fd4bb8a3d1bf866e021d100ec8f72 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/dnskeybase.py @@ -0,0 +1,88 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
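The DNSKEY base class that follows models the flags field as an IntFlag, so SEP/REVOKE/ZONE membership tests read naturally. A sketch; the base64 blob below is a dummy placeholder, not real key material:

import dns.rdata
import dns.rdataclass
import dns.rdatatype
from dns.rdtypes.dnskeybase import Flag

# 257 = ZONE (0x0100) | SEP (0x0001); algorithm 8 is RSASHA256.
key = dns.rdata.from_text(
    dns.rdataclass.IN, dns.rdatatype.DNSKEY,
    "257 3 8 AwEAAaOSydAF",  # dummy key bytes, base64-encoded
)
assert Flag.SEP in key.flags and Flag.ZONE in key.flags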
+ +import base64 +import enum +import struct + +import dns.dnssectypes +import dns.exception +import dns.immutable +import dns.rdata + +# wildcard import +__all__ = ["SEP", "REVOKE", "ZONE"] # noqa: F822 + + +class Flag(enum.IntFlag): + SEP = 0x0001 + REVOKE = 0x0080 + ZONE = 0x0100 + + +@dns.immutable.immutable +class DNSKEYBase(dns.rdata.Rdata): + + """Base class for rdata that is like a DNSKEY record""" + + __slots__ = ["flags", "protocol", "algorithm", "key"] + + def __init__(self, rdclass, rdtype, flags, protocol, algorithm, key): + super().__init__(rdclass, rdtype) + self.flags = Flag(self._as_uint16(flags)) + self.protocol = self._as_uint8(protocol) + self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) + self.key = self._as_bytes(key) + + def to_text(self, origin=None, relativize=True, **kw): + return "%d %d %d %s" % ( + self.flags, + self.protocol, + self.algorithm, + dns.rdata._base64ify(self.key, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + flags = tok.get_uint16() + protocol = tok.get_uint8() + algorithm = tok.get_string() + b64 = tok.concatenate_remaining_identifiers().encode() + key = base64.b64decode(b64) + return cls(rdclass, rdtype, flags, protocol, algorithm, key) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!HBB", self.flags, self.protocol, self.algorithm) + file.write(header) + file.write(self.key) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!HBB") + key = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], key) + + +### BEGIN generated Flag constants + +SEP = Flag.SEP +REVOKE = Flag.REVOKE +ZONE = Flag.ZONE + +### END generated Flag constants diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/dsbase.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/dsbase.py new file mode 100644 index 0000000000000000000000000000000000000000..1ad0b7a5f019546e442200fd268e74e7e828ae75 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/dsbase.py @@ -0,0 +1,86 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2010, 2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii +import struct + +import dns.dnssectypes +import dns.immutable +import dns.rdata +import dns.rdatatype + + +@dns.immutable.immutable +class DSBase(dns.rdata.Rdata): + + """Base class for rdata that is like a DS record""" + + __slots__ = ["key_tag", "algorithm", "digest_type", "digest"] + + # Digest types registry: + # https://www.iana.org/assignments/ds-rr-types/ds-rr-types.xhtml + _digest_length_by_type = { + 1: 20, # SHA-1, RFC 3658 Sec. 
2.4 + 2: 32, # SHA-256, RFC 4509 Sec. 2.2 + 3: 32, # GOST R 34.11-94, RFC 5933 Sec. 4 in conjunction with RFC 4490 Sec. 2.1 + 4: 48, # SHA-384, RFC 6605 Sec. 2 + } + + def __init__(self, rdclass, rdtype, key_tag, algorithm, digest_type, digest): + super().__init__(rdclass, rdtype) + self.key_tag = self._as_uint16(key_tag) + self.algorithm = dns.dnssectypes.Algorithm.make(algorithm) + self.digest_type = dns.dnssectypes.DSDigest.make(self._as_uint8(digest_type)) + self.digest = self._as_bytes(digest) + try: + if len(self.digest) != self._digest_length_by_type[self.digest_type]: + raise ValueError("digest length inconsistent with digest type") + except KeyError: + if self.digest_type == 0: # reserved, RFC 3658 Sec. 2.4 + raise ValueError("digest type 0 is reserved") + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop("chunksize", 128) + return "%d %d %d %s" % ( + self.key_tag, + self.algorithm, + self.digest_type, + dns.rdata._hexify(self.digest, chunksize=chunksize, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + key_tag = tok.get_uint16() + algorithm = tok.get_string() + digest_type = tok.get_uint8() + digest = tok.concatenate_remaining_identifiers().encode() + digest = binascii.unhexlify(digest) + return cls(rdclass, rdtype, key_tag, algorithm, digest_type, digest) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!HBB", self.key_tag, self.algorithm, self.digest_type) + file.write(header) + file.write(self.digest) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("!HBB") + digest = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], digest) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/euibase.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/euibase.py new file mode 100644 index 0000000000000000000000000000000000000000..4c4068b25de1f6b8fb0ce3c18b2ae9ba659fd75e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/euibase.py @@ -0,0 +1,71 @@ +# Copyright (C) 2015 Red Hat, Inc. +# Author: Petr Spacek <pspacek@redhat.com> +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED 'AS IS' AND RED HAT DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
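+# --- Editorial usage sketch (not part of upstream dnspython; the address is
+# --- made up). EUIBase below is the base class for the EUI48 and EUI64 rdata
+# --- types of RFC 7043:
+#
+#   >>> import dns.rdata
+#   >>> rd = dns.rdata.from_text("IN", "EUI48", "01-23-45-67-89-ab")
+#   >>> rd.to_text()
+#   '01-23-45-67-89-ab'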
+ +import binascii + +import dns.exception +import dns.immutable +import dns.rdata + + +@dns.immutable.immutable +class EUIBase(dns.rdata.Rdata): + + """EUIxx record""" + + # see: rfc7043.txt + + __slots__ = ["eui"] + # define these in subclasses + # byte_len = 6 # 0123456789ab (in hex) + # text_len = byte_len * 3 - 1 # 01-23-45-67-89-ab + + def __init__(self, rdclass, rdtype, eui): + super().__init__(rdclass, rdtype) + self.eui = self._as_bytes(eui) + if len(self.eui) != self.byte_len: + raise dns.exception.FormError( + "EUI%s rdata has to have %s bytes" % (self.byte_len * 8, self.byte_len) + ) + + def to_text(self, origin=None, relativize=True, **kw): + return dns.rdata._hexify(self.eui, chunksize=2, separator=b"-", **kw) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + text = tok.get_string() + if len(text) != cls.text_len: + raise dns.exception.SyntaxError( + "Input text must have %s characters" % cls.text_len + ) + for i in range(2, cls.byte_len * 3 - 1, 3): + if text[i] != "-": + raise dns.exception.SyntaxError("Dash expected at position %s" % i) + text = text.replace("-", "") + try: + data = binascii.unhexlify(text.encode()) + except (ValueError, TypeError) as ex: + raise dns.exception.SyntaxError("Hex decoding error: %s" % str(ex)) + return cls(rdclass, rdtype, data) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(self.eui) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + eui = parser.get_bytes(cls.byte_len) + return cls(rdclass, rdtype, eui) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/mxbase.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/mxbase.py new file mode 100644 index 0000000000000000000000000000000000000000..a6bae0781193b8cf4c6c24e727d8572601d6e0a7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/mxbase.py @@ -0,0 +1,90 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
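+# --- Editorial usage sketch (not part of upstream dnspython; the exchange
+# --- name is hypothetical). MXBase below backs MX and MX-shaped rdata types
+# --- such as RT and KX:
+#
+#   >>> import dns.rdata
+#   >>> rd = dns.rdata.from_text("IN", "MX", "10 mail.example.com.")
+#   >>> rd.preference, str(rd.exchange)
+#   (10, 'mail.example.com.')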
+ +"""MX-like base classes.""" + +import struct + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata +import dns.rdtypes.util + + +@dns.immutable.immutable +class MXBase(dns.rdata.Rdata): + + """Base class for rdata that is like an MX record.""" + + __slots__ = ["preference", "exchange"] + + def __init__(self, rdclass, rdtype, preference, exchange): + super().__init__(rdclass, rdtype) + self.preference = self._as_uint16(preference) + self.exchange = self._as_name(exchange) + + def to_text(self, origin=None, relativize=True, **kw): + exchange = self.exchange.choose_relativity(origin, relativize) + return "%d %s" % (self.preference, exchange) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + preference = tok.get_uint16() + exchange = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, preference, exchange) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + pref = struct.pack("!H", self.preference) + file.write(pref) + self.exchange.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + preference = parser.get_uint16() + exchange = parser.get_name(origin) + return cls(rdclass, rdtype, preference, exchange) + + def _processing_priority(self): + return self.preference + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) + + +@dns.immutable.immutable +class UncompressedMX(MXBase): + + """Base class for rdata that is like an MX record, but whose name + is not compressed when converted to DNS wire format, and whose + digestable form is not downcased.""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + super()._to_wire(file, None, origin, False) + + +@dns.immutable.immutable +class UncompressedDowncasingMX(MXBase): + + """Base class for rdata that is like an MX record, but whose name + is not compressed when convert to DNS wire format.""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + super()._to_wire(file, None, origin, canonicalize) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/nsbase.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/nsbase.py new file mode 100644 index 0000000000000000000000000000000000000000..56d942356585546d216ef8015675a75e9c342fea --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/nsbase.py @@ -0,0 +1,65 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""NS-like base classes.""" + +import dns.exception +import dns.immutable +import dns.name +import dns.rdata + + +@dns.immutable.immutable +class NSBase(dns.rdata.Rdata): + + """Base class for rdata that is like an NS record.""" + + __slots__ = ["target"] + + def __init__(self, rdclass, rdtype, target): + super().__init__(rdclass, rdtype) + self.target = self._as_name(target) + + def to_text(self, origin=None, relativize=True, **kw): + target = self.target.choose_relativity(origin, relativize) + return str(target) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + target = tok.get_name(origin, relativize, relativize_to) + return cls(rdclass, rdtype, target) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.target.to_wire(file, compress, origin, canonicalize) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + target = parser.get_name(origin) + return cls(rdclass, rdtype, target) + + +@dns.immutable.immutable +class UncompressedNS(NSBase): + + """Base class for rdata that is like an NS record, but whose name + is not compressed when convert to DNS wire format, and whose + digestable form is not downcased.""" + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + self.target.to_wire(file, None, origin, False) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/svcbbase.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/svcbbase.py new file mode 100644 index 0000000000000000000000000000000000000000..ba5b53d2cb7d0e25d0437b2193f0aae4a3324f26 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/svcbbase.py @@ -0,0 +1,563 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import base64 +import enum +import io +import struct + +import dns.enum +import dns.exception +import dns.immutable +import dns.ipv4 +import dns.ipv6 +import dns.name +import dns.rdata +import dns.rdtypes.util +import dns.tokenizer +import dns.wire + +# Until there is an RFC, this module is experimental and may be changed in +# incompatible ways. + + +class UnknownParamKey(dns.exception.DNSException): + """Unknown SVCB ParamKey""" + + +class ParamKey(dns.enum.IntEnum): + """SVCB ParamKey""" + + MANDATORY = 0 + ALPN = 1 + NO_DEFAULT_ALPN = 2 + PORT = 3 + IPV4HINT = 4 + ECH = 5 + IPV6HINT = 6 + DOHPATH = 7 + + @classmethod + def _maximum(cls): + return 65535 + + @classmethod + def _short_name(cls): + return "SVCBParamKey" + + @classmethod + def _prefix(cls): + return "KEY" + + @classmethod + def _unknown_exception_class(cls): + return UnknownParamKey + + +class Emptiness(enum.IntEnum): + NEVER = 0 + ALWAYS = 1 + ALLOWED = 2 + + +def _validate_key(key): + force_generic = False + if isinstance(key, bytes): + # We decode to latin-1 so we get 0-255 as valid and do NOT interpret + # UTF-8 sequences + key = key.decode("latin-1") + if isinstance(key, str): + if key.lower().startswith("key"): + force_generic = True + if key[3:].startswith("0") and len(key) != 4: + # key has leading zeros + raise ValueError("leading zeros in key") + key = key.replace("-", "_") + return (ParamKey.make(key), force_generic) + + +def key_to_text(key): + return ParamKey.to_text(key).replace("_", "-").lower() + + +# Like rdata escapify, but escapes ',' too. 
+ +_escaped = b'",\\' + + +def _escapify(qstring): + text = "" + for c in qstring: + if c in _escaped: + text += "\\" + chr(c) + elif c >= 0x20 and c < 0x7F: + text += chr(c) + else: + text += "\\%03d" % c + return text + + +def _unescape(value): + if value == "": + return value + unescaped = b"" + l = len(value) + i = 0 + while i < l: + c = value[i] + i += 1 + if c == "\\": + if i >= l: # pragma: no cover (can't happen via tokenizer get()) + raise dns.exception.UnexpectedEnd + c = value[i] + i += 1 + if c.isdigit(): + if i >= l: + raise dns.exception.UnexpectedEnd + c2 = value[i] + i += 1 + if i >= l: + raise dns.exception.UnexpectedEnd + c3 = value[i] + i += 1 + if not (c2.isdigit() and c3.isdigit()): + raise dns.exception.SyntaxError + codepoint = int(c) * 100 + int(c2) * 10 + int(c3) + if codepoint > 255: + raise dns.exception.SyntaxError + unescaped += b"%c" % (codepoint) + continue + unescaped += c.encode() + return unescaped + + +def _split(value): + l = len(value) + i = 0 + items = [] + unescaped = b"" + while i < l: + c = value[i] + i += 1 + if c == ord("\\"): + if i >= l: # pragma: no cover (can't happen via tokenizer get()) + raise dns.exception.UnexpectedEnd + c = value[i] + i += 1 + unescaped += b"%c" % (c) + elif c == ord(","): + items.append(unescaped) + unescaped = b"" + else: + unescaped += b"%c" % (c) + items.append(unescaped) + return items + + +@dns.immutable.immutable +class Param: + """Abstract base class for SVCB parameters""" + + @classmethod + def emptiness(cls): + return Emptiness.NEVER + + +@dns.immutable.immutable +class GenericParam(Param): + """Generic SVCB parameter""" + + def __init__(self, value): + self.value = dns.rdata.Rdata._as_bytes(value, True) + + @classmethod + def emptiness(cls): + return Emptiness.ALLOWED + + @classmethod + def from_value(cls, value): + if value is None or len(value) == 0: + return None + else: + return cls(_unescape(value)) + + def to_text(self): + return '"' + dns.rdata._escapify(self.value) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + value = parser.get_bytes(parser.remaining()) + if len(value) == 0: + return None + else: + return cls(value) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + file.write(self.value) + + +@dns.immutable.immutable +class MandatoryParam(Param): + def __init__(self, keys): + # check for duplicates + keys = sorted([_validate_key(key)[0] for key in keys]) + prior_k = None + for k in keys: + if k == prior_k: + raise ValueError(f"duplicate key {k:d}") + prior_k = k + if k == ParamKey.MANDATORY: + raise ValueError("listed the mandatory key as mandatory") + self.keys = tuple(keys) + + @classmethod + def from_value(cls, value): + keys = [k.encode() for k in value.split(",")] + return cls(keys) + + def to_text(self): + return '"' + ",".join([key_to_text(key) for key in self.keys]) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + keys = [] + last_key = -1 + while parser.remaining() > 0: + key = parser.get_uint16() + if key < last_key: + raise dns.exception.FormError("manadatory keys not ascending") + last_key = key + keys.append(key) + return cls(keys) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for key in self.keys: + file.write(struct.pack("!H", key)) + + +@dns.immutable.immutable +class ALPNParam(Param): + def __init__(self, ids): + self.ids = dns.rdata.Rdata._as_tuple( + ids, lambda x: dns.rdata.Rdata._as_bytes(x, True, 255, False) + ) + + @classmethod + def 
from_value(cls, value): + return cls(_split(_unescape(value))) + + def to_text(self): + value = ",".join([_escapify(id) for id in self.ids]) + return '"' + dns.rdata._escapify(value.encode()) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + ids = [] + while parser.remaining() > 0: + id = parser.get_counted_bytes() + ids.append(id) + return cls(ids) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for id in self.ids: + file.write(struct.pack("!B", len(id))) + file.write(id) + + +@dns.immutable.immutable +class NoDefaultALPNParam(Param): + # We don't ever expect to instantiate this class, but we need + # a from_value() and a from_wire_parser(), so we just return None + # from the class methods when things are OK. + + @classmethod + def emptiness(cls): + return Emptiness.ALWAYS + + @classmethod + def from_value(cls, value): + if value is None or value == "": + return None + else: + raise ValueError("no-default-alpn with non-empty value") + + def to_text(self): + raise NotImplementedError # pragma: no cover + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + if parser.remaining() != 0: + raise dns.exception.FormError + return None + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + raise NotImplementedError # pragma: no cover + + +@dns.immutable.immutable +class PortParam(Param): + def __init__(self, port): + self.port = dns.rdata.Rdata._as_uint16(port) + + @classmethod + def from_value(cls, value): + value = int(value) + return cls(value) + + def to_text(self): + return f'"{self.port}"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + port = parser.get_uint16() + return cls(port) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + file.write(struct.pack("!H", self.port)) + + +@dns.immutable.immutable +class IPv4HintParam(Param): + def __init__(self, addresses): + self.addresses = dns.rdata.Rdata._as_tuple( + addresses, dns.rdata.Rdata._as_ipv4_address + ) + + @classmethod + def from_value(cls, value): + addresses = value.split(",") + return cls(addresses) + + def to_text(self): + return '"' + ",".join(self.addresses) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + addresses = [] + while parser.remaining() > 0: + ip = parser.get_bytes(4) + addresses.append(dns.ipv4.inet_ntoa(ip)) + return cls(addresses) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for address in self.addresses: + file.write(dns.ipv4.inet_aton(address)) + + +@dns.immutable.immutable +class IPv6HintParam(Param): + def __init__(self, addresses): + self.addresses = dns.rdata.Rdata._as_tuple( + addresses, dns.rdata.Rdata._as_ipv6_address + ) + + @classmethod + def from_value(cls, value): + addresses = value.split(",") + return cls(addresses) + + def to_text(self): + return '"' + ",".join(self.addresses) + '"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + addresses = [] + while parser.remaining() > 0: + ip = parser.get_bytes(16) + addresses.append(dns.ipv6.inet_ntoa(ip)) + return cls(addresses) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + for address in self.addresses: + file.write(dns.ipv6.inet_aton(address)) + + +@dns.immutable.immutable +class ECHParam(Param): + def __init__(self, ech): + self.ech = dns.rdata.Rdata._as_bytes(ech, True) + + @classmethod + def from_value(cls, value): + if "\\" in value: + 
raise ValueError("escape in ECH value") + value = base64.b64decode(value.encode()) + return cls(value) + + def to_text(self): + b64 = base64.b64encode(self.ech).decode("ascii") + return f'"{b64}"' + + @classmethod + def from_wire_parser(cls, parser, origin=None): # pylint: disable=W0613 + value = parser.get_bytes(parser.remaining()) + return cls(value) + + def to_wire(self, file, origin=None): # pylint: disable=W0613 + file.write(self.ech) + + +_class_for_key = { + ParamKey.MANDATORY: MandatoryParam, + ParamKey.ALPN: ALPNParam, + ParamKey.NO_DEFAULT_ALPN: NoDefaultALPNParam, + ParamKey.PORT: PortParam, + ParamKey.IPV4HINT: IPv4HintParam, + ParamKey.ECH: ECHParam, + ParamKey.IPV6HINT: IPv6HintParam, +} + + +def _validate_and_define(params, key, value): + (key, force_generic) = _validate_key(_unescape(key)) + if key in params: + raise SyntaxError(f'duplicate key "{key:d}"') + cls = _class_for_key.get(key, GenericParam) + emptiness = cls.emptiness() + if value is None: + if emptiness == Emptiness.NEVER: + raise SyntaxError("value cannot be empty") + value = cls.from_value(value) + else: + if force_generic: + value = cls.from_wire_parser(dns.wire.Parser(_unescape(value))) + else: + value = cls.from_value(value) + params[key] = value + + +@dns.immutable.immutable +class SVCBBase(dns.rdata.Rdata): + + """Base class for SVCB-like records""" + + # see: draft-ietf-dnsop-svcb-https-11 + + __slots__ = ["priority", "target", "params"] + + def __init__(self, rdclass, rdtype, priority, target, params): + super().__init__(rdclass, rdtype) + self.priority = self._as_uint16(priority) + self.target = self._as_name(target) + for k, v in params.items(): + k = ParamKey.make(k) + if not isinstance(v, Param) and v is not None: + raise ValueError(f"{k:d} not a Param") + self.params = dns.immutable.Dict(params) + # Make sure any parameter listed as mandatory is present in the + # record. + mandatory = params.get(ParamKey.MANDATORY) + if mandatory: + for key in mandatory.keys: + # Note we have to say "not in" as we have None as a value + # so a get() and a not None test would be wrong. + if key not in params: + raise ValueError(f"key {key:d} declared mandatory but not present") + # The no-default-alpn parameter requires the alpn parameter. 
+ if ParamKey.NO_DEFAULT_ALPN in params: + if ParamKey.ALPN not in params: + raise ValueError("no-default-alpn present, but alpn missing") + + def to_text(self, origin=None, relativize=True, **kw): + target = self.target.choose_relativity(origin, relativize) + params = [] + for key in sorted(self.params.keys()): + value = self.params[key] + if value is None: + params.append(key_to_text(key)) + else: + kv = key_to_text(key) + "=" + value.to_text() + params.append(kv) + if len(params) > 0: + space = " " + else: + space = "" + return "%d %s%s%s" % (self.priority, target, space, " ".join(params)) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + priority = tok.get_uint16() + target = tok.get_name(origin, relativize, relativize_to) + if priority == 0: + token = tok.get() + if not token.is_eol_or_eof(): + raise SyntaxError("parameters in AliasMode") + tok.unget(token) + params = {} + while True: + token = tok.get() + if token.is_eol_or_eof(): + tok.unget(token) + break + if token.ttype != dns.tokenizer.IDENTIFIER: + raise SyntaxError("parameter is not an identifier") + equals = token.value.find("=") + if equals == len(token.value) - 1: + # 'key=', so next token should be a quoted string without + # any intervening whitespace. + key = token.value[:-1] + token = tok.get(want_leading=True) + if token.ttype != dns.tokenizer.QUOTED_STRING: + raise SyntaxError("whitespace after =") + value = token.value + elif equals > 0: + # key=value + key = token.value[:equals] + value = token.value[equals + 1 :] + elif equals == 0: + # =key + raise SyntaxError('parameter cannot start with "="') + else: + # key + key = token.value + value = None + _validate_and_define(params, key, value) + return cls(rdclass, rdtype, priority, target, params) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + file.write(struct.pack("!H", self.priority)) + self.target.to_wire(file, None, origin, False) + for key in sorted(self.params): + file.write(struct.pack("!H", key)) + value = self.params[key] + # placeholder for length (or actual length of empty values) + file.write(struct.pack("!H", 0)) + if value is None: + continue + else: + start = file.tell() + value.to_wire(file, origin) + end = file.tell() + assert end - start < 65536 + file.seek(start - 2) + stuff = struct.pack("!H", end - start) + file.write(stuff) + file.seek(0, io.SEEK_END) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + priority = parser.get_uint16() + target = parser.get_name(origin) + if priority == 0 and parser.remaining() != 0: + raise dns.exception.FormError("parameters in AliasMode") + params = {} + prior_key = -1 + while parser.remaining() > 0: + key = parser.get_uint16() + if key < prior_key: + raise dns.exception.FormError("keys not in order") + prior_key = key + vlen = parser.get_uint16() + pcls = _class_for_key.get(key, GenericParam) + with parser.restrict_to(vlen): + value = pcls.from_wire_parser(parser, origin) + params[key] = value + return cls(rdclass, rdtype, priority, target, params) + + def _processing_priority(self): + return self.priority + + @classmethod + def _processing_order(cls, iterable): + return dns.rdtypes.util.priority_processing_order(iterable) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/tlsabase.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/tlsabase.py new file mode 100644 index 0000000000000000000000000000000000000000..4cdb7ab34f290bda535b282d6b586b773e7074a4 --- 
/dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/tlsabase.py @@ -0,0 +1,72 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2005-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import binascii +import struct + +import dns.immutable +import dns.rdata +import dns.rdatatype + + +@dns.immutable.immutable +class TLSABase(dns.rdata.Rdata): + + """Base class for TLSA and SMIMEA records""" + + # see: RFC 6698 + + __slots__ = ["usage", "selector", "mtype", "cert"] + + def __init__(self, rdclass, rdtype, usage, selector, mtype, cert): + super().__init__(rdclass, rdtype) + self.usage = self._as_uint8(usage) + self.selector = self._as_uint8(selector) + self.mtype = self._as_uint8(mtype) + self.cert = self._as_bytes(cert) + + def to_text(self, origin=None, relativize=True, **kw): + kw = kw.copy() + chunksize = kw.pop("chunksize", 128) + return "%d %d %d %s" % ( + self.usage, + self.selector, + self.mtype, + dns.rdata._hexify(self.cert, chunksize=chunksize, **kw), + ) + + @classmethod + def from_text( + cls, rdclass, rdtype, tok, origin=None, relativize=True, relativize_to=None + ): + usage = tok.get_uint8() + selector = tok.get_uint8() + mtype = tok.get_uint8() + cert = tok.concatenate_remaining_identifiers().encode() + cert = binascii.unhexlify(cert) + return cls(rdclass, rdtype, usage, selector, mtype, cert) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + header = struct.pack("!BBB", self.usage, self.selector, self.mtype) + file.write(header) + file.write(self.cert) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + header = parser.get_struct("BBB") + cert = parser.get_remaining() + return cls(rdclass, rdtype, header[0], header[1], header[2], cert) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/txtbase.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/txtbase.py new file mode 100644 index 0000000000000000000000000000000000000000..fdbfb6465f81188512b47b7ed9a58a9ddca89e74 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/txtbase.py @@ -0,0 +1,107 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""TXT-like base class.""" + +import struct +from typing import Any, Dict, Iterable, Optional, Tuple, Union + +import dns.exception +import dns.immutable +import dns.rdata +import dns.tokenizer + + +@dns.immutable.immutable +class TXTBase(dns.rdata.Rdata): + + """Base class for rdata that is like a TXT record (see RFC 1035).""" + + __slots__ = ["strings"] + + def __init__( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + strings: Iterable[Union[bytes, str]], + ): + """Initialize a TXT-like rdata. + + *rdclass*, an ``int``, is the rdataclass of the Rdata. + + *rdtype*, an ``int``, is the rdatatype of the Rdata. + + *strings*, a tuple of ``bytes`` + """ + super().__init__(rdclass, rdtype) + self.strings: Tuple[bytes, ...] = self._as_tuple( + strings, lambda x: self._as_bytes(x, True, 255) + ) + + def to_text( + self, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + **kw: Dict[str, Any] + ) -> str: + txt = "" + prefix = "" + for s in self.strings: + txt += '{}"{}"'.format(prefix, dns.rdata._escapify(s)) + prefix = " " + return txt + + @classmethod + def from_text( + cls, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + tok: dns.tokenizer.Tokenizer, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + relativize_to: Optional[dns.name.Name] = None, + ) -> dns.rdata.Rdata: + strings = [] + for token in tok.get_remaining(): + token = token.unescape_to_bytes() + # The 'if' below is always true in the current code, but we + # are leaving this check in, in case things change some day. + if not ( + token.is_quoted_string() or token.is_identifier() + ): # pragma: no cover + raise dns.exception.SyntaxError("expected a string") + if len(token.value) > 255: + raise dns.exception.SyntaxError("string too long") + strings.append(token.value) + if len(strings) == 0: + raise dns.exception.UnexpectedEnd + return cls(rdclass, rdtype, strings) + + def _to_wire(self, file, compress=None, origin=None, canonicalize=False): + for s in self.strings: + l = len(s) + assert l < 256 + file.write(struct.pack("!B", l)) + file.write(s) + + @classmethod + def from_wire_parser(cls, rdclass, rdtype, parser, origin=None): + strings = [] + while parser.remaining() > 0: + s = parser.get_counted_bytes() + strings.append(s) + return cls(rdclass, rdtype, strings) diff --git a/backend/test/lib/python3.8/site-packages/dns/rdtypes/util.py b/backend/test/lib/python3.8/site-packages/dns/rdtypes/util.py new file mode 100644 index 0000000000000000000000000000000000000000..54908fdc5a15f3b1b459bad6700974c562b27d17 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/rdtypes/util.py @@ -0,0 +1,257 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies.
+# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import collections +import random +import struct +from typing import Any, List + +import dns.exception +import dns.ipv4 +import dns.ipv6 +import dns.name +import dns.rdata + + +class Gateway: + """A helper class for the IPSECKEY gateway and AMTRELAY relay fields""" + + name = "" + + def __init__(self, type, gateway=None): + self.type = dns.rdata.Rdata._as_uint8(type) + self.gateway = gateway + self._check() + + @classmethod + def _invalid_type(cls, gateway_type): + return f"invalid {cls.name} type: {gateway_type}" + + def _check(self): + if self.type == 0: + if self.gateway not in (".", None): + raise SyntaxError(f"invalid {self.name} for type 0") + self.gateway = None + elif self.type == 1: + # check that it's OK + dns.ipv4.inet_aton(self.gateway) + elif self.type == 2: + # check that it's OK + dns.ipv6.inet_aton(self.gateway) + elif self.type == 3: + if not isinstance(self.gateway, dns.name.Name): + raise SyntaxError(f"invalid {self.name}; not a name") + else: + raise SyntaxError(self._invalid_type(self.type)) + + def to_text(self, origin=None, relativize=True): + if self.type == 0: + return "." + elif self.type in (1, 2): + return self.gateway + elif self.type == 3: + return str(self.gateway.choose_relativity(origin, relativize)) + else: + raise ValueError(self._invalid_type(self.type)) # pragma: no cover + + @classmethod + def from_text( + cls, gateway_type, tok, origin=None, relativize=True, relativize_to=None + ): + if gateway_type in (0, 1, 2): + gateway = tok.get_string() + elif gateway_type == 3: + gateway = tok.get_name(origin, relativize, relativize_to) + else: + raise dns.exception.SyntaxError( + cls._invalid_type(gateway_type) + ) # pragma: no cover + return cls(gateway_type, gateway) + + # pylint: disable=unused-argument + def to_wire(self, file, compress=None, origin=None, canonicalize=False): + if self.type == 0: + pass + elif self.type == 1: + file.write(dns.ipv4.inet_aton(self.gateway)) + elif self.type == 2: + file.write(dns.ipv6.inet_aton(self.gateway)) + elif self.type == 3: + self.gateway.to_wire(file, None, origin, False) + else: + raise ValueError(self._invalid_type(self.type)) # pragma: no cover + + # pylint: enable=unused-argument + + @classmethod + def from_wire_parser(cls, gateway_type, parser, origin=None): + if gateway_type == 0: + gateway = None + elif gateway_type == 1: + gateway = dns.ipv4.inet_ntoa(parser.get_bytes(4)) + elif gateway_type == 2: + gateway = dns.ipv6.inet_ntoa(parser.get_bytes(16)) + elif gateway_type == 3: + gateway = parser.get_name(origin) + else: + raise dns.exception.FormError(cls._invalid_type(gateway_type)) + return cls(gateway_type, gateway) + + +class Bitmap: + """A helper class for the NSEC/NSEC3/CSYNC type bitmaps""" + + type_name = "" + + def __init__(self, windows=None): + last_window = -1 + self.windows = windows + for window, bitmap in self.windows: + if not isinstance(window, int): + raise ValueError(f"bad {self.type_name} window type") + if window <= last_window: + raise ValueError(f"bad {self.type_name} window order") + if 
window > 256: + raise ValueError(f"bad {self.type_name} window number") + last_window = window + if not isinstance(bitmap, bytes): + raise ValueError(f"bad {self.type_name} octets type") + if len(bitmap) == 0 or len(bitmap) > 32: + raise ValueError(f"bad {self.type_name} octets") + + def to_text(self) -> str: + text = "" + for window, bitmap in self.windows: + bits = [] + for i, byte in enumerate(bitmap): + for j in range(0, 8): + if byte & (0x80 >> j): + rdtype = window * 256 + i * 8 + j + bits.append(dns.rdatatype.to_text(rdtype)) + text += " " + " ".join(bits) + return text + + @classmethod + def from_text(cls, tok: "dns.tokenizer.Tokenizer") -> "Bitmap": + rdtypes = [] + for token in tok.get_remaining(): + rdtype = dns.rdatatype.from_text(token.unescape().value) + if rdtype == 0: + raise dns.exception.SyntaxError(f"{cls.type_name} with bit 0") + rdtypes.append(rdtype) + return cls.from_rdtypes(rdtypes) + + @classmethod + def from_rdtypes(cls, rdtypes: List[dns.rdatatype.RdataType]) -> "Bitmap": + rdtypes = sorted(rdtypes) + window = 0 + octets = 0 + prior_rdtype = 0 + bitmap = bytearray(b"\0" * 32) + windows = [] + for rdtype in rdtypes: + if rdtype == prior_rdtype: + continue + prior_rdtype = rdtype + new_window = rdtype // 256 + if new_window != window: + if octets != 0: + windows.append((window, bytes(bitmap[0:octets]))) + bitmap = bytearray(b"\0" * 32) + window = new_window + offset = rdtype % 256 + byte = offset // 8 + bit = offset % 8 + octets = byte + 1 + bitmap[byte] = bitmap[byte] | (0x80 >> bit) + if octets != 0: + windows.append((window, bytes(bitmap[0:octets]))) + return cls(windows) + + def to_wire(self, file: Any) -> None: + for window, bitmap in self.windows: + file.write(struct.pack("!BB", window, len(bitmap))) + file.write(bitmap) + + @classmethod + def from_wire_parser(cls, parser: "dns.wire.Parser") -> "Bitmap": + windows = [] + while parser.remaining() > 0: + window = parser.get_uint8() + bitmap = parser.get_counted_bytes() + windows.append((window, bitmap)) + return cls(windows) + + +def _priority_table(items): + by_priority = collections.defaultdict(list) + for rdata in items: + by_priority[rdata._processing_priority()].append(rdata) + return by_priority + + +def priority_processing_order(iterable): + items = list(iterable) + if len(items) == 1: + return items + by_priority = _priority_table(items) + ordered = [] + for k in sorted(by_priority.keys()): + rdatas = by_priority[k] + random.shuffle(rdatas) + ordered.extend(rdatas) + return ordered + + +_no_weight = 0.1 + + +def weighted_processing_order(iterable): + items = list(iterable) + if len(items) == 1: + return items + by_priority = _priority_table(items) + ordered = [] + for k in sorted(by_priority.keys()): + rdatas = by_priority[k] + total = sum(rdata._processing_weight() or _no_weight for rdata in rdatas) + while len(rdatas) > 1: + r = random.uniform(0, total) + for n, rdata in enumerate(rdatas): + weight = rdata._processing_weight() or _no_weight + if weight > r: + break + r -= weight + total -= weight + ordered.append(rdata) # pylint: disable=undefined-loop-variable + del rdatas[n] # pylint: disable=undefined-loop-variable + ordered.append(rdatas[0]) + return ordered + + +def parse_formatted_hex(formatted, num_chunks, chunk_size, separator): + if len(formatted) != num_chunks * (chunk_size + 1) - 1: + raise ValueError("invalid formatted hex string") + value = b"" + for _ in range(num_chunks): + chunk = formatted[0:chunk_size] + value += int(chunk, 16).to_bytes(chunk_size // 2, "big") + formatted = 
formatted[chunk_size:] + if len(formatted) > 0 and formatted[0] != separator: + raise ValueError("invalid formatted hex string") + formatted = formatted[1:] + return value diff --git a/backend/test/lib/python3.8/site-packages/dns/renderer.py b/backend/test/lib/python3.8/site-packages/dns/renderer.py new file mode 100644 index 0000000000000000000000000000000000000000..53e7c0f6faf98477594b8ce65162160ce2ec0f98 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/renderer.py @@ -0,0 +1,324 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Help for building DNS wire format messages""" + +import contextlib +import io +import random +import struct +import time + +import dns.exception +import dns.tsig + +QUESTION = 0 +ANSWER = 1 +AUTHORITY = 2 +ADDITIONAL = 3 + + +class Renderer: + """Helper class for building DNS wire-format messages. + + Most applications can use the higher-level L{dns.message.Message} + class and its to_wire() method to generate wire-format messages. + This class is for those applications which need finer control + over the generation of messages. + + Typical use:: + + r = dns.renderer.Renderer(id=1, flags=0x80, max_size=512) + r.add_question(qname, qtype, qclass) + r.add_rrset(dns.renderer.ANSWER, rrset_1) + r.add_rrset(dns.renderer.ANSWER, rrset_2) + r.add_rrset(dns.renderer.AUTHORITY, ns_rrset) + r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_1) + r.add_rrset(dns.renderer.ADDITIONAL, ad_rrset_2) + r.add_edns(0, 0, 4096) + r.write_header() + r.add_tsig(keyname, secret, 300, 1, 0, '', request_mac) + wire = r.get_wire() + + If padding is going to be used, then the OPT record MUST be + written after everything else in the additional section except for + the TSIG (if any). 
+ + output, an io.BytesIO, where rendering is written + + id: the message id + + flags: the message flags + + max_size: the maximum size of the message + + origin: the origin to use when rendering relative names + + compress: the compression table + + section: an int, the section currently being rendered + + counts: list of the number of RRs in each section + + mac: the MAC of the rendered message (if TSIG was used) + """ + + def __init__(self, id=None, flags=0, max_size=65535, origin=None): + """Initialize a new renderer.""" + + self.output = io.BytesIO() + if id is None: + self.id = random.randint(0, 65535) + else: + self.id = id + self.flags = flags + self.max_size = max_size + self.origin = origin + self.compress = {} + self.section = QUESTION + self.counts = [0, 0, 0, 0] + self.output.write(b"\x00" * 12) + self.mac = "" + self.reserved = 0 + self.was_padded = False + + def _rollback(self, where): + """Truncate the output buffer at offset *where*, and remove any + compression table entries that pointed beyond the truncation + point. + """ + + self.output.seek(where) + self.output.truncate() + keys_to_delete = [] + for k, v in self.compress.items(): + if v >= where: + keys_to_delete.append(k) + for k in keys_to_delete: + del self.compress[k] + + def _set_section(self, section): + """Set the renderer's current section. + + Sections must be rendered in order: QUESTION, ANSWER, AUTHORITY, + ADDITIONAL. Sections may be empty. + + Raises dns.exception.FormError if an attempt was made to set + a section value less than the current section. + """ + + if self.section != section: + if self.section > section: + raise dns.exception.FormError + self.section = section + + @contextlib.contextmanager + def _track_size(self): + start = self.output.tell() + yield start + if self.output.tell() > self.max_size: + self._rollback(start) + raise dns.exception.TooBig + + def add_question(self, qname, rdtype, rdclass=dns.rdataclass.IN): + """Add a question to the message.""" + + self._set_section(QUESTION) + with self._track_size(): + qname.to_wire(self.output, self.compress, self.origin) + self.output.write(struct.pack("!HH", rdtype, rdclass)) + self.counts[QUESTION] += 1 + + def add_rrset(self, section, rrset, **kw): + """Add the rrset to the specified section. + + Any keyword arguments are passed on to the rdataset's to_wire() + routine. + """ + + self._set_section(section) + with self._track_size(): + n = rrset.to_wire(self.output, self.compress, self.origin, **kw) + self.counts[section] += n + + def add_rdataset(self, section, name, rdataset, **kw): + """Add the rdataset to the specified section, using the specified + name as the owner name. + + Any keyword arguments are passed on to the rdataset's to_wire() + routine. + """ + + self._set_section(section) + with self._track_size(): + n = rdataset.to_wire(name, self.output, self.compress, self.origin, **kw) + self.counts[section] += n + + def add_opt(self, opt, pad=0, opt_size=0, tsig_size=0): + """Add *opt* to the additional section, applying padding if desired. The + padding will take the specified precomputed OPT size and TSIG size into + account.
+ + Note that we don't have a reliable way of knowing how big a GSS-TSIG digest + might be, so we might not get an even multiple of the pad in that case.""" + if pad: + ttl = opt.ttl + assert opt_size >= 11 + opt_rdata = opt[0] + size_without_padding = self.output.tell() + opt_size + tsig_size + remainder = size_without_padding % pad + if remainder: + pad = b"\x00" * (pad - remainder) + else: + pad = b"" + options = list(opt_rdata.options) + options.append(dns.edns.GenericOption(dns.edns.OptionType.PADDING, pad)) + opt = dns.message.Message._make_opt(ttl, opt_rdata.rdclass, options) + self.was_padded = True + self.add_rrset(ADDITIONAL, opt) + + def add_edns(self, edns, ednsflags, payload, options=None): + """Add an EDNS OPT record to the message.""" + + # make sure the EDNS version in ednsflags agrees with edns + ednsflags &= 0xFF00FFFF + ednsflags |= edns << 16 + opt = dns.message.Message._make_opt(ednsflags, payload, options) + self.add_opt(opt) + + def add_tsig( + self, + keyname, + secret, + fudge, + id, + tsig_error, + other_data, + request_mac, + algorithm=dns.tsig.default_algorithm, + ): + """Add a TSIG signature to the message.""" + + s = self.output.getvalue() + + if isinstance(secret, dns.tsig.Key): + key = secret + else: + key = dns.tsig.Key(keyname, secret, algorithm) + tsig = dns.message.Message._make_tsig( + keyname, algorithm, 0, fudge, b"", id, tsig_error, other_data + ) + (tsig, _) = dns.tsig.sign(s, key, tsig[0], int(time.time()), request_mac) + self._write_tsig(tsig, keyname) + + def add_multi_tsig( + self, + ctx, + keyname, + secret, + fudge, + id, + tsig_error, + other_data, + request_mac, + algorithm=dns.tsig.default_algorithm, + ): + """Add a TSIG signature to the message. Unlike add_tsig(), this can be + used for a series of consecutive DNS envelopes, e.g. for a zone + transfer over TCP [RFC2845, 4.4]. + + For the first message in the sequence, give ctx=None. For each + subsequent message, give the ctx that was returned from the + add_multi_tsig() call for the previous message.""" + + s = self.output.getvalue() + + if isinstance(secret, dns.tsig.Key): + key = secret + else: + key = dns.tsig.Key(keyname, secret, algorithm) + tsig = dns.message.Message._make_tsig( + keyname, algorithm, 0, fudge, b"", id, tsig_error, other_data + ) + (tsig, ctx) = dns.tsig.sign( + s, key, tsig[0], int(time.time()), request_mac, ctx, True + ) + self._write_tsig(tsig, keyname) + return ctx + + def _write_tsig(self, tsig, keyname): + if self.was_padded: + compress = None + else: + compress = self.compress + self._set_section(ADDITIONAL) + with self._track_size(): + keyname.to_wire(self.output, compress, self.origin) + self.output.write( + struct.pack("!HHIH", dns.rdatatype.TSIG, dns.rdataclass.ANY, 0, 0) + ) + rdata_start = self.output.tell() + tsig.to_wire(self.output) + + after = self.output.tell() + self.output.seek(rdata_start - 2) + self.output.write(struct.pack("!H", after - rdata_start)) + self.counts[ADDITIONAL] += 1 + self.output.seek(10) + self.output.write(struct.pack("!H", self.counts[ADDITIONAL])) + self.output.seek(0, io.SEEK_END) + + def write_header(self): + """Write the DNS message header. + + Writing the DNS message header is done after all sections + have been rendered, but before the optional TSIG signature + is added.
+ """ + + self.output.seek(0) + self.output.write( + struct.pack( + "!HHHHHH", + self.id, + self.flags, + self.counts[0], + self.counts[1], + self.counts[2], + self.counts[3], + ) + ) + self.output.seek(0, io.SEEK_END) + + def get_wire(self): + """Return the wire format message.""" + + return self.output.getvalue() + + def reserve(self, size: int) -> None: + """Reserve *size* bytes.""" + if size < 0: + raise ValueError("reserved amount must be non-negative") + if size > self.max_size: + raise ValueError("cannot reserve more than the maximum size") + self.reserved += size + self.max_size -= size + + def release_reserved(self) -> None: + """Release the reserved bytes.""" + self.max_size += self.reserved + self.reserved = 0 diff --git a/backend/test/lib/python3.8/site-packages/dns/resolver.py b/backend/test/lib/python3.8/site-packages/dns/resolver.py new file mode 100644 index 0000000000000000000000000000000000000000..f08f824d0e587d0e8ce4a3e89a0df87221e3f44f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/resolver.py @@ -0,0 +1,2054 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +"""DNS stub resolver.""" + +import contextlib +import random +import socket +import sys +import threading +import time +import warnings +from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, Union +from urllib.parse import urlparse + +import dns._ddr +import dns.edns +import dns.exception +import dns.flags +import dns.inet +import dns.ipv4 +import dns.ipv6 +import dns.message +import dns.name +import dns.nameserver +import dns.query +import dns.rcode +import dns.rdataclass +import dns.rdatatype +import dns.rdtypes.svcbbase +import dns.reversename +import dns.tsig + +if sys.platform == "win32": + import dns.win32util + + +class NXDOMAIN(dns.exception.DNSException): + """The DNS query name does not exist.""" + + supp_kwargs = {"qnames", "responses"} + fmt = None # we have our own __str__ implementation + + # pylint: disable=arguments-differ + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _check_kwargs(self, qnames, responses=None): + if not isinstance(qnames, (list, tuple, set)): + raise AttributeError("qnames must be a list, tuple or set") + if len(qnames) == 0: + raise AttributeError("qnames must contain at least one element") + if responses is None: + responses = {} + elif not isinstance(responses, dict): + raise AttributeError("responses must be a dict(qname=response)") + kwargs = dict(qnames=qnames, responses=responses) + return kwargs + + def __str__(self) -> str: + if "qnames" not in self.kwargs: + return super().__str__() + qnames = self.kwargs["qnames"] + if len(qnames) > 1: + msg = "None of DNS query names exist" + else: + msg = "The DNS query name does not exist" + qnames = ", ".join(map(str, qnames)) + return "{}: {}".format(msg, qnames) + + @property + def canonical_name(self): + """Return the unresolved canonical name.""" + if "qnames" not in self.kwargs: + raise TypeError("parametrized exception required") + for qname in self.kwargs["qnames"]: + response = self.kwargs["responses"][qname] + try: + cname = response.canonical_name() + if cname != qname: + return cname + except Exception: + # We can just eat this exception as it means there was + # something wrong with the response. + pass + return self.kwargs["qnames"][0] + + def __add__(self, e_nx): + """Augment by results from another NXDOMAIN exception.""" + qnames0 = list(self.kwargs.get("qnames", [])) + responses0 = dict(self.kwargs.get("responses", {})) + responses1 = e_nx.kwargs.get("responses", {}) + for qname1 in e_nx.kwargs.get("qnames", []): + if qname1 not in qnames0: + qnames0.append(qname1) + if qname1 in responses1: + responses0[qname1] = responses1[qname1] + return NXDOMAIN(qnames=qnames0, responses=responses0) + + def qnames(self): + """All of the names that were tried. + + Returns a list of ``dns.name.Name``. + """ + return self.kwargs["qnames"] + + def responses(self): + """A map from queried names to their NXDOMAIN responses. + + Returns a dict mapping a ``dns.name.Name`` to a + ``dns.message.Message``. + """ + return self.kwargs["responses"] + + def response(self, qname): + """The response for query *qname*. + + Returns a ``dns.message.Message``. 
+ """ + return self.kwargs["responses"][qname] + + +class YXDOMAIN(dns.exception.DNSException): + """The DNS query name is too long after DNAME substitution.""" + + +ErrorTuple = Tuple[ + Optional[str], + bool, + int, + Union[Exception, str], + Optional[dns.message.Message], +] + + +def _errors_to_text(errors: List[ErrorTuple]) -> List[str]: + """Turn a resolution errors trace into a list of text.""" + texts = [] + for err in errors: + texts.append("Server {} answered {}".format(err[0], err[3])) + return texts + + +class LifetimeTimeout(dns.exception.Timeout): + """The resolution lifetime expired.""" + + msg = "The resolution lifetime expired." + fmt = "%s after {timeout:.3f} seconds: {errors}" % msg[:-1] + supp_kwargs = {"timeout", "errors"} + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _fmt_kwargs(self, **kwargs): + srv_msgs = _errors_to_text(kwargs["errors"]) + return super()._fmt_kwargs( + timeout=kwargs["timeout"], errors="; ".join(srv_msgs) + ) + + +# We added more detail to resolution timeouts, but they are still +# subclasses of dns.exception.Timeout for backwards compatibility. We also +# keep dns.resolver.Timeout defined for backwards compatibility. +Timeout = LifetimeTimeout + + +class NoAnswer(dns.exception.DNSException): + """The DNS response does not contain an answer to the question.""" + + fmt = "The DNS response does not contain an answer to the question: {query}" + supp_kwargs = {"response"} + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _fmt_kwargs(self, **kwargs): + return super()._fmt_kwargs(query=kwargs["response"].question) + + def response(self): + return self.kwargs["response"] + + +class NoNameservers(dns.exception.DNSException): + """All nameservers failed to answer the query. + + errors: list of servers and respective errors + The type of errors is + [(server IP address, any object convertible to string)]. + Non-empty errors list will add explanatory message () + """ + + msg = "All nameservers failed to answer the query." + fmt = "%s {query}: {errors}" % msg[:-1] + supp_kwargs = {"request", "errors"} + + # We do this as otherwise mypy complains about unexpected keyword argument + # idna_exception + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def _fmt_kwargs(self, **kwargs): + srv_msgs = _errors_to_text(kwargs["errors"]) + return super()._fmt_kwargs( + query=kwargs["request"].question, errors="; ".join(srv_msgs) + ) + + +class NotAbsolute(dns.exception.DNSException): + """An absolute domain name is required but a relative name was provided.""" + + +class NoRootSOA(dns.exception.DNSException): + """There is no SOA RR at the DNS root name. This should never happen!""" + + +class NoMetaqueries(dns.exception.DNSException): + """DNS metaqueries are not allowed.""" + + +class NoResolverConfiguration(dns.exception.DNSException): + """Resolver configuration could not be read or specified no nameservers.""" + + +class Answer: + """DNS stub resolver answer. + + Instances of this class bundle up the result of a successful DNS + resolution. + + For convenience, the answer object implements much of the sequence + protocol, forwarding to its ``rrset`` attribute. E.g. + ``for a in answer`` is equivalent to ``for a in answer.rrset``. 
+    ``answer[i]`` is equivalent to ``answer.rrset[i]``, and
+    ``answer[i:j]`` is equivalent to ``answer.rrset[i:j]``.
+
+    Note that CNAMEs or DNAMEs in the response may mean that the answer
+    RRset's name is not the query name.
+    """
+
+    def __init__(
+        self,
+        qname: dns.name.Name,
+        rdtype: dns.rdatatype.RdataType,
+        rdclass: dns.rdataclass.RdataClass,
+        response: dns.message.QueryMessage,
+        nameserver: Optional[str] = None,
+        port: Optional[int] = None,
+    ) -> None:
+        self.qname = qname
+        self.rdtype = rdtype
+        self.rdclass = rdclass
+        self.response = response
+        self.nameserver = nameserver
+        self.port = port
+        self.chaining_result = response.resolve_chaining()
+        # Copy some attributes out of chaining_result for backwards
+        # compatibility and convenience.
+        self.canonical_name = self.chaining_result.canonical_name
+        self.rrset = self.chaining_result.answer
+        self.expiration = time.time() + self.chaining_result.minimum_ttl
+
+    def __getattr__(self, attr):  # pragma: no cover
+        if attr == "name":
+            return self.rrset.name
+        elif attr == "ttl":
+            return self.rrset.ttl
+        elif attr == "covers":
+            return self.rrset.covers
+        elif attr == "rdclass":
+            return self.rrset.rdclass
+        elif attr == "rdtype":
+            return self.rrset.rdtype
+        else:
+            raise AttributeError(attr)
+
+    def __len__(self) -> int:
+        return self.rrset and len(self.rrset) or 0
+
+    def __iter__(self):
+        return self.rrset and iter(self.rrset) or iter(tuple())
+
+    def __getitem__(self, i):
+        if self.rrset is None:
+            raise IndexError
+        return self.rrset[i]
+
+    def __delitem__(self, i):
+        if self.rrset is None:
+            raise IndexError
+        del self.rrset[i]
+
+
+class Answers(dict):
+    """A dict of DNS stub resolver answers, indexed by type."""
+
+
+class HostAnswers(Answers):
+    """A dict of DNS stub resolver answers to a host name lookup, indexed by
+    type.
+    """
+
+    @classmethod
+    def make(
+        cls,
+        v6: Optional[Answer] = None,
+        v4: Optional[Answer] = None,
+        add_empty: bool = True,
+    ) -> "HostAnswers":
+        answers = HostAnswers()
+        if v6 is not None and (add_empty or v6.rrset):
+            answers[dns.rdatatype.AAAA] = v6
+        if v4 is not None and (add_empty or v4.rrset):
+            answers[dns.rdatatype.A] = v4
+        return answers
+
+    # Returns pairs of (address, family) from this result, potentially
+    # filtering by address family.
+    def addresses_and_families(
+        self, family: int = socket.AF_UNSPEC
+    ) -> Iterator[Tuple[str, int]]:
+        if family == socket.AF_UNSPEC:
+            yield from self.addresses_and_families(socket.AF_INET6)
+            yield from self.addresses_and_families(socket.AF_INET)
+            return
+        elif family == socket.AF_INET6:
+            answer = self.get(dns.rdatatype.AAAA)
+        elif family == socket.AF_INET:
+            answer = self.get(dns.rdatatype.A)
+        else:
+            raise NotImplementedError(f"unknown address family {family}")
+        if answer:
+            for rdata in answer:
+                yield (rdata.address, family)
+
+    # Returns addresses from this result, potentially filtering by
+    # address family.
+    def addresses(self, family: int = socket.AF_UNSPEC) -> Iterator[str]:
+        return (pair[0] for pair in self.addresses_and_families(family))
+
+    # Returns the canonical name from this result.
+ def canonical_name(self) -> dns.name.Name: + answer = self.get(dns.rdatatype.AAAA, self.get(dns.rdatatype.A)) + return answer.canonical_name + + +class CacheStatistics: + """Cache Statistics""" + + def __init__(self, hits: int = 0, misses: int = 0) -> None: + self.hits = hits + self.misses = misses + + def reset(self) -> None: + self.hits = 0 + self.misses = 0 + + def clone(self) -> "CacheStatistics": + return CacheStatistics(self.hits, self.misses) + + +class CacheBase: + def __init__(self) -> None: + self.lock = threading.Lock() + self.statistics = CacheStatistics() + + def reset_statistics(self) -> None: + """Reset all statistics to zero.""" + with self.lock: + self.statistics.reset() + + def hits(self) -> int: + """How many hits has the cache had?""" + with self.lock: + return self.statistics.hits + + def misses(self) -> int: + """How many misses has the cache had?""" + with self.lock: + return self.statistics.misses + + def get_statistics_snapshot(self) -> CacheStatistics: + """Return a consistent snapshot of all the statistics. + + If running with multiple threads, it's better to take a + snapshot than to call statistics methods such as hits() and + misses() individually. + """ + with self.lock: + return self.statistics.clone() + + +CacheKey = Tuple[dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass] + + +class Cache(CacheBase): + """Simple thread-safe DNS answer cache.""" + + def __init__(self, cleaning_interval: float = 300.0) -> None: + """*cleaning_interval*, a ``float`` is the number of seconds between + periodic cleanings. + """ + + super().__init__() + self.data: Dict[CacheKey, Answer] = {} + self.cleaning_interval = cleaning_interval + self.next_cleaning: float = time.time() + self.cleaning_interval + + def _maybe_clean(self) -> None: + """Clean the cache if it's time to do so.""" + + now = time.time() + if self.next_cleaning <= now: + keys_to_delete = [] + for k, v in self.data.items(): + if v.expiration <= now: + keys_to_delete.append(k) + for k in keys_to_delete: + del self.data[k] + now = time.time() + self.next_cleaning = now + self.cleaning_interval + + def get(self, key: CacheKey) -> Optional[Answer]: + """Get the answer associated with *key*. + + Returns None if no answer is cached for the key. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. + + Returns a ``dns.resolver.Answer`` or ``None``. + """ + + with self.lock: + self._maybe_clean() + v = self.data.get(key) + if v is None or v.expiration <= time.time(): + self.statistics.misses += 1 + return None + self.statistics.hits += 1 + return v + + def put(self, key: CacheKey, value: Answer) -> None: + """Associate key and value in the cache. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. + + *value*, a ``dns.resolver.Answer``, the answer. + """ + + with self.lock: + self._maybe_clean() + self.data[key] = value + + def flush(self, key: Optional[CacheKey] = None) -> None: + """Flush the cache. + + If *key* is not ``None``, only that item is flushed. Otherwise the entire cache + is flushed. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. 
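+
+        A minimal sketch (the key values are illustrative):
+
+            cache = dns.resolver.Cache()
+            key = (dns.name.from_text("example."), dns.rdatatype.A,
+                   dns.rdataclass.IN)
+            cache.flush(key)  # drop a single entry, if present
+            cache.flush()     # drop everything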
+ """ + + with self.lock: + if key is not None: + if key in self.data: + del self.data[key] + else: + self.data = {} + self.next_cleaning = time.time() + self.cleaning_interval + + +class LRUCacheNode: + """LRUCache node.""" + + def __init__(self, key, value): + self.key = key + self.value = value + self.hits = 0 + self.prev = self + self.next = self + + def link_after(self, node: "LRUCacheNode") -> None: + self.prev = node + self.next = node.next + node.next.prev = self + node.next = self + + def unlink(self) -> None: + self.next.prev = self.prev + self.prev.next = self.next + + +class LRUCache(CacheBase): + """Thread-safe, bounded, least-recently-used DNS answer cache. + + This cache is better than the simple cache (above) if you're + running a web crawler or other process that does a lot of + resolutions. The LRUCache has a maximum number of nodes, and when + it is full, the least-recently used node is removed to make space + for a new one. + """ + + def __init__(self, max_size: int = 100000) -> None: + """*max_size*, an ``int``, is the maximum number of nodes to cache; + it must be greater than 0. + """ + + super().__init__() + self.data: Dict[CacheKey, LRUCacheNode] = {} + self.set_max_size(max_size) + self.sentinel: LRUCacheNode = LRUCacheNode(None, None) + self.sentinel.prev = self.sentinel + self.sentinel.next = self.sentinel + + def set_max_size(self, max_size: int) -> None: + if max_size < 1: + max_size = 1 + self.max_size = max_size + + def get(self, key: CacheKey) -> Optional[Answer]: + """Get the answer associated with *key*. + + Returns None if no answer is cached for the key. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. + + Returns a ``dns.resolver.Answer`` or ``None``. + """ + + with self.lock: + node = self.data.get(key) + if node is None: + self.statistics.misses += 1 + return None + # Unlink because we're either going to move the node to the front + # of the LRU list or we're going to free it. + node.unlink() + if node.value.expiration <= time.time(): + del self.data[node.key] + self.statistics.misses += 1 + return None + node.link_after(self.sentinel) + self.statistics.hits += 1 + node.hits += 1 + return node.value + + def get_hits_for_key(self, key: CacheKey) -> int: + """Return the number of cache hits associated with the specified key.""" + with self.lock: + node = self.data.get(key) + if node is None or node.value.expiration <= time.time(): + return 0 + else: + return node.hits + + def put(self, key: CacheKey, value: Answer) -> None: + """Associate key and value in the cache. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. + + *value*, a ``dns.resolver.Answer``, the answer. + """ + + with self.lock: + node = self.data.get(key) + if node is not None: + node.unlink() + del self.data[node.key] + while len(self.data) >= self.max_size: + gnode = self.sentinel.prev + gnode.unlink() + del self.data[gnode.key] + node = LRUCacheNode(key, value) + node.link_after(self.sentinel) + self.data[key] = node + + def flush(self, key: Optional[CacheKey] = None) -> None: + """Flush the cache. + + If *key* is not ``None``, only that item is flushed. Otherwise the entire cache + is flushed. + + *key*, a ``(dns.name.Name, dns.rdatatype.RdataType, dns.rdataclass.RdataClass)`` + tuple whose values are the query name, rdtype, and rdclass respectively. 
+ """ + + with self.lock: + if key is not None: + node = self.data.get(key) + if node is not None: + node.unlink() + del self.data[node.key] + else: + gnode = self.sentinel.next + while gnode != self.sentinel: + next = gnode.next + gnode.unlink() + gnode = next + self.data = {} + + +class _Resolution: + """Helper class for dns.resolver.Resolver.resolve(). + + All of the "business logic" of resolution is encapsulated in this + class, allowing us to have multiple resolve() implementations + using different I/O schemes without copying all of the + complicated logic. + + This class is a "friend" to dns.resolver.Resolver and manipulates + resolver data structures directly. + """ + + def __init__( + self, + resolver: "BaseResolver", + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + rdclass: Union[dns.rdataclass.RdataClass, str], + tcp: bool, + raise_on_no_answer: bool, + search: Optional[bool], + ) -> None: + if isinstance(qname, str): + qname = dns.name.from_text(qname, None) + rdtype = dns.rdatatype.RdataType.make(rdtype) + if dns.rdatatype.is_metatype(rdtype): + raise NoMetaqueries + rdclass = dns.rdataclass.RdataClass.make(rdclass) + if dns.rdataclass.is_metaclass(rdclass): + raise NoMetaqueries + self.resolver = resolver + self.qnames_to_try = resolver._get_qnames_to_try(qname, search) + self.qnames = self.qnames_to_try[:] + self.rdtype = rdtype + self.rdclass = rdclass + self.tcp = tcp + self.raise_on_no_answer = raise_on_no_answer + self.nxdomain_responses: Dict[dns.name.Name, dns.message.QueryMessage] = {} + # Initialize other things to help analysis tools + self.qname = dns.name.empty + self.nameservers: List[dns.nameserver.Nameserver] = [] + self.current_nameservers: List[dns.nameserver.Nameserver] = [] + self.errors: List[ErrorTuple] = [] + self.nameserver: Optional[dns.nameserver.Nameserver] = None + self.tcp_attempt = False + self.retry_with_tcp = False + self.request: Optional[dns.message.QueryMessage] = None + self.backoff = 0.0 + + def next_request( + self, + ) -> Tuple[Optional[dns.message.QueryMessage], Optional[Answer]]: + """Get the next request to send, and check the cache. + + Returns a (request, answer) tuple. At most one of request or + answer will not be None. + """ + + # We return a tuple instead of Union[Message,Answer] as it lets + # the caller avoid isinstance(). + + while len(self.qnames) > 0: + self.qname = self.qnames.pop(0) + + # Do we know the answer? + if self.resolver.cache: + answer = self.resolver.cache.get( + (self.qname, self.rdtype, self.rdclass) + ) + if answer is not None: + if answer.rrset is None and self.raise_on_no_answer: + raise NoAnswer(response=answer.response) + else: + return (None, answer) + answer = self.resolver.cache.get( + (self.qname, dns.rdatatype.ANY, self.rdclass) + ) + if answer is not None and answer.response.rcode() == dns.rcode.NXDOMAIN: + # cached NXDOMAIN; record it and continue to next + # name. 
+ self.nxdomain_responses[self.qname] = answer.response + continue + + # Build the request + request = dns.message.make_query(self.qname, self.rdtype, self.rdclass) + if self.resolver.keyname is not None: + request.use_tsig( + self.resolver.keyring, + self.resolver.keyname, + algorithm=self.resolver.keyalgorithm, + ) + request.use_edns( + self.resolver.edns, + self.resolver.ednsflags, + self.resolver.payload, + options=self.resolver.ednsoptions, + ) + if self.resolver.flags is not None: + request.flags = self.resolver.flags + + self.nameservers = self.resolver._enrich_nameservers( + self.resolver._nameservers, + self.resolver.nameserver_ports, + self.resolver.port, + ) + if self.resolver.rotate: + random.shuffle(self.nameservers) + self.current_nameservers = self.nameservers[:] + self.errors = [] + self.nameserver = None + self.tcp_attempt = False + self.retry_with_tcp = False + self.request = request + self.backoff = 0.10 + + return (request, None) + + # + # We've tried everything and only gotten NXDOMAINs. (We know + # it's only NXDOMAINs as anything else would have returned + # before now.) + # + raise NXDOMAIN(qnames=self.qnames_to_try, responses=self.nxdomain_responses) + + def next_nameserver(self) -> Tuple[dns.nameserver.Nameserver, bool, float]: + if self.retry_with_tcp: + assert self.nameserver is not None + assert not self.nameserver.is_always_max_size() + self.tcp_attempt = True + self.retry_with_tcp = False + return (self.nameserver, True, 0) + + backoff = 0.0 + if not self.current_nameservers: + if len(self.nameservers) == 0: + # Out of things to try! + raise NoNameservers(request=self.request, errors=self.errors) + self.current_nameservers = self.nameservers[:] + backoff = self.backoff + self.backoff = min(self.backoff * 2, 2) + + self.nameserver = self.current_nameservers.pop(0) + self.tcp_attempt = self.tcp or self.nameserver.is_always_max_size() + return (self.nameserver, self.tcp_attempt, backoff) + + def query_result( + self, response: Optional[dns.message.Message], ex: Optional[Exception] + ) -> Tuple[Optional[Answer], bool]: + # + # returns an (answer: Answer, end_loop: bool) tuple. + # + assert self.nameserver is not None + if ex: + # Exception during I/O or from_wire() + assert response is None + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + ex, + response, + ) + ) + if ( + isinstance(ex, dns.exception.FormError) + or isinstance(ex, EOFError) + or isinstance(ex, OSError) + or isinstance(ex, NotImplementedError) + ): + # This nameserver is no good, take it out of the mix. + self.nameservers.remove(self.nameserver) + elif isinstance(ex, dns.message.Truncated): + if self.tcp_attempt: + # Truncation with TCP is no good! + self.nameservers.remove(self.nameserver) + else: + self.retry_with_tcp = True + return (None, False) + # We got an answer! + assert response is not None + assert isinstance(response, dns.message.QueryMessage) + rcode = response.rcode() + if rcode == dns.rcode.NOERROR: + try: + answer = Answer( + self.qname, + self.rdtype, + self.rdclass, + response, + self.nameserver.answer_nameserver(), + self.nameserver.answer_port(), + ) + except Exception as e: + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + e, + response, + ) + ) + # The nameserver is no good, take it out of the mix. 
+ self.nameservers.remove(self.nameserver) + return (None, False) + if self.resolver.cache: + self.resolver.cache.put((self.qname, self.rdtype, self.rdclass), answer) + if answer.rrset is None and self.raise_on_no_answer: + raise NoAnswer(response=answer.response) + return (answer, True) + elif rcode == dns.rcode.NXDOMAIN: + # Further validate the response by making an Answer, even + # if we aren't going to cache it. + try: + answer = Answer( + self.qname, dns.rdatatype.ANY, dns.rdataclass.IN, response + ) + except Exception as e: + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + e, + response, + ) + ) + # The nameserver is no good, take it out of the mix. + self.nameservers.remove(self.nameserver) + return (None, False) + self.nxdomain_responses[self.qname] = response + if self.resolver.cache: + self.resolver.cache.put( + (self.qname, dns.rdatatype.ANY, self.rdclass), answer + ) + # Make next_nameserver() return None, so caller breaks its + # inner loop and calls next_request(). + return (None, True) + elif rcode == dns.rcode.YXDOMAIN: + yex = YXDOMAIN() + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + yex, + response, + ) + ) + raise yex + else: + # + # We got a response, but we're not happy with the + # rcode in it. + # + if rcode != dns.rcode.SERVFAIL or not self.resolver.retry_servfail: + self.nameservers.remove(self.nameserver) + self.errors.append( + ( + str(self.nameserver), + self.tcp_attempt, + self.nameserver.answer_port(), + dns.rcode.to_text(rcode), + response, + ) + ) + return (None, False) + + +class BaseResolver: + """DNS stub resolver.""" + + # We initialize in reset() + # + # pylint: disable=attribute-defined-outside-init + + domain: dns.name.Name + nameserver_ports: Dict[str, int] + port: int + search: List[dns.name.Name] + use_search_by_default: bool + timeout: float + lifetime: float + keyring: Optional[Any] + keyname: Optional[Union[dns.name.Name, str]] + keyalgorithm: Union[dns.name.Name, str] + edns: int + ednsflags: int + ednsoptions: Optional[List[dns.edns.Option]] + payload: int + cache: Any + flags: Optional[int] + retry_servfail: bool + rotate: bool + ndots: Optional[int] + _nameservers: Sequence[Union[str, dns.nameserver.Nameserver]] + + def __init__( + self, filename: str = "/etc/resolv.conf", configure: bool = True + ) -> None: + """*filename*, a ``str`` or file object, specifying a file + in standard /etc/resolv.conf format. This parameter is meaningful + only when *configure* is true and the platform is POSIX. + + *configure*, a ``bool``. If True (the default), the resolver + instance is configured in the normal fashion for the operating + system the resolver is running on. (I.e. by reading a + /etc/resolv.conf file on POSIX systems and from the registry + on Windows systems.) 
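+
+        A hypothetical sketch of a manually configured resolver (the
+        nameserver address is a documentation-range placeholder):
+
+            res = dns.resolver.Resolver(configure=False)
+            res.nameservers = ["203.0.113.1"]
+            res.timeout = 1.0
+            res.lifetime = 3.0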
+ """ + + self.reset() + if configure: + if sys.platform == "win32": + self.read_registry() + elif filename: + self.read_resolv_conf(filename) + + def reset(self) -> None: + """Reset all resolver configuration to the defaults.""" + + self.domain = dns.name.Name(dns.name.from_text(socket.gethostname())[1:]) + if len(self.domain) == 0: + self.domain = dns.name.root + self._nameservers = [] + self.nameserver_ports = {} + self.port = 53 + self.search = [] + self.use_search_by_default = False + self.timeout = 2.0 + self.lifetime = 5.0 + self.keyring = None + self.keyname = None + self.keyalgorithm = dns.tsig.default_algorithm + self.edns = -1 + self.ednsflags = 0 + self.ednsoptions = None + self.payload = 0 + self.cache = None + self.flags = None + self.retry_servfail = False + self.rotate = False + self.ndots = None + + def read_resolv_conf(self, f: Any) -> None: + """Process *f* as a file in the /etc/resolv.conf format. If f is + a ``str``, it is used as the name of the file to open; otherwise it + is treated as the file itself. + + Interprets the following items: + + - nameserver - name server IP address + + - domain - local domain name + + - search - search list for host-name lookup + + - options - supported options are rotate, timeout, edns0, and ndots + + """ + + nameservers = [] + if isinstance(f, str): + try: + cm: contextlib.AbstractContextManager = open(f) + except OSError: + # /etc/resolv.conf doesn't exist, can't be read, etc. + raise NoResolverConfiguration(f"cannot open {f}") + else: + cm = contextlib.nullcontext(f) + with cm as f: + for l in f: + if len(l) == 0 or l[0] == "#" or l[0] == ";": + continue + tokens = l.split() + + # Any line containing less than 2 tokens is malformed + if len(tokens) < 2: + continue + + if tokens[0] == "nameserver": + nameservers.append(tokens[1]) + elif tokens[0] == "domain": + self.domain = dns.name.from_text(tokens[1]) + # domain and search are exclusive + self.search = [] + elif tokens[0] == "search": + # the last search wins + self.search = [] + for suffix in tokens[1:]: + self.search.append(dns.name.from_text(suffix)) + # We don't set domain as it is not used if + # len(self.search) > 0 + elif tokens[0] == "options": + for opt in tokens[1:]: + if opt == "rotate": + self.rotate = True + elif opt == "edns0": + self.use_edns() + elif "timeout" in opt: + try: + self.timeout = int(opt.split(":")[1]) + except (ValueError, IndexError): + pass + elif "ndots" in opt: + try: + self.ndots = int(opt.split(":")[1]) + except (ValueError, IndexError): + pass + if len(nameservers) == 0: + raise NoResolverConfiguration("no nameservers") + # Assigning directly instead of appending means we invoke the + # setter logic, with additonal checking and enrichment. + self.nameservers = nameservers + + def read_registry(self) -> None: + """Extract resolver configuration from the Windows registry.""" + try: + info = dns.win32util.get_dns_info() # type: ignore + if info.domain is not None: + self.domain = info.domain + self.nameservers = info.nameservers + self.search = info.search + except AttributeError: + raise NotImplementedError + + def _compute_timeout( + self, + start: float, + lifetime: Optional[float] = None, + errors: Optional[List[ErrorTuple]] = None, + ) -> float: + lifetime = self.lifetime if lifetime is None else lifetime + now = time.time() + duration = now - start + if errors is None: + errors = [] + if duration < 0: + if duration < -1: + # Time going backwards is bad. Just give up. 
+ raise LifetimeTimeout(timeout=duration, errors=errors) + else: + # Time went backwards, but only a little. This can + # happen, e.g. under vmware with older linux kernels. + # Pretend it didn't happen. + duration = 0 + if duration >= lifetime: + raise LifetimeTimeout(timeout=duration, errors=errors) + return min(lifetime - duration, self.timeout) + + def _get_qnames_to_try( + self, qname: dns.name.Name, search: Optional[bool] + ) -> List[dns.name.Name]: + # This is a separate method so we can unit test the search + # rules without requiring the Internet. + if search is None: + search = self.use_search_by_default + qnames_to_try = [] + if qname.is_absolute(): + qnames_to_try.append(qname) + else: + abs_qname = qname.concatenate(dns.name.root) + if search: + if len(self.search) > 0: + # There is a search list, so use it exclusively + search_list = self.search[:] + elif self.domain != dns.name.root and self.domain is not None: + # We have some notion of a domain that isn't the root, so + # use it as the search list. + search_list = [self.domain] + else: + search_list = [] + # Figure out the effective ndots (default is 1) + if self.ndots is None: + ndots = 1 + else: + ndots = self.ndots + for suffix in search_list: + qnames_to_try.append(qname + suffix) + if len(qname) > ndots: + # The name has at least ndots dots, so we should try an + # absolute query first. + qnames_to_try.insert(0, abs_qname) + else: + # The name has less than ndots dots, so we should search + # first, then try the absolute name. + qnames_to_try.append(abs_qname) + else: + qnames_to_try.append(abs_qname) + return qnames_to_try + + def use_tsig( + self, + keyring: Any, + keyname: Optional[Union[dns.name.Name, str]] = None, + algorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, + ) -> None: + """Add a TSIG signature to each query. + + The parameters are passed to ``dns.message.Message.use_tsig()``; + see its documentation for details. + """ + + self.keyring = keyring + self.keyname = keyname + self.keyalgorithm = algorithm + + def use_edns( + self, + edns: Optional[Union[int, bool]] = 0, + ednsflags: int = 0, + payload: int = dns.message.DEFAULT_EDNS_PAYLOAD, + options: Optional[List[dns.edns.Option]] = None, + ) -> None: + """Configure EDNS behavior. + + *edns*, an ``int``, is the EDNS level to use. Specifying + ``None``, ``False``, or ``-1`` means "do not use EDNS", and in this case + the other parameters are ignored. Specifying ``True`` is + equivalent to specifying 0, i.e. "use EDNS0". + + *ednsflags*, an ``int``, the EDNS flag values. + + *payload*, an ``int``, is the EDNS sender's payload field, which is the + maximum size of UDP datagram the sender can handle. I.e. how big + a response to this message can be. + + *options*, a list of ``dns.edns.Option`` objects or ``None``, the EDNS + options. + """ + + if edns is None or edns is False: + edns = -1 + elif edns is True: + edns = 0 + self.edns = edns + self.ednsflags = ednsflags + self.payload = payload + self.ednsoptions = options + + def set_flags(self, flags: int) -> None: + """Overrides the default flags with your own. + + *flags*, an ``int``, the message flags to use. 
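+
+        For example, a sketch forcing only the "recursion desired" flag
+        on outgoing queries:
+
+            res = dns.resolver.Resolver()
+            res.set_flags(dns.flags.RD)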
+ """ + + self.flags = flags + + @classmethod + def _enrich_nameservers( + cls, + nameservers: Sequence[Union[str, dns.nameserver.Nameserver]], + nameserver_ports: Dict[str, int], + default_port: int, + ) -> List[dns.nameserver.Nameserver]: + enriched_nameservers = [] + if isinstance(nameservers, list): + for nameserver in nameservers: + enriched_nameserver: dns.nameserver.Nameserver + if isinstance(nameserver, dns.nameserver.Nameserver): + enriched_nameserver = nameserver + elif dns.inet.is_address(nameserver): + port = nameserver_ports.get(nameserver, default_port) + enriched_nameserver = dns.nameserver.Do53Nameserver( + nameserver, port + ) + else: + try: + if urlparse(nameserver).scheme != "https": + raise NotImplementedError + except Exception: + raise ValueError( + f"nameserver {nameserver} is not a " + "dns.nameserver.Nameserver instance or text form, " + "IP address, nor a valid https URL" + ) + enriched_nameserver = dns.nameserver.DoHNameserver(nameserver) + enriched_nameservers.append(enriched_nameserver) + else: + raise ValueError( + "nameservers must be a list or tuple (not a {})".format( + type(nameservers) + ) + ) + return enriched_nameservers + + @property + def nameservers( + self, + ) -> Sequence[Union[str, dns.nameserver.Nameserver]]: + return self._nameservers + + @nameservers.setter + def nameservers( + self, nameservers: Sequence[Union[str, dns.nameserver.Nameserver]] + ) -> None: + """ + *nameservers*, a ``list`` of nameservers, where a nameserver is either + a string interpretable as a nameserver, or a ``dns.nameserver.Nameserver`` + instance. + + Raises ``ValueError`` if *nameservers* is not a list of nameservers. + """ + # We just call _enrich_nameservers() for checking + self._enrich_nameservers(nameservers, self.nameserver_ports, self.port) + self._nameservers = nameservers + + +class Resolver(BaseResolver): + """DNS stub resolver.""" + + def resolve( + self, + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + tcp: bool = False, + source: Optional[str] = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: Optional[float] = None, + search: Optional[bool] = None, + ) -> Answer: # pylint: disable=arguments-differ + """Query nameservers to find the answer to the question. + + The *qname*, *rdtype*, and *rdclass* parameters may be objects + of the appropriate type, or strings that can be converted into objects + of the appropriate type. + + *qname*, a ``dns.name.Name`` or ``str``, the query name. + + *rdtype*, an ``int`` or ``str``, the query type. + + *rdclass*, an ``int`` or ``str``, the query class. + + *tcp*, a ``bool``. If ``True``, use TCP to make the query. + + *source*, a ``str`` or ``None``. If not ``None``, bind to this IP + address when making queries. + + *raise_on_no_answer*, a ``bool``. If ``True``, raise + ``dns.resolver.NoAnswer`` if there's no answer to the question. + + *source_port*, an ``int``, the port from which to send the message. + + *lifetime*, a ``float``, how many seconds a query should run + before timing out. + + *search*, a ``bool`` or ``None``, determines whether the + search list configured in the system's resolver configuration + are used for relative names, and whether the resolver's domain + may be added to relative names. The default is ``None``, + which causes the value of the resolver's + ``use_search_by_default`` attribute to be used. 
+ + Raises ``dns.resolver.LifetimeTimeout`` if no answers could be found + in the specified lifetime. + + Raises ``dns.resolver.NXDOMAIN`` if the query name does not exist. + + Raises ``dns.resolver.YXDOMAIN`` if the query name is too long after + DNAME substitution. + + Raises ``dns.resolver.NoAnswer`` if *raise_on_no_answer* is + ``True`` and the query name exists but has no RRset of the + desired type and class. + + Raises ``dns.resolver.NoNameservers`` if no non-broken + nameservers are available to answer the question. + + Returns a ``dns.resolver.Answer`` instance. + + """ + + resolution = _Resolution( + self, qname, rdtype, rdclass, tcp, raise_on_no_answer, search + ) + start = time.time() + while True: + (request, answer) = resolution.next_request() + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + # cache hit! + return answer + assert request is not None # needed for type checking + done = False + while not done: + (nameserver, tcp, backoff) = resolution.next_nameserver() + if backoff: + time.sleep(backoff) + timeout = self._compute_timeout(start, lifetime, resolution.errors) + try: + response = nameserver.query( + request, + timeout=timeout, + source=source, + source_port=source_port, + max_size=tcp, + ) + except Exception as ex: + (_, done) = resolution.query_result(None, ex) + continue + (answer, done) = resolution.query_result(response, None) + # Note we need to say "if answer is not None" and not just + # "if answer" because answer implements __len__, and python + # will call that. We want to return if we have an answer + # object, including in cases where its length is 0. + if answer is not None: + return answer + + def query( + self, + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + tcp: bool = False, + source: Optional[str] = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: Optional[float] = None, + ) -> Answer: # pragma: no cover + """Query nameservers to find the answer to the question. + + This method calls resolve() with ``search=True``, and is + provided for backwards compatibility with prior versions of + dnspython. See the documentation for the resolve() method for + further details. + """ + warnings.warn( + "please use dns.resolver.Resolver.resolve() instead", + DeprecationWarning, + stacklevel=2, + ) + return self.resolve( + qname, + rdtype, + rdclass, + tcp, + source, + raise_on_no_answer, + source_port, + lifetime, + True, + ) + + def resolve_address(self, ipaddr: str, *args: Any, **kwargs: Any) -> Answer: + """Use a resolver to run a reverse query for PTR records. + + This utilizes the resolve() method to perform a PTR lookup on the + specified IP address. + + *ipaddr*, a ``str``, the IPv4 or IPv6 address you want to get + the PTR record for. + + All other arguments that can be passed to the resolve() function + except for rdtype and rdclass are also supported by this + function. + """ + # We make a modified kwargs for type checking happiness, as otherwise + # we get a legit warning about possibly having rdtype and rdclass + # in the kwargs more than once. 
+        modified_kwargs: Dict[str, Any] = {}
+        modified_kwargs.update(kwargs)
+        modified_kwargs["rdtype"] = dns.rdatatype.PTR
+        modified_kwargs["rdclass"] = dns.rdataclass.IN
+        return self.resolve(
+            dns.reversename.from_address(ipaddr), *args, **modified_kwargs
+        )
+
+    def resolve_name(
+        self,
+        name: Union[dns.name.Name, str],
+        family: int = socket.AF_UNSPEC,
+        **kwargs: Any,
+    ) -> HostAnswers:
+        """Use a resolver to query for address records.
+
+        This utilizes the resolve() method to perform A and/or AAAA lookups on
+        the specified name.
+
+        *name*, a ``dns.name.Name`` or ``str``, the name to resolve.
+
+        *family*, an ``int``, the address family. If socket.AF_UNSPEC
+        (the default), both A and AAAA records will be retrieved.
+
+        All other arguments that can be passed to the resolve() function
+        except for rdtype and rdclass are also supported by this
+        function.
+        """
+        # We make a modified kwargs for type checking happiness, as otherwise
+        # we get a legit warning about possibly having rdtype and rdclass
+        # in the kwargs more than once.
+        modified_kwargs: Dict[str, Any] = {}
+        modified_kwargs.update(kwargs)
+        modified_kwargs.pop("rdtype", None)
+        modified_kwargs["rdclass"] = dns.rdataclass.IN
+
+        if family == socket.AF_INET:
+            v4 = self.resolve(name, dns.rdatatype.A, **modified_kwargs)
+            return HostAnswers.make(v4=v4)
+        elif family == socket.AF_INET6:
+            v6 = self.resolve(name, dns.rdatatype.AAAA, **modified_kwargs)
+            return HostAnswers.make(v6=v6)
+        elif family != socket.AF_UNSPEC:
+            raise NotImplementedError(f"unknown address family {family}")
+
+        raise_on_no_answer = modified_kwargs.pop("raise_on_no_answer", True)
+        lifetime = modified_kwargs.pop("lifetime", None)
+        start = time.time()
+        v6 = self.resolve(
+            name,
+            dns.rdatatype.AAAA,
+            raise_on_no_answer=False,
+            lifetime=self._compute_timeout(start, lifetime),
+            **modified_kwargs,
+        )
+        # Note that setting name ensures we query the same name
+        # for A as we did for AAAA. (This is just in case search lists
+        # are active by default in the resolver configuration and
+        # we might be talking to a server that says NXDOMAIN when it
+        # wants to say NOERROR no data.)
+        name = v6.qname
+        v4 = self.resolve(
+            name,
+            dns.rdatatype.A,
+            raise_on_no_answer=False,
+            lifetime=self._compute_timeout(start, lifetime),
+            **modified_kwargs,
+        )
+        answers = HostAnswers.make(v6=v6, v4=v4, add_empty=not raise_on_no_answer)
+        if not answers:
+            raise NoAnswer(response=v6.response)
+        return answers
+
+    # pylint: disable=redefined-outer-name
+
+    def canonical_name(self, name: Union[dns.name.Name, str]) -> dns.name.Name:
+        """Determine the canonical name of *name*.
+
+        The canonical name is the name the resolver uses for queries
+        after all CNAME and DNAME renamings have been applied.
+
+        *name*, a ``dns.name.Name`` or ``str``, the query name.
+
+        This method can raise any exception that ``resolve()`` can
+        raise, other than ``dns.resolver.NoAnswer`` and
+        ``dns.resolver.NXDOMAIN``.
+
+        Returns a ``dns.name.Name``.
+        """
+        try:
+            answer = self.resolve(name, raise_on_no_answer=False)
+            canonical_name = answer.canonical_name
+        except dns.resolver.NXDOMAIN as e:
+            canonical_name = e.canonical_name
+        return canonical_name
+
+    # pylint: enable=redefined-outer-name
+
+    def try_ddr(self, lifetime: float = 5.0) -> None:
+        """Try to update the resolver's nameservers using Discovery of Designated
+        Resolvers (DDR). If successful, the resolver will subsequently use
+        DNS-over-HTTPS or DNS-over-TLS for future queries.
+
+        *lifetime*, a float, is the maximum time to spend attempting DDR. The default
+        is 5 seconds.
+
+        If the SVCB query is successful and results in a non-empty list of nameservers,
+        then the resolver's nameservers are set to the returned servers in priority
+        order.
+
+        The current implementation does not use any address hints from the SVCB record,
+        nor does it resolve addresses for the SVCB target name, rather it assumes that
+        the bootstrap nameserver will always be one of the addresses and uses it.
+        A future revision to the code may offer fuller support. The code verifies that
+        the bootstrap nameserver is in the Subject Alternative Name field of the
+        TLS certificate.
+        """
+        try:
+            expiration = time.time() + lifetime
+            answer = self.resolve(
+                dns._ddr._local_resolver_name, "SVCB", lifetime=lifetime
+            )
+            timeout = dns.query._remaining(expiration)
+            nameservers = dns._ddr._get_nameservers_sync(answer, timeout)
+            if len(nameservers) > 0:
+                self.nameservers = nameservers
+        except Exception:
+            pass
+
+
+#: The default resolver.
+default_resolver: Optional[Resolver] = None
+
+
+def get_default_resolver() -> Resolver:
+    """Get the default resolver, initializing it if necessary."""
+    if default_resolver is None:
+        reset_default_resolver()
+    assert default_resolver is not None
+    return default_resolver
+
+
+def reset_default_resolver() -> None:
+    """Re-initialize default resolver.
+
+    Note that the resolver configuration (i.e. /etc/resolv.conf on UNIX
+    systems) will be re-read immediately.
+    """
+
+    global default_resolver
+    default_resolver = Resolver()
+
+
+def resolve(
+    qname: Union[dns.name.Name, str],
+    rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A,
+    rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN,
+    tcp: bool = False,
+    source: Optional[str] = None,
+    raise_on_no_answer: bool = True,
+    source_port: int = 0,
+    lifetime: Optional[float] = None,
+    search: Optional[bool] = None,
+) -> Answer:  # pragma: no cover
+    """Query nameservers to find the answer to the question.
+
+    This is a convenience function that uses the default resolver
+    object to make the query.
+
+    See ``dns.resolver.Resolver.resolve`` for more information on the
+    parameters.
+    """
+
+    return get_default_resolver().resolve(
+        qname,
+        rdtype,
+        rdclass,
+        tcp,
+        source,
+        raise_on_no_answer,
+        source_port,
+        lifetime,
+        search,
+    )
+
+
+def query(
+    qname: Union[dns.name.Name, str],
+    rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A,
+    rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN,
+    tcp: bool = False,
+    source: Optional[str] = None,
+    raise_on_no_answer: bool = True,
+    source_port: int = 0,
+    lifetime: Optional[float] = None,
+) -> Answer:  # pragma: no cover
+    """Query nameservers to find the answer to the question.
+
+    This function calls resolve() with ``search=True``, and is
+    provided for backwards compatibility with prior versions of
+    dnspython. See the documentation for the resolve() method for
+    further details.
+    """
+    warnings.warn(
+        "please use dns.resolver.resolve() instead", DeprecationWarning, stacklevel=2
+    )
+    return resolve(
+        qname,
+        rdtype,
+        rdclass,
+        tcp,
+        source,
+        raise_on_no_answer,
+        source_port,
+        lifetime,
+        True,
+    )
+
+
+def resolve_address(ipaddr: str, *args: Any, **kwargs: Any) -> Answer:
+    """Use a resolver to run a reverse query for PTR records.
+
+    See ``dns.resolver.Resolver.resolve_address`` for more information on the
+    parameters.
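+
+    A sketch of a reverse lookup (the address is illustrative):
+
+        answer = dns.resolver.resolve_address("8.8.8.8")
+        print(answer[0].target)  # the PTR target name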
+ """ + + return get_default_resolver().resolve_address(ipaddr, *args, **kwargs) + + +def resolve_name( + name: Union[dns.name.Name, str], family: int = socket.AF_UNSPEC, **kwargs: Any +) -> HostAnswers: + """Use a resolver to query for address records. + + See ``dns.resolver.Resolver.resolve_name`` for more information on the + parameters. + """ + + return get_default_resolver().resolve_name(name, family, **kwargs) + + +def canonical_name(name: Union[dns.name.Name, str]) -> dns.name.Name: + """Determine the canonical name of *name*. + + See ``dns.resolver.Resolver.canonical_name`` for more information on the + parameters and possible exceptions. + """ + + return get_default_resolver().canonical_name(name) + + +def try_ddr(lifetime: float = 5.0) -> None: + """Try to update the default resolver's nameservers using Discovery of Designated + Resolvers (DDR). If successful, the resolver will subsequently use + DNS-over-HTTPS or DNS-over-TLS for future queries. + + See :py:func:`dns.resolver.Resolver.try_ddr` for more information. + """ + return get_default_resolver().try_ddr(lifetime) + + +def zone_for_name( + name: Union[dns.name.Name, str], + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + tcp: bool = False, + resolver: Optional[Resolver] = None, + lifetime: Optional[float] = None, +) -> dns.name.Name: + """Find the name of the zone which contains the specified name. + + *name*, an absolute ``dns.name.Name`` or ``str``, the query name. + + *rdclass*, an ``int``, the query class. + + *tcp*, a ``bool``. If ``True``, use TCP to make the query. + + *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. + If ``None``, the default, then the default resolver is used. + + *lifetime*, a ``float``, the total time to allow for the queries needed + to determine the zone. If ``None``, the default, then only the individual + query limits of the resolver apply. + + Raises ``dns.resolver.NoRootSOA`` if there is no SOA RR at the DNS + root. (This is only likely to happen if you're using non-default + root servers in your network and they are misconfigured.) + + Raises ``dns.resolver.LifetimeTimeout`` if the answer could not be + found in the allotted lifetime. + + Returns a ``dns.name.Name``. + """ + + if isinstance(name, str): + name = dns.name.from_text(name, dns.name.root) + if resolver is None: + resolver = get_default_resolver() + if not name.is_absolute(): + raise NotAbsolute(name) + start = time.time() + expiration: Optional[float] + if lifetime is not None: + expiration = start + lifetime + else: + expiration = None + while 1: + try: + rlifetime: Optional[float] + if expiration is not None: + rlifetime = expiration - time.time() + if rlifetime <= 0: + rlifetime = 0 + else: + rlifetime = None + answer = resolver.resolve( + name, dns.rdatatype.SOA, rdclass, tcp, lifetime=rlifetime + ) + assert answer.rrset is not None + if answer.rrset.name == name: + return name + # otherwise we were CNAMEd or DNAMEd and need to look higher + except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer) as e: + if isinstance(e, dns.resolver.NXDOMAIN): + response = e.responses().get(name) + else: + response = e.response() # pylint: disable=no-value-for-parameter + if response: + for rrs in response.authority: + if rrs.rdtype == dns.rdatatype.SOA and rrs.rdclass == rdclass: + (nr, _, _) = rrs.name.fullcompare(name) + if nr == dns.name.NAMERELN_SUPERDOMAIN: + # We're doing a proper superdomain check as + # if the name were equal we ought to have gotten + # it in the answer section! 
We are ignoring the + # possibility that the authority is insane and + # is including multiple SOA RRs for different + # authorities. + return rrs.name + # we couldn't extract anything useful from the response (e.g. it's + # a type 3 NXDOMAIN) + try: + name = name.parent() + except dns.name.NoParent: + raise NoRootSOA + + +def make_resolver_at( + where: Union[dns.name.Name, str], + port: int = 53, + family: int = socket.AF_UNSPEC, + resolver: Optional[Resolver] = None, +) -> Resolver: + """Make a stub resolver using the specified destination as the full resolver. + + *where*, a ``dns.name.Name`` or ``str`` the domain name or IP address of the + full resolver. + + *port*, an ``int``, the port to use. If not specified, the default is 53. + + *family*, an ``int``, the address family to use. This parameter is used if + *where* is not an address. The default is ``socket.AF_UNSPEC`` in which case + the first address returned by ``resolve_name()`` will be used, otherwise the + first address of the specified family will be used. + + *resolver*, a ``dns.resolver.Resolver`` or ``None``, the resolver to use for + resolution of hostnames. If not specified, the default resolver will be used. + + Returns a ``dns.resolver.Resolver`` or raises an exception. + """ + if resolver is None: + resolver = get_default_resolver() + nameservers: List[Union[str, dns.nameserver.Nameserver]] = [] + if isinstance(where, str) and dns.inet.is_address(where): + nameservers.append(dns.nameserver.Do53Nameserver(where, port)) + else: + for address in resolver.resolve_name(where, family).addresses(): + nameservers.append(dns.nameserver.Do53Nameserver(address, port)) + res = dns.resolver.Resolver(configure=False) + res.nameservers = nameservers + return res + + +def resolve_at( + where: Union[dns.name.Name, str], + qname: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.A, + rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + tcp: bool = False, + source: Optional[str] = None, + raise_on_no_answer: bool = True, + source_port: int = 0, + lifetime: Optional[float] = None, + search: Optional[bool] = None, + port: int = 53, + family: int = socket.AF_UNSPEC, + resolver: Optional[Resolver] = None, +) -> Answer: + """Query nameservers to find the answer to the question. + + This is a convenience function that calls ``dns.resolver.make_resolver_at()`` to + make a resolver, and then uses it to resolve the query. + + See ``dns.resolver.Resolver.resolve`` for more information on the resolution + parameters, and ``dns.resolver.make_resolver_at`` for information about the resolver + parameters *where*, *port*, *family*, and *resolver*. + + If making more than one query, it is more efficient to call + ``dns.resolver.make_resolver_at()`` and then use that resolver for the queries + instead of calling ``resolve_at()`` multiple times. + """ + return make_resolver_at(where, port, family, resolver).resolve( + qname, + rdtype, + rdclass, + tcp, + source, + raise_on_no_answer, + source_port, + lifetime, + search, + ) + + +# +# Support for overriding the system resolver for all python code in the +# running process. 
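+#
+# A hypothetical sketch of overriding and later restoring the system
+# resolver (see override_system_resolver() and restore_system_resolver()
+# below):
+#
+#     dns.resolver.override_system_resolver()
+#     socket.getaddrinfo("dnspython.org", 80)  # now served by dnspython
+#     dns.resolver.restore_system_resolver()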
+# + +_protocols_for_socktype = { + socket.SOCK_DGRAM: [socket.SOL_UDP], + socket.SOCK_STREAM: [socket.SOL_TCP], +} + +_resolver = None +_original_getaddrinfo = socket.getaddrinfo +_original_getnameinfo = socket.getnameinfo +_original_getfqdn = socket.getfqdn +_original_gethostbyname = socket.gethostbyname +_original_gethostbyname_ex = socket.gethostbyname_ex +_original_gethostbyaddr = socket.gethostbyaddr + + +def _getaddrinfo( + host=None, service=None, family=socket.AF_UNSPEC, socktype=0, proto=0, flags=0 +): + if flags & socket.AI_NUMERICHOST != 0: + # Short circuit directly into the system's getaddrinfo(). We're + # not adding any value in this case, and this avoids infinite loops + # because dns.query.* needs to call getaddrinfo() for IPv6 scoping + # reasons. We will also do this short circuit below if we + # discover that the host is an address literal. + return _original_getaddrinfo(host, service, family, socktype, proto, flags) + if flags & (socket.AI_ADDRCONFIG | socket.AI_V4MAPPED) != 0: + # Not implemented. We raise a gaierror as opposed to a + # NotImplementedError as it helps callers handle errors more + # appropriately. [Issue #316] + # + # We raise EAI_FAIL as opposed to EAI_SYSTEM because there is + # no EAI_SYSTEM on Windows [Issue #416]. We didn't go for + # EAI_BADFLAGS as the flags aren't bad, we just don't + # implement them. + raise socket.gaierror( + socket.EAI_FAIL, "Non-recoverable failure in name resolution" + ) + if host is None and service is None: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + addrs = [] + canonical_name = None # pylint: disable=redefined-outer-name + # Is host None or an address literal? If so, use the system's + # getaddrinfo(). + if host is None: + return _original_getaddrinfo(host, service, family, socktype, proto, flags) + try: + # We don't care about the result of af_for_address(), we're just + # calling it so it raises an exception if host is not an IPv4 or + # IPv6 address. + dns.inet.af_for_address(host) + return _original_getaddrinfo(host, service, family, socktype, proto, flags) + except Exception: + pass + # Something needs resolution! + try: + answers = _resolver.resolve_name(host, family) + addrs = answers.addresses_and_families() + canonical_name = answers.canonical_name().to_text(True) + except dns.resolver.NXDOMAIN: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + except Exception: + # We raise EAI_AGAIN here as the failure may be temporary + # (e.g. a timeout) and EAI_SYSTEM isn't defined on Windows. + # [Issue #416] + raise socket.gaierror(socket.EAI_AGAIN, "Temporary failure in name resolution") + port = None + try: + # Is it a port literal? 
+ if service is None: + port = 0 + else: + port = int(service) + except Exception: + if flags & socket.AI_NUMERICSERV == 0: + try: + port = socket.getservbyname(service) + except Exception: + pass + if port is None: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + tuples = [] + if socktype == 0: + socktypes = [socket.SOCK_DGRAM, socket.SOCK_STREAM] + else: + socktypes = [socktype] + if flags & socket.AI_CANONNAME != 0: + cname = canonical_name + else: + cname = "" + for addr, af in addrs: + for socktype in socktypes: + for proto in _protocols_for_socktype[socktype]: + addr_tuple = dns.inet.low_level_address_tuple((addr, port), af) + tuples.append((af, socktype, proto, cname, addr_tuple)) + if len(tuples) == 0: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + return tuples + + +def _getnameinfo(sockaddr, flags=0): + host = sockaddr[0] + port = sockaddr[1] + if len(sockaddr) == 4: + scope = sockaddr[3] + family = socket.AF_INET6 + else: + scope = None + family = socket.AF_INET + tuples = _getaddrinfo(host, port, family, socket.SOCK_STREAM, socket.SOL_TCP, 0) + if len(tuples) > 1: + raise socket.error("sockaddr resolved to multiple addresses") + addr = tuples[0][4][0] + if flags & socket.NI_DGRAM: + pname = "udp" + else: + pname = "tcp" + qname = dns.reversename.from_address(addr) + if flags & socket.NI_NUMERICHOST == 0: + try: + answer = _resolver.resolve(qname, "PTR") + hostname = answer.rrset[0].target.to_text(True) + except (dns.resolver.NXDOMAIN, dns.resolver.NoAnswer): + if flags & socket.NI_NAMEREQD: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + hostname = addr + if scope is not None: + hostname += "%" + str(scope) + else: + hostname = addr + if scope is not None: + hostname += "%" + str(scope) + if flags & socket.NI_NUMERICSERV: + service = str(port) + else: + service = socket.getservbyport(port, pname) + return (hostname, service) + + +def _getfqdn(name=None): + if name is None: + name = socket.gethostname() + try: + (name, _, _) = _gethostbyaddr(name) + # Python's version checks aliases too, but our gethostbyname + # ignores them, so we do so here as well. + except Exception: + pass + return name + + +def _gethostbyname(name): + return _gethostbyname_ex(name)[2][0] + + +def _gethostbyname_ex(name): + aliases = [] + addresses = [] + tuples = _getaddrinfo( + name, 0, socket.AF_INET, socket.SOCK_STREAM, socket.SOL_TCP, socket.AI_CANONNAME + ) + canonical = tuples[0][3] + for item in tuples: + addresses.append(item[4][0]) + # XXX we just ignore aliases + return (canonical, aliases, addresses) + + +def _gethostbyaddr(ip): + try: + dns.ipv6.inet_aton(ip) + sockaddr = (ip, 80, 0, 0) + family = socket.AF_INET6 + except Exception: + try: + dns.ipv4.inet_aton(ip) + except Exception: + raise socket.gaierror(socket.EAI_NONAME, "Name or service not known") + sockaddr = (ip, 80) + family = socket.AF_INET + (name, _) = _getnameinfo(sockaddr, socket.NI_NAMEREQD) + aliases = [] + addresses = [] + tuples = _getaddrinfo( + name, 0, family, socket.SOCK_STREAM, socket.SOL_TCP, socket.AI_CANONNAME + ) + canonical = tuples[0][3] + # We only want to include an address from the tuples if it's the + # same as the one we asked about. We do this comparison in binary + # to avoid any differences in text representations. 
+ bin_ip = dns.inet.inet_pton(family, ip) + for item in tuples: + addr = item[4][0] + bin_addr = dns.inet.inet_pton(family, addr) + if bin_ip == bin_addr: + addresses.append(addr) + # XXX we just ignore aliases + return (canonical, aliases, addresses) + + +def override_system_resolver(resolver: Optional[Resolver] = None) -> None: + """Override the system resolver routines in the socket module with + versions which use dnspython's resolver. + + This can be useful in testing situations where you want to control + the resolution behavior of python code without having to change + the system's resolver settings (e.g. /etc/resolv.conf). + + The resolver to use may be specified; if it's not, the default + resolver will be used. + + resolver, a ``dns.resolver.Resolver`` or ``None``, the resolver to use. + """ + + if resolver is None: + resolver = get_default_resolver() + global _resolver + _resolver = resolver + socket.getaddrinfo = _getaddrinfo + socket.getnameinfo = _getnameinfo + socket.getfqdn = _getfqdn + socket.gethostbyname = _gethostbyname + socket.gethostbyname_ex = _gethostbyname_ex + socket.gethostbyaddr = _gethostbyaddr + + +def restore_system_resolver() -> None: + """Undo the effects of prior override_system_resolver().""" + + global _resolver + _resolver = None + socket.getaddrinfo = _original_getaddrinfo + socket.getnameinfo = _original_getnameinfo + socket.getfqdn = _original_getfqdn + socket.gethostbyname = _original_gethostbyname + socket.gethostbyname_ex = _original_gethostbyname_ex + socket.gethostbyaddr = _original_gethostbyaddr diff --git a/backend/test/lib/python3.8/site-packages/dns/reversename.py b/backend/test/lib/python3.8/site-packages/dns/reversename.py new file mode 100644 index 0000000000000000000000000000000000000000..8236c711f16f1e3b514f182a8254cb0e0ce45a68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/reversename.py @@ -0,0 +1,105 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2006-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Reverse Map Names.""" + +import binascii + +import dns.ipv4 +import dns.ipv6 +import dns.name + +ipv4_reverse_domain = dns.name.from_text("in-addr.arpa.") +ipv6_reverse_domain = dns.name.from_text("ip6.arpa.") + + +def from_address( + text: str, + v4_origin: dns.name.Name = ipv4_reverse_domain, + v6_origin: dns.name.Name = ipv6_reverse_domain, +) -> dns.name.Name: + """Convert an IPv4 or IPv6 address in textual form into a Name object whose + value is the reverse-map domain name of the address. + + *text*, a ``str``, is an IPv4 or IPv6 address in textual form + (e.g. '127.0.0.1', '::1') + + *v4_origin*, a ``dns.name.Name`` to append to the labels corresponding to + the address if the address is an IPv4 address, instead of the default + (in-addr.arpa.) 
+
+    *v6_origin*, a ``dns.name.Name`` to append to the labels corresponding to
+    the address if the address is an IPv6 address, instead of the default
+    (ip6.arpa.)
+
+    Raises ``dns.exception.SyntaxError`` if the address is badly formed.
+
+    Returns a ``dns.name.Name``.
+    """
+
+    try:
+        v6 = dns.ipv6.inet_aton(text)
+        if dns.ipv6.is_mapped(v6):
+            parts = ["%d" % byte for byte in v6[12:]]
+            origin = v4_origin
+        else:
+            parts = [x for x in str(binascii.hexlify(v6).decode())]
+            origin = v6_origin
+    except Exception:
+        parts = ["%d" % byte for byte in dns.ipv4.inet_aton(text)]
+        origin = v4_origin
+    return dns.name.from_text(".".join(reversed(parts)), origin=origin)
+
+
+def to_address(
+    name: dns.name.Name,
+    v4_origin: dns.name.Name = ipv4_reverse_domain,
+    v6_origin: dns.name.Name = ipv6_reverse_domain,
+) -> str:
+    """Convert a reverse map domain name into textual address form.
+
+    *name*, a ``dns.name.Name``, an IPv4 or IPv6 address in reverse-map name
+    form.
+
+    *v4_origin*, a ``dns.name.Name`` representing the top-level domain for
+    IPv4 addresses, instead of the default (in-addr.arpa.)
+
+    *v6_origin*, a ``dns.name.Name`` representing the top-level domain for
+    IPv6 addresses, instead of the default (ip6.arpa.)
+
+    Raises ``dns.exception.SyntaxError`` if the name does not have a
+    reverse-map form.
+
+    Returns a ``str``.
+    """
+
+    if name.is_subdomain(v4_origin):
+        name = name.relativize(v4_origin)
+        text = b".".join(reversed(name.labels))
+        # run through inet_ntoa() to check syntax and make pretty.
+        return dns.ipv4.inet_ntoa(dns.ipv4.inet_aton(text))
+    elif name.is_subdomain(v6_origin):
+        name = name.relativize(v6_origin)
+        labels = list(reversed(name.labels))
+        parts = []
+        for i in range(0, len(labels), 4):
+            parts.append(b"".join(labels[i : i + 4]))
+        text = b":".join(parts)
+        # run through inet_ntoa() to check syntax and make pretty.
+        return dns.ipv6.inet_ntoa(dns.ipv6.inet_aton(text))
+    else:
+        raise dns.exception.SyntaxError("unknown reverse-map address family")
diff --git a/backend/test/lib/python3.8/site-packages/dns/rrset.py b/backend/test/lib/python3.8/site-packages/dns/rrset.py
new file mode 100644
index 0000000000000000000000000000000000000000..350de13e3be21f1e5117a49bed800d932b952f87
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/dns/rrset.py
@@ -0,0 +1,286 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+# Copyright (C) 2003-2017 Nominum, Inc.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose with or without fee is hereby granted,
+# provided that the above copyright notice and this permission notice
+# appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""DNS RRsets (an RRset is a named rdataset)"""
+
+from typing import Any, Collection, Dict, Optional, Union, cast
+
+import dns.name
+import dns.rdataclass
+import dns.rdataset
+import dns.renderer
+
+
+class RRset(dns.rdataset.Rdataset):
+
+    """A DNS RRset (named rdataset).
+ + RRset inherits from Rdataset, and RRsets can be treated as + Rdatasets in most cases. There are, however, a few notable + exceptions. RRsets have different to_wire() and to_text() method + arguments, reflecting the fact that RRsets always have an owner + name. + """ + + __slots__ = ["name", "deleting"] + + def __init__( + self, + name: dns.name.Name, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + deleting: Optional[dns.rdataclass.RdataClass] = None, + ): + """Create a new RRset.""" + + super().__init__(rdclass, rdtype, covers) + self.name = name + self.deleting = deleting + + def _clone(self): + obj = super()._clone() + obj.name = self.name + obj.deleting = self.deleting + return obj + + def __repr__(self): + if self.covers == 0: + ctext = "" + else: + ctext = "(" + dns.rdatatype.to_text(self.covers) + ")" + if self.deleting is not None: + dtext = " delete=" + dns.rdataclass.to_text(self.deleting) + else: + dtext = "" + return ( + "<DNS " + + str(self.name) + + " " + + dns.rdataclass.to_text(self.rdclass) + + " " + + dns.rdatatype.to_text(self.rdtype) + + ctext + + dtext + + " RRset: " + + self._rdata_repr() + + ">" + ) + + def __str__(self): + return self.to_text() + + def __eq__(self, other): + if isinstance(other, RRset): + if self.name != other.name: + return False + elif not isinstance(other, dns.rdataset.Rdataset): + return False + return super().__eq__(other) + + def match(self, *args: Any, **kwargs: Any) -> bool: # type: ignore[override] + """Does this rrset match the specified attributes? + + Behaves as :py:func:`full_match()` if the first argument is a + ``dns.name.Name``, and as :py:func:`dns.rdataset.Rdataset.match()` + otherwise. + + (This behavior fixes a design mistake where the signature of this + method became incompatible with that of its superclass. The fix + makes RRsets matchable as Rdatasets while preserving backwards + compatibility.) + """ + if isinstance(args[0], dns.name.Name): + return self.full_match(*args, **kwargs) # type: ignore[arg-type] + else: + return super().match(*args, **kwargs) # type: ignore[arg-type] + + def full_match( + self, + name: dns.name.Name, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType, + deleting: Optional[dns.rdataclass.RdataClass] = None, + ) -> bool: + """Returns ``True`` if this rrset matches the specified name, class, + type, covers, and deletion state. + """ + if not super().match(rdclass, rdtype, covers): + return False + if self.name != name or self.deleting != deleting: + return False + return True + + # pylint: disable=arguments-differ + + def to_text( # type: ignore[override] + self, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + **kw: Dict[str, Any] + ) -> str: + """Convert the RRset into DNS zone file format. + + See ``dns.name.Name.choose_relativity`` for more information + on how *origin* and *relativize* determine the way names + are emitted. + + Any additional keyword arguments are passed on to the rdata + ``to_text()`` method. + + *origin*, a ``dns.name.Name`` or ``None``, the origin for relative + names. + + *relativize*, a ``bool``. If ``True``, names will be relativized + to *origin*. 
+ """ + + return super().to_text( + self.name, origin, relativize, self.deleting, **kw # type: ignore + ) + + def to_wire( # type: ignore[override] + self, + file: Any, + compress: Optional[dns.name.CompressType] = None, # type: ignore + origin: Optional[dns.name.Name] = None, + **kw: Dict[str, Any] + ) -> int: + """Convert the RRset to wire format. + + All keyword arguments are passed to ``dns.rdataset.to_wire()``; see + that function for details. + + Returns an ``int``, the number of records emitted. + """ + + return super().to_wire( + self.name, file, compress, origin, self.deleting, **kw # type:ignore + ) + + # pylint: enable=arguments-differ + + def to_rdataset(self) -> dns.rdataset.Rdataset: + """Convert an RRset into an Rdataset. + + Returns a ``dns.rdataset.Rdataset``. + """ + return dns.rdataset.from_rdata_list(self.ttl, list(self)) + + +def from_text_list( + name: Union[dns.name.Name, str], + ttl: int, + rdclass: Union[dns.rdataclass.RdataClass, str], + rdtype: Union[dns.rdatatype.RdataType, str], + text_rdatas: Collection[str], + idna_codec: Optional[dns.name.IDNACodec] = None, + origin: Optional[dns.name.Name] = None, + relativize: bool = True, + relativize_to: Optional[dns.name.Name] = None, +) -> RRset: + """Create an RRset with the specified name, TTL, class, and type, and with + the specified list of rdatas in text format. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use; if ``None``, the default IDNA 2003 + encoder/decoder is used. + + *origin*, a ``dns.name.Name`` (or ``None``), the + origin to use for relative names. + + *relativize*, a ``bool``. If true, name will be relativized. + + *relativize_to*, a ``dns.name.Name`` (or ``None``), the origin to use + when relativizing names. If not set, the *origin* value will be used. + + Returns a ``dns.rrset.RRset`` object. + """ + + if isinstance(name, str): + name = dns.name.from_text(name, None, idna_codec=idna_codec) + rdclass = dns.rdataclass.RdataClass.make(rdclass) + rdtype = dns.rdatatype.RdataType.make(rdtype) + r = RRset(name, rdclass, rdtype) + r.update_ttl(ttl) + for t in text_rdatas: + rd = dns.rdata.from_text( + r.rdclass, r.rdtype, t, origin, relativize, relativize_to, idna_codec + ) + r.add(rd) + return r + + +def from_text( + name: Union[dns.name.Name, str], + ttl: int, + rdclass: Union[dns.rdataclass.RdataClass, str], + rdtype: Union[dns.rdatatype.RdataType, str], + *text_rdatas: Any +) -> RRset: + """Create an RRset with the specified name, TTL, class, and type and with + the specified rdatas in text format. + + Returns a ``dns.rrset.RRset`` object. + """ + + return from_text_list( + name, ttl, rdclass, rdtype, cast(Collection[str], text_rdatas) + ) + + +def from_rdata_list( + name: Union[dns.name.Name, str], + ttl: int, + rdatas: Collection[dns.rdata.Rdata], + idna_codec: Optional[dns.name.IDNACodec] = None, +) -> RRset: + """Create an RRset with the specified name and TTL, and with + the specified list of rdata objects. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder to use; if ``None``, the default IDNA 2003 + encoder/decoder is used. + + Returns a ``dns.rrset.RRset`` object. 
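+
+    A minimal usage sketch (illustrative names and values, not part of the
+    original documentation):
+
+        rdata = dns.rdata.from_text("IN", "A", "10.0.0.1")
+        rrset = dns.rrset.from_rdata_list("www.example.", 300, [rdata])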
+
+    """
+
+    if isinstance(name, str):
+        name = dns.name.from_text(name, None, idna_codec=idna_codec)
+
+    if len(rdatas) == 0:
+        raise ValueError("rdata list must not be empty")
+    r = None
+    for rd in rdatas:
+        if r is None:
+            r = RRset(name, rd.rdclass, rd.rdtype)
+            r.update_ttl(ttl)
+        r.add(rd)
+    assert r is not None
+    return r
+
+
+def from_rdata(name: Union[dns.name.Name, str], ttl: int, *rdatas: Any) -> RRset:
+    """Create an RRset with the specified name and TTL, and with
+    the specified rdata objects.
+
+    Returns a ``dns.rrset.RRset`` object.
+    """
+
+    return from_rdata_list(name, ttl, cast(Collection[dns.rdata.Rdata], rdatas))
diff --git a/backend/test/lib/python3.8/site-packages/dns/serial.py b/backend/test/lib/python3.8/site-packages/dns/serial.py
new file mode 100644
index 0000000000000000000000000000000000000000..3417299be2bbb3726780f1ebf74bb16974cae308
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/dns/serial.py
@@ -0,0 +1,118 @@
+# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
+
+"""Serial Number Arithmetic from RFC 1982"""
+
+
+class Serial:
+    def __init__(self, value: int, bits: int = 32):
+        self.value = value % 2**bits
+        self.bits = bits
+
+    def __repr__(self):
+        return f"dns.serial.Serial({self.value}, {self.bits})"
+
+    def __eq__(self, other):
+        if isinstance(other, int):
+            other = Serial(other, self.bits)
+        elif not isinstance(other, Serial) or other.bits != self.bits:
+            return NotImplemented
+        return self.value == other.value
+
+    def __ne__(self, other):
+        if isinstance(other, int):
+            other = Serial(other, self.bits)
+        elif not isinstance(other, Serial) or other.bits != self.bits:
+            return NotImplemented
+        return self.value != other.value
+
+    def __lt__(self, other):
+        if isinstance(other, int):
+            other = Serial(other, self.bits)
+        elif not isinstance(other, Serial) or other.bits != self.bits:
+            return NotImplemented
+        if self.value < other.value and other.value - self.value < 2 ** (self.bits - 1):
+            return True
+        elif self.value > other.value and self.value - other.value > 2 ** (
+            self.bits - 1
+        ):
+            return True
+        else:
+            return False
+
+    def __le__(self, other):
+        return self == other or self < other
+
+    def __gt__(self, other):
+        if isinstance(other, int):
+            other = Serial(other, self.bits)
+        elif not isinstance(other, Serial) or other.bits != self.bits:
+            return NotImplemented
+        if self.value < other.value and other.value - self.value > 2 ** (self.bits - 1):
+            return True
+        elif self.value > other.value and self.value - other.value < 2 ** (
+            self.bits - 1
+        ):
+            return True
+        else:
+            return False
+
+    def __ge__(self, other):
+        return self == other or self > other
+
+    def __add__(self, other):
+        v = self.value
+        if isinstance(other, Serial):
+            delta = other.value
+        elif isinstance(other, int):
+            delta = other
+        else:
+            raise ValueError
+        if abs(delta) > (2 ** (self.bits - 1) - 1):
+            raise ValueError
+        v += delta
+        v = v % 2**self.bits
+        return Serial(v, self.bits)
+
+    def __iadd__(self, other):
+        v = self.value
+        if isinstance(other, Serial):
+            delta = other.value
+        elif isinstance(other, int):
+            delta = other
+        else:
+            raise ValueError
+        if abs(delta) > (2 ** (self.bits - 1) - 1):
+            raise ValueError
+        v += delta
+        v = v % 2**self.bits
+        self.value = v
+        return self
+
+    def __sub__(self, other):
+        v = self.value
+        if isinstance(other, Serial):
+            delta = other.value
+        elif isinstance(other, int):
+            delta = other
+        else:
+            raise ValueError
+        if abs(delta) > (2 ** (self.bits - 1) - 1):
+            raise ValueError
+        v -= delta
+        v = v %
2**self.bits + return Serial(v, self.bits) + + def __isub__(self, other): + v = self.value + if isinstance(other, Serial): + delta = other.value + elif isinstance(other, int): + delta = other + else: + raise ValueError + if abs(delta) > (2 ** (self.bits - 1) - 1): + raise ValueError + v -= delta + v = v % 2**self.bits + self.value = v + return self diff --git a/backend/test/lib/python3.8/site-packages/dns/set.py b/backend/test/lib/python3.8/site-packages/dns/set.py new file mode 100644 index 0000000000000000000000000000000000000000..fa50ed97cf387981bc43b8683aa6d76ecc910ecc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/set.py @@ -0,0 +1,308 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import itertools + + +class Set: + + """A simple set class. + + This class was originally used to deal with sets being missing in + ancient versions of python, but dnspython will continue to use it + as these sets are based on lists and are thus indexable, and this + ability is widely used in dnspython applications. + """ + + __slots__ = ["items"] + + def __init__(self, items=None): + """Initialize the set. + + *items*, an iterable or ``None``, the initial set of items. + """ + + self.items = dict() + if items is not None: + for item in items: + # This is safe for how we use set, but if other code + # subclasses it could be a legitimate issue. + self.add(item) # lgtm[py/init-calls-subclass] + + def __repr__(self): + return "dns.set.Set(%s)" % repr(list(self.items.keys())) + + def add(self, item): + """Add an item to the set.""" + + if item not in self.items: + self.items[item] = None + + def remove(self, item): + """Remove an item from the set.""" + + try: + del self.items[item] + except KeyError: + raise ValueError + + def discard(self, item): + """Remove an item from the set if present.""" + + self.items.pop(item, None) + + def pop(self): + """Remove an arbitrary item from the set.""" + (k, _) = self.items.popitem() + return k + + def _clone(self) -> "Set": + """Make a (shallow) copy of the set. + + There is a 'clone protocol' that subclasses of this class + should use. To make a copy, first call your super's _clone() + method, and use the object returned as the new instance. Then + make shallow copies of the attributes defined in the subclass. + + This protocol allows us to write the set algorithms that + return new instances (e.g. union) once, and keep using them in + subclasses. 
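+
+        Illustrative subclass sketch (hypothetical ``extra`` attribute):
+
+            def _clone(self):
+                obj = super()._clone()
+                obj.extra = self.extra
+                return obj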
+        """
+
+        if hasattr(self, "_clone_class"):
+            cls = self._clone_class  # type: ignore
+        else:
+            cls = self.__class__
+        obj = cls.__new__(cls)
+        obj.items = dict()
+        obj.items.update(self.items)
+        return obj
+
+    def __copy__(self):
+        """Make a (shallow) copy of the set."""
+
+        return self._clone()
+
+    def copy(self):
+        """Make a (shallow) copy of the set."""
+
+        return self._clone()
+
+    def union_update(self, other):
+        """Update the set, adding any elements from other which are not
+        already in the set.
+        """
+
+        if not isinstance(other, Set):
+            raise ValueError("other must be a Set instance")
+        if self is other:  # lgtm[py/comparison-using-is]
+            return
+        for item in other.items:
+            self.add(item)
+
+    def intersection_update(self, other):
+        """Update the set, removing any elements that are not also in *other*.
+        """
+
+        if not isinstance(other, Set):
+            raise ValueError("other must be a Set instance")
+        if self is other:  # lgtm[py/comparison-using-is]
+            return
+        # we make a copy of the list so that we can remove items from
+        # the list without breaking the iterator.
+        for item in list(self.items):
+            if item not in other.items:
+                del self.items[item]
+
+    def difference_update(self, other):
+        """Update the set, removing any elements that are also in *other*.
+        """
+
+        if not isinstance(other, Set):
+            raise ValueError("other must be a Set instance")
+        if self is other:  # lgtm[py/comparison-using-is]
+            self.items.clear()
+        else:
+            for item in other.items:
+                self.discard(item)
+
+    def symmetric_difference_update(self, other):
+        """Update the set, keeping only the elements that are in exactly one
+        of the two sets.
+        """
+
+        if not isinstance(other, Set):
+            raise ValueError("other must be a Set instance")
+        if self is other:  # lgtm[py/comparison-using-is]
+            self.items.clear()
+        else:
+            overlap = self.intersection(other)
+            self.union_update(other)
+            self.difference_update(overlap)
+
+    def union(self, other):
+        """Return a new set which is the union of ``self`` and ``other``.
+
+        Returns the same Set type as this set.
+        """
+
+        obj = self._clone()
+        obj.union_update(other)
+        return obj
+
+    def intersection(self, other):
+        """Return a new set which is the intersection of ``self`` and
+        ``other``.
+
+        Returns the same Set type as this set.
+        """
+
+        obj = self._clone()
+        obj.intersection_update(other)
+        return obj
+
+    def difference(self, other):
+        """Return a new set which is ``self`` - ``other``, i.e. the items
+        in ``self`` which are not also in ``other``.
+
+        Returns the same Set type as this set.
+        """
+
+        obj = self._clone()
+        obj.difference_update(other)
+        return obj
+
+    def symmetric_difference(self, other):
+        """Return a new set which is (``self`` - ``other``) | (``other``
+        - ``self``), i.e. the items in either ``self`` or ``other`` which
+        are not contained in their intersection.
+
+        Returns the same Set type as this set.
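+
+        Illustrative example (hypothetical values):
+
+            a = Set([1, 2, 3])
+            b = Set([3, 4])
+            list(a.symmetric_difference(b))  # [1, 2, 4]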
+ """ + + obj = self._clone() + obj.symmetric_difference_update(other) + return obj + + def __or__(self, other): + return self.union(other) + + def __and__(self, other): + return self.intersection(other) + + def __add__(self, other): + return self.union(other) + + def __sub__(self, other): + return self.difference(other) + + def __xor__(self, other): + return self.symmetric_difference(other) + + def __ior__(self, other): + self.union_update(other) + return self + + def __iand__(self, other): + self.intersection_update(other) + return self + + def __iadd__(self, other): + self.union_update(other) + return self + + def __isub__(self, other): + self.difference_update(other) + return self + + def __ixor__(self, other): + self.symmetric_difference_update(other) + return self + + def update(self, other): + """Update the set, adding any elements from other which are not + already in the set. + + *other*, the collection of items with which to update the set, which + may be any iterable type. + """ + + for item in other: + self.add(item) + + def clear(self): + """Make the set empty.""" + self.items.clear() + + def __eq__(self, other): + return self.items == other.items + + def __ne__(self, other): + return not self.__eq__(other) + + def __len__(self): + return len(self.items) + + def __iter__(self): + return iter(self.items) + + def __getitem__(self, i): + if isinstance(i, slice): + return list(itertools.islice(self.items, i.start, i.stop, i.step)) + else: + return next(itertools.islice(self.items, i, i + 1)) + + def __delitem__(self, i): + if isinstance(i, slice): + for elt in list(self[i]): + del self.items[elt] + else: + del self.items[self[i]] + + def issubset(self, other): + """Is this set a subset of *other*? + + Returns a ``bool``. + """ + + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + for item in self.items: + if item not in other.items: + return False + return True + + def issuperset(self, other): + """Is this set a superset of *other*? + + Returns a ``bool``. + """ + + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + for item in other.items: + if item not in self.items: + return False + return True + + def isdisjoint(self, other): + if not isinstance(other, Set): + raise ValueError("other must be a Set instance") + for item in other.items: + if item in self.items: + return False + return True diff --git a/backend/test/lib/python3.8/site-packages/dns/tokenizer.py b/backend/test/lib/python3.8/site-packages/dns/tokenizer.py new file mode 100644 index 0000000000000000000000000000000000000000..454cac4a85e609d3429df45cbdfcb4103bd19213 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/tokenizer.py @@ -0,0 +1,708 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Tokenize DNS zone file format""" + +import io +import sys +from typing import Any, List, Optional, Tuple + +import dns.exception +import dns.name +import dns.ttl + +_DELIMITERS = {" ", "\t", "\n", ";", "(", ")", '"'} +_QUOTING_DELIMITERS = {'"'} + +EOF = 0 +EOL = 1 +WHITESPACE = 2 +IDENTIFIER = 3 +QUOTED_STRING = 4 +COMMENT = 5 +DELIMITER = 6 + + +class UngetBufferFull(dns.exception.DNSException): + """An attempt was made to unget a token when the unget buffer was full.""" + + +class Token: + """A DNS zone file format token. + + ttype: The token type + value: The token value + has_escape: Does the token value contain escapes? + """ + + def __init__( + self, + ttype: int, + value: Any = "", + has_escape: bool = False, + comment: Optional[str] = None, + ): + """Initialize a token instance.""" + + self.ttype = ttype + self.value = value + self.has_escape = has_escape + self.comment = comment + + def is_eof(self) -> bool: + return self.ttype == EOF + + def is_eol(self) -> bool: + return self.ttype == EOL + + def is_whitespace(self) -> bool: + return self.ttype == WHITESPACE + + def is_identifier(self) -> bool: + return self.ttype == IDENTIFIER + + def is_quoted_string(self) -> bool: + return self.ttype == QUOTED_STRING + + def is_comment(self) -> bool: + return self.ttype == COMMENT + + def is_delimiter(self) -> bool: # pragma: no cover (we don't return delimiters yet) + return self.ttype == DELIMITER + + def is_eol_or_eof(self) -> bool: + return self.ttype == EOL or self.ttype == EOF + + def __eq__(self, other): + if not isinstance(other, Token): + return False + return self.ttype == other.ttype and self.value == other.value + + def __ne__(self, other): + if not isinstance(other, Token): + return True + return self.ttype != other.ttype or self.value != other.value + + def __str__(self): + return '%d "%s"' % (self.ttype, self.value) + + def unescape(self) -> "Token": + if not self.has_escape: + return self + unescaped = "" + l = len(self.value) + i = 0 + while i < l: + c = self.value[i] + i += 1 + if c == "\\": + if i >= l: # pragma: no cover (can't happen via get()) + raise dns.exception.UnexpectedEnd + c = self.value[i] + i += 1 + if c.isdigit(): + if i >= l: + raise dns.exception.UnexpectedEnd + c2 = self.value[i] + i += 1 + if i >= l: + raise dns.exception.UnexpectedEnd + c3 = self.value[i] + i += 1 + if not (c2.isdigit() and c3.isdigit()): + raise dns.exception.SyntaxError + codepoint = int(c) * 100 + int(c2) * 10 + int(c3) + if codepoint > 255: + raise dns.exception.SyntaxError + c = chr(codepoint) + unescaped += c + return Token(self.ttype, unescaped) + + def unescape_to_bytes(self) -> "Token": + # We used to use unescape() for TXT-like records, but this + # caused problems as we'd process DNS escapes into Unicode code + # points instead of byte values, and then a to_text() of the + # processed data would not equal the original input. For + # example, \226 in the TXT record would have a to_text() of + # \195\162 because we applied UTF-8 encoding to Unicode code + # point 226. + # + # We now apply escapes while converting directly to bytes, + # avoiding this double encoding. 
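+        #
+        # For example (hypothetical value), a token whose text is
+        # 'caf\195\169' unescapes to the bytes b'caf\xc3\xa9', the UTF-8
+        # encoding of 'café'.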
+ # + # This code also handles cases where the unicode input has + # non-ASCII code-points in it by converting it to UTF-8. TXT + # records aren't defined for Unicode, but this is the best we + # can do to preserve meaning. For example, + # + # foo\u200bbar + # + # (where \u200b is Unicode code point 0x200b) will be treated + # as if the input had been the UTF-8 encoding of that string, + # namely: + # + # foo\226\128\139bar + # + unescaped = b"" + l = len(self.value) + i = 0 + while i < l: + c = self.value[i] + i += 1 + if c == "\\": + if i >= l: # pragma: no cover (can't happen via get()) + raise dns.exception.UnexpectedEnd + c = self.value[i] + i += 1 + if c.isdigit(): + if i >= l: + raise dns.exception.UnexpectedEnd + c2 = self.value[i] + i += 1 + if i >= l: + raise dns.exception.UnexpectedEnd + c3 = self.value[i] + i += 1 + if not (c2.isdigit() and c3.isdigit()): + raise dns.exception.SyntaxError + codepoint = int(c) * 100 + int(c2) * 10 + int(c3) + if codepoint > 255: + raise dns.exception.SyntaxError + unescaped += b"%c" % (codepoint) + else: + # Note that as mentioned above, if c is a Unicode + # code point outside of the ASCII range, then this + # += is converting that code point to its UTF-8 + # encoding and appending multiple bytes to + # unescaped. + unescaped += c.encode() + else: + unescaped += c.encode() + return Token(self.ttype, bytes(unescaped)) + + +class Tokenizer: + """A DNS zone file format tokenizer. + + A token object is basically a (type, value) tuple. The valid + types are EOF, EOL, WHITESPACE, IDENTIFIER, QUOTED_STRING, + COMMENT, and DELIMITER. + + file: The file to tokenize + + ungotten_char: The most recently ungotten character, or None. + + ungotten_token: The most recently ungotten token, or None. + + multiline: The current multiline level. This value is increased + by one every time a '(' delimiter is read, and decreased by one every time + a ')' delimiter is read. + + quoting: This variable is true if the tokenizer is currently + reading a quoted string. + + eof: This variable is true if the tokenizer has encountered EOF. + + delimiters: The current delimiter dictionary. + + line_number: The current line number + + filename: A filename that will be returned by the where() method. + + idna_codec: A dns.name.IDNACodec, specifies the IDNA + encoder/decoder. If None, the default IDNA 2003 + encoder/decoder is used. + """ + + def __init__( + self, + f: Any = sys.stdin, + filename: Optional[str] = None, + idna_codec: Optional[dns.name.IDNACodec] = None, + ): + """Initialize a tokenizer instance. + + f: The file to tokenize. The default is sys.stdin. + This parameter may also be a string, in which case the tokenizer + will take its input from the contents of the string. + + filename: the name of the filename that the where() method + will return. + + idna_codec: A dns.name.IDNACodec, specifies the IDNA + encoder/decoder. If None, the default IDNA 2003 + encoder/decoder is used. 
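+
+        A minimal usage sketch (illustrative input):
+
+            tok = Tokenizer("example. 3600 IN A 10.0.0.1")
+            token = tok.get()  # Token(IDENTIFIER, "example.")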
+ """ + + if isinstance(f, str): + f = io.StringIO(f) + if filename is None: + filename = "<string>" + elif isinstance(f, bytes): + f = io.StringIO(f.decode()) + if filename is None: + filename = "<string>" + else: + if filename is None: + if f is sys.stdin: + filename = "<stdin>" + else: + filename = "<file>" + self.file = f + self.ungotten_char: Optional[str] = None + self.ungotten_token: Optional[Token] = None + self.multiline = 0 + self.quoting = False + self.eof = False + self.delimiters = _DELIMITERS + self.line_number = 1 + assert filename is not None + self.filename = filename + if idna_codec is None: + self.idna_codec: dns.name.IDNACodec = dns.name.IDNA_2003 + else: + self.idna_codec = idna_codec + + def _get_char(self) -> str: + """Read a character from input.""" + + if self.ungotten_char is None: + if self.eof: + c = "" + else: + c = self.file.read(1) + if c == "": + self.eof = True + elif c == "\n": + self.line_number += 1 + else: + c = self.ungotten_char + self.ungotten_char = None + return c + + def where(self) -> Tuple[str, int]: + """Return the current location in the input. + + Returns a (string, int) tuple. The first item is the filename of + the input, the second is the current line number. + """ + + return (self.filename, self.line_number) + + def _unget_char(self, c: str) -> None: + """Unget a character. + + The unget buffer for characters is only one character large; it is + an error to try to unget a character when the unget buffer is not + empty. + + c: the character to unget + raises UngetBufferFull: there is already an ungotten char + """ + + if self.ungotten_char is not None: + # this should never happen! + raise UngetBufferFull # pragma: no cover + self.ungotten_char = c + + def skip_whitespace(self) -> int: + """Consume input until a non-whitespace character is encountered. + + The non-whitespace character is then ungotten, and the number of + whitespace characters consumed is returned. + + If the tokenizer is in multiline mode, then newlines are whitespace. + + Returns the number of characters skipped. + """ + + skipped = 0 + while True: + c = self._get_char() + if c != " " and c != "\t": + if (c != "\n") or not self.multiline: + self._unget_char(c) + return skipped + skipped += 1 + + def get(self, want_leading: bool = False, want_comment: bool = False) -> Token: + """Get the next token. + + want_leading: If True, return a WHITESPACE token if the + first character read is whitespace. The default is False. + + want_comment: If True, return a COMMENT token if the + first token read is a comment. The default is False. + + Raises dns.exception.UnexpectedEnd: input ended prematurely + + Raises dns.exception.SyntaxError: input was badly formed + + Returns a Token. 
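+
+        Illustrative sketch of the token stream (hypothetical input):
+
+            tok = Tokenizer("a b ; a comment\n")
+            tok.get().value     # "a"
+            tok.get().value     # "b"
+            tok.get().is_eol()  # True; the comment was consumed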
+ """ + + if self.ungotten_token is not None: + utoken = self.ungotten_token + self.ungotten_token = None + if utoken.is_whitespace(): + if want_leading: + return utoken + elif utoken.is_comment(): + if want_comment: + return utoken + else: + return utoken + skipped = self.skip_whitespace() + if want_leading and skipped > 0: + return Token(WHITESPACE, " ") + token = "" + ttype = IDENTIFIER + has_escape = False + while True: + c = self._get_char() + if c == "" or c in self.delimiters: + if c == "" and self.quoting: + raise dns.exception.UnexpectedEnd + if token == "" and ttype != QUOTED_STRING: + if c == "(": + self.multiline += 1 + self.skip_whitespace() + continue + elif c == ")": + if self.multiline <= 0: + raise dns.exception.SyntaxError + self.multiline -= 1 + self.skip_whitespace() + continue + elif c == '"': + if not self.quoting: + self.quoting = True + self.delimiters = _QUOTING_DELIMITERS + ttype = QUOTED_STRING + continue + else: + self.quoting = False + self.delimiters = _DELIMITERS + self.skip_whitespace() + continue + elif c == "\n": + return Token(EOL, "\n") + elif c == ";": + while 1: + c = self._get_char() + if c == "\n" or c == "": + break + token += c + if want_comment: + self._unget_char(c) + return Token(COMMENT, token) + elif c == "": + if self.multiline: + raise dns.exception.SyntaxError( + "unbalanced parentheses" + ) + return Token(EOF, comment=token) + elif self.multiline: + self.skip_whitespace() + token = "" + continue + else: + return Token(EOL, "\n", comment=token) + else: + # This code exists in case we ever want a + # delimiter to be returned. It never produces + # a token currently. + token = c + ttype = DELIMITER + else: + self._unget_char(c) + break + elif self.quoting and c == "\n": + raise dns.exception.SyntaxError("newline in quoted string") + elif c == "\\": + # + # It's an escape. Put it and the next character into + # the token; it will be checked later for goodness. + # + token += c + has_escape = True + c = self._get_char() + if c == "" or (c == "\n" and not self.quoting): + raise dns.exception.UnexpectedEnd + token += c + if token == "" and ttype != QUOTED_STRING: + if self.multiline: + raise dns.exception.SyntaxError("unbalanced parentheses") + ttype = EOF + return Token(ttype, token, has_escape) + + def unget(self, token: Token) -> None: + """Unget a token. + + The unget buffer for tokens is only one token large; it is + an error to try to unget a token when the unget buffer is not + empty. + + token: the token to unget + + Raises UngetBufferFull: there is already an ungotten token + """ + + if self.ungotten_token is not None: + raise UngetBufferFull + self.ungotten_token = token + + def next(self): + """Return the next item in an iteration. + + Returns a Token. + """ + + token = self.get() + if token.is_eof(): + raise StopIteration + return token + + __next__ = next + + def __iter__(self): + return self + + # Helpers + + def get_int(self, base: int = 10) -> int: + """Read the next token and interpret it as an unsigned integer. + + Raises dns.exception.SyntaxError if not an unsigned integer. + + Returns an int. + """ + + token = self.get().unescape() + if not token.is_identifier(): + raise dns.exception.SyntaxError("expecting an identifier") + if not token.value.isdigit(): + raise dns.exception.SyntaxError("expecting an integer") + return int(token.value, base) + + def get_uint8(self) -> int: + """Read the next token and interpret it as an 8-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not an 8-bit unsigned integer. 
+ + Returns an int. + """ + + value = self.get_int() + if value < 0 or value > 255: + raise dns.exception.SyntaxError( + "%d is not an unsigned 8-bit integer" % value + ) + return value + + def get_uint16(self, base: int = 10) -> int: + """Read the next token and interpret it as a 16-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not a 16-bit unsigned integer. + + Returns an int. + """ + + value = self.get_int(base=base) + if value < 0 or value > 65535: + if base == 8: + raise dns.exception.SyntaxError( + "%o is not an octal unsigned 16-bit integer" % value + ) + else: + raise dns.exception.SyntaxError( + "%d is not an unsigned 16-bit integer" % value + ) + return value + + def get_uint32(self, base: int = 10) -> int: + """Read the next token and interpret it as a 32-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not a 32-bit unsigned integer. + + Returns an int. + """ + + value = self.get_int(base=base) + if value < 0 or value > 4294967295: + raise dns.exception.SyntaxError( + "%d is not an unsigned 32-bit integer" % value + ) + return value + + def get_uint48(self, base: int = 10) -> int: + """Read the next token and interpret it as a 48-bit unsigned + integer. + + Raises dns.exception.SyntaxError if not a 48-bit unsigned integer. + + Returns an int. + """ + + value = self.get_int(base=base) + if value < 0 or value > 281474976710655: + raise dns.exception.SyntaxError( + "%d is not an unsigned 48-bit integer" % value + ) + return value + + def get_string(self, max_length: Optional[int] = None) -> str: + """Read the next token and interpret it as a string. + + Raises dns.exception.SyntaxError if not a string. + Raises dns.exception.SyntaxError if token value length + exceeds max_length (if specified). + + Returns a string. + """ + + token = self.get().unescape() + if not (token.is_identifier() or token.is_quoted_string()): + raise dns.exception.SyntaxError("expecting a string") + if max_length and len(token.value) > max_length: + raise dns.exception.SyntaxError("string too long") + return token.value + + def get_identifier(self) -> str: + """Read the next token, which should be an identifier. + + Raises dns.exception.SyntaxError if not an identifier. + + Returns a string. + """ + + token = self.get().unescape() + if not token.is_identifier(): + raise dns.exception.SyntaxError("expecting an identifier") + return token.value + + def get_remaining(self, max_tokens: Optional[int] = None) -> List[Token]: + """Return the remaining tokens on the line, until an EOL or EOF is seen. + + max_tokens: If not None, stop after this number of tokens. + + Returns a list of tokens. + """ + + tokens = [] + while True: + token = self.get() + if token.is_eol_or_eof(): + self.unget(token) + break + tokens.append(token) + if len(tokens) == max_tokens: + break + return tokens + + def concatenate_remaining_identifiers(self, allow_empty: bool = False) -> str: + """Read the remaining tokens on the line, which should be identifiers. + + Raises dns.exception.SyntaxError if there are no remaining tokens, + unless `allow_empty=True` is given. + + Raises dns.exception.SyntaxError if a token is seen that is not an + identifier. + + Returns a string containing a concatenation of the remaining + identifiers. 
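+
+        For example (illustrative), if the remaining tokens on the line are
+        the identifiers ``dGVzdA==`` and ``dGVzdA==``, the return value is
+        ``dGVzdA==dGVzdA==``.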
+ """ + s = "" + while True: + token = self.get().unescape() + if token.is_eol_or_eof(): + self.unget(token) + break + if not token.is_identifier(): + raise dns.exception.SyntaxError + s += token.value + if not (allow_empty or s): + raise dns.exception.SyntaxError("expecting another identifier") + return s + + def as_name( + self, + token: Token, + origin: Optional[dns.name.Name] = None, + relativize: bool = False, + relativize_to: Optional[dns.name.Name] = None, + ) -> dns.name.Name: + """Try to interpret the token as a DNS name. + + Raises dns.exception.SyntaxError if not a name. + + Returns a dns.name.Name. + """ + if not token.is_identifier(): + raise dns.exception.SyntaxError("expecting an identifier") + name = dns.name.from_text(token.value, origin, self.idna_codec) + return name.choose_relativity(relativize_to or origin, relativize) + + def get_name( + self, + origin: Optional[dns.name.Name] = None, + relativize: bool = False, + relativize_to: Optional[dns.name.Name] = None, + ) -> dns.name.Name: + """Read the next token and interpret it as a DNS name. + + Raises dns.exception.SyntaxError if not a name. + + Returns a dns.name.Name. + """ + + token = self.get() + return self.as_name(token, origin, relativize, relativize_to) + + def get_eol_as_token(self) -> Token: + """Read the next token and raise an exception if it isn't EOL or + EOF. + + Returns a string. + """ + + token = self.get() + if not token.is_eol_or_eof(): + raise dns.exception.SyntaxError( + 'expected EOL or EOF, got %d "%s"' % (token.ttype, token.value) + ) + return token + + def get_eol(self) -> str: + return self.get_eol_as_token().value + + def get_ttl(self) -> int: + """Read the next token and interpret it as a DNS TTL. + + Raises dns.exception.SyntaxError or dns.ttl.BadTTL if not an + identifier or badly formed. + + Returns an int. + """ + + token = self.get().unescape() + if not token.is_identifier(): + raise dns.exception.SyntaxError("expecting an identifier") + return dns.ttl.from_text(token.value) diff --git a/backend/test/lib/python3.8/site-packages/dns/transaction.py b/backend/test/lib/python3.8/site-packages/dns/transaction.py new file mode 100644 index 0000000000000000000000000000000000000000..21dea77545e1ad33739031987bd2106a6a0c8c4b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/transaction.py @@ -0,0 +1,651 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import collections +from typing import Any, Callable, Iterator, List, Optional, Tuple, Union + +import dns.exception +import dns.name +import dns.node +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.rrset +import dns.serial +import dns.ttl + + +class TransactionManager: + def reader(self) -> "Transaction": + """Begin a read-only transaction.""" + raise NotImplementedError # pragma: no cover + + def writer(self, replacement: bool = False) -> "Transaction": + """Begin a writable transaction. + + *replacement*, a ``bool``. If `True`, the content of the + transaction completely replaces any prior content. If False, + the default, then the content of the transaction updates the + existing content. + """ + raise NotImplementedError # pragma: no cover + + def origin_information( + self, + ) -> Tuple[Optional[dns.name.Name], bool, Optional[dns.name.Name]]: + """Returns a tuple + + (absolute_origin, relativize, effective_origin) + + giving the absolute name of the default origin for any + relative domain names, the "effective origin", and whether + names should be relativized. 
The "effective origin" is the + absolute origin if relativize is False, and the empty name if + relativize is true. (The effective origin is provided even + though it can be computed from the absolute_origin and + relativize setting because it avoids a lot of code + duplication.) + + If the returned names are `None`, then no origin information is + available. + + This information is used by code working with transactions to + allow it to coordinate relativization. The transaction code + itself takes what it gets (i.e. does not change name + relativity). + + """ + raise NotImplementedError # pragma: no cover + + def get_class(self) -> dns.rdataclass.RdataClass: + """The class of the transaction manager.""" + raise NotImplementedError # pragma: no cover + + def from_wire_origin(self) -> Optional[dns.name.Name]: + """Origin to use in from_wire() calls.""" + (absolute_origin, relativize, _) = self.origin_information() + if relativize: + return absolute_origin + else: + return None + + +class DeleteNotExact(dns.exception.DNSException): + """Existing data did not match data specified by an exact delete.""" + + +class ReadOnly(dns.exception.DNSException): + """Tried to write to a read-only transaction.""" + + +class AlreadyEnded(dns.exception.DNSException): + """Tried to use an already-ended transaction.""" + + +def _ensure_immutable_rdataset(rdataset): + if rdataset is None or isinstance(rdataset, dns.rdataset.ImmutableRdataset): + return rdataset + return dns.rdataset.ImmutableRdataset(rdataset) + + +def _ensure_immutable_node(node): + if node is None or node.is_immutable(): + return node + return dns.node.ImmutableNode(node) + + +CheckPutRdatasetType = Callable[ + ["Transaction", dns.name.Name, dns.rdataset.Rdataset], None +] +CheckDeleteRdatasetType = Callable[ + ["Transaction", dns.name.Name, dns.rdatatype.RdataType, dns.rdatatype.RdataType], + None, +] +CheckDeleteNameType = Callable[["Transaction", dns.name.Name], None] + + +class Transaction: + def __init__( + self, + manager: TransactionManager, + replacement: bool = False, + read_only: bool = False, + ): + self.manager = manager + self.replacement = replacement + self.read_only = read_only + self._ended = False + self._check_put_rdataset: List[CheckPutRdatasetType] = [] + self._check_delete_rdataset: List[CheckDeleteRdatasetType] = [] + self._check_delete_name: List[CheckDeleteNameType] = [] + + # + # This is the high level API + # + # Note that we currently use non-immutable types in the return type signature to + # avoid covariance problems, e.g. if the caller has a List[Rdataset], mypy will be + # unhappy if we return an ImmutableRdataset. + + def get( + self, + name: Optional[Union[dns.name.Name, str]], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + ) -> dns.rdataset.Rdataset: + """Return the rdataset associated with *name*, *rdtype*, and *covers*, + or `None` if not found. + + Note that the returned rdataset is immutable. + """ + self._check_ended() + if isinstance(name, str): + name = dns.name.from_text(name, None) + rdtype = dns.rdatatype.RdataType.make(rdtype) + covers = dns.rdatatype.RdataType.make(covers) + rdataset = self._get_rdataset(name, rdtype, covers) + return _ensure_immutable_rdataset(rdataset) + + def get_node(self, name: dns.name.Name) -> Optional[dns.node.Node]: + """Return the node at *name*, if any. + + Returns an immutable node or ``None``. 
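+
+        A short sketch (assumes *zone* is a ``dns.zone.Zone``, which
+        implements ``TransactionManager``):
+
+            with zone.reader() as txn:
+                node = txn.get_node(dns.name.from_text("www", None))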
+ """ + return _ensure_immutable_node(self._get_node(name)) + + def _check_read_only(self) -> None: + if self.read_only: + raise ReadOnly + + def add(self, *args: Any) -> None: + """Add records. + + The arguments may be: + + - rrset + + - name, rdataset... + + - name, ttl, rdata... + """ + self._check_ended() + self._check_read_only() + self._add(False, args) + + def replace(self, *args: Any) -> None: + """Replace the existing rdataset at the name with the specified + rdataset, or add the specified rdataset if there was no existing + rdataset. + + The arguments may be: + + - rrset + + - name, rdataset... + + - name, ttl, rdata... + + Note that if you want to replace the entire node, you should do + a delete of the name followed by one or more calls to add() or + replace(). + """ + self._check_ended() + self._check_read_only() + self._add(True, args) + + def delete(self, *args: Any) -> None: + """Delete records. + + It is not an error if some of the records are not in the existing + set. + + The arguments may be: + + - rrset + + - name + + - name, rdataclass, rdatatype, [covers] + + - name, rdataset... + + - name, rdata... + """ + self._check_ended() + self._check_read_only() + self._delete(False, args) + + def delete_exact(self, *args: Any) -> None: + """Delete records. + + The arguments may be: + + - rrset + + - name + + - name, rdataclass, rdatatype, [covers] + + - name, rdataset... + + - name, rdata... + + Raises dns.transaction.DeleteNotExact if some of the records + are not in the existing set. + + """ + self._check_ended() + self._check_read_only() + self._delete(True, args) + + def name_exists(self, name: Union[dns.name.Name, str]) -> bool: + """Does the specified name exist?""" + self._check_ended() + if isinstance(name, str): + name = dns.name.from_text(name, None) + return self._name_exists(name) + + def update_serial( + self, + value: int = 1, + relative: bool = True, + name: dns.name.Name = dns.name.empty, + ) -> None: + """Update the serial number. + + *value*, an `int`, is an increment if *relative* is `True`, or the + actual value to set if *relative* is `False`. + + Raises `KeyError` if there is no SOA rdataset at *name*. + + Raises `ValueError` if *value* is negative or if the increment is + so large that it would cause the new serial to be less than the + prior value. + """ + self._check_ended() + if value < 0: + raise ValueError("negative update_serial() value") + if isinstance(name, str): + name = dns.name.from_text(name, None) + rdataset = self._get_rdataset(name, dns.rdatatype.SOA, dns.rdatatype.NONE) + if rdataset is None or len(rdataset) == 0: + raise KeyError + if relative: + serial = dns.serial.Serial(rdataset[0].serial) + value + else: + serial = dns.serial.Serial(value) + serial = serial.value # convert back to int + if serial == 0: + serial = 1 + rdata = rdataset[0].replace(serial=serial) + new_rdataset = dns.rdataset.from_rdata(rdataset.ttl, rdata) + self.replace(name, new_rdataset) + + def __iter__(self): + self._check_ended() + return self._iterate_rdatasets() + + def changed(self) -> bool: + """Has this transaction changed anything? + + For read-only transactions, the result is always `False`. + + For writable transactions, the result is `True` if at some time + during the life of the transaction, the content was changed. + """ + self._check_ended() + return self._changed() + + def commit(self) -> None: + """Commit the transaction. 
+
+        Normally transactions are used as context managers and commit
+        or roll back automatically, but it may be done explicitly if needed.
+        A ``dns.transaction.AlreadyEnded`` exception will be raised if you try
+        to use a transaction after it has been committed or rolled back.
+
+        Raises an exception if the commit fails (in which case the transaction
+        is also rolled back).
+        """
+        self._end(True)
+
+    def rollback(self) -> None:
+        """Roll back the transaction.
+
+        Normally transactions are used as context managers and commit
+        or roll back automatically, but it may be done explicitly if needed.
+        A ``dns.transaction.AlreadyEnded`` exception will be raised if you try
+        to use a transaction after it has been committed or rolled back.
+
+        Rollback cannot otherwise fail.
+        """
+        self._end(False)
+
+    def check_put_rdataset(self, check: CheckPutRdatasetType) -> None:
+        """Call *check* before putting (storing) an rdataset.
+
+        The function is called with the transaction, the name, and the rdataset.
+
+        The check function may safely make non-mutating transaction method
+        calls, but behavior is undefined if mutating transaction methods are
+        called. The check function should raise an exception if it objects to
+        the put, and otherwise should return ``None``.
+        """
+        self._check_put_rdataset.append(check)
+
+    def check_delete_rdataset(self, check: CheckDeleteRdatasetType) -> None:
+        """Call *check* before deleting an rdataset.
+
+        The function is called with the transaction, the name, the rdatatype,
+        and the covered rdatatype.
+
+        The check function may safely make non-mutating transaction method
+        calls, but behavior is undefined if mutating transaction methods are
+        called. The check function should raise an exception if it objects to
+        the deletion, and otherwise should return ``None``.
+        """
+        self._check_delete_rdataset.append(check)
+
+    def check_delete_name(self, check: CheckDeleteNameType) -> None:
+        """Call *check* before deleting a name.
+
+        The function is called with the transaction and the name.
+
+        The check function may safely make non-mutating transaction method
+        calls, but behavior is undefined if mutating transaction methods are
+        called. The check function should raise an exception if it objects to
+        the deletion, and otherwise should return ``None``.
+        """
+        self._check_delete_name.append(check)
+
+    def iterate_rdatasets(
+        self,
+    ) -> Iterator[Tuple[dns.name.Name, dns.rdataset.Rdataset]]:
+        """Iterate all the rdatasets in the transaction, returning
+        (`dns.name.Name`, `dns.rdataset.Rdataset`) tuples.
+
+        Note that as is usual with python iterators, adding or removing items
+        while iterating will invalidate the iterator and may raise `RuntimeError`
+        or fail to iterate over all entries."""
+        self._check_ended()
+        return self._iterate_rdatasets()
+
+    def iterate_names(self) -> Iterator[dns.name.Name]:
+        """Iterate all the names in the transaction.
+ + Note that as is usual with python iterators, adding or removing names + while iterating will invalidate the iterator and may raise `RuntimeError` + or fail to iterate over all entries.""" + self._check_ended() + return self._iterate_names() + + # + # Helper methods + # + + def _raise_if_not_empty(self, method, args): + if len(args) != 0: + raise TypeError(f"extra parameters to {method}") + + def _rdataset_from_args(self, method, deleting, args): + try: + arg = args.popleft() + if isinstance(arg, dns.rrset.RRset): + rdataset = arg.to_rdataset() + elif isinstance(arg, dns.rdataset.Rdataset): + rdataset = arg + else: + if deleting: + ttl = 0 + else: + if isinstance(arg, int): + ttl = arg + if ttl > dns.ttl.MAX_TTL: + raise ValueError(f"{method}: TTL value too big") + else: + raise TypeError(f"{method}: expected a TTL") + arg = args.popleft() + if isinstance(arg, dns.rdata.Rdata): + rdataset = dns.rdataset.from_rdata(ttl, arg) + else: + raise TypeError(f"{method}: expected an Rdata") + return rdataset + except IndexError: + if deleting: + return None + else: + # reraise + raise TypeError(f"{method}: expected more arguments") + + def _add(self, replace, args): + try: + args = collections.deque(args) + if replace: + method = "replace()" + else: + method = "add()" + arg = args.popleft() + if isinstance(arg, str): + arg = dns.name.from_text(arg, None) + if isinstance(arg, dns.name.Name): + name = arg + rdataset = self._rdataset_from_args(method, False, args) + elif isinstance(arg, dns.rrset.RRset): + rrset = arg + name = rrset.name + # rrsets are also rdatasets, but they don't print the + # same and can't be stored in nodes, so convert. + rdataset = rrset.to_rdataset() + else: + raise TypeError( + f"{method} requires a name or RRset as the first argument" + ) + if rdataset.rdclass != self.manager.get_class(): + raise ValueError(f"{method} has objects of wrong RdataClass") + if rdataset.rdtype == dns.rdatatype.SOA: + (_, _, origin) = self._origin_information() + if name != origin: + raise ValueError(f"{method} has non-origin SOA") + self._raise_if_not_empty(method, args) + if not replace: + existing = self._get_rdataset(name, rdataset.rdtype, rdataset.covers) + if existing is not None: + if isinstance(existing, dns.rdataset.ImmutableRdataset): + trds = dns.rdataset.Rdataset( + existing.rdclass, existing.rdtype, existing.covers + ) + trds.update(existing) + existing = trds + rdataset = existing.union(rdataset) + self._checked_put_rdataset(name, rdataset) + except IndexError: + raise TypeError(f"not enough parameters to {method}") + + def _delete(self, exact, args): + try: + args = collections.deque(args) + if exact: + method = "delete_exact()" + else: + method = "delete()" + arg = args.popleft() + if isinstance(arg, str): + arg = dns.name.from_text(arg, None) + if isinstance(arg, dns.name.Name): + name = arg + if len(args) > 0 and ( + isinstance(args[0], int) or isinstance(args[0], str) + ): + # deleting by type and (optionally) covers + rdtype = dns.rdatatype.RdataType.make(args.popleft()) + if len(args) > 0: + covers = dns.rdatatype.RdataType.make(args.popleft()) + else: + covers = dns.rdatatype.NONE + self._raise_if_not_empty(method, args) + existing = self._get_rdataset(name, rdtype, covers) + if existing is None: + if exact: + raise DeleteNotExact(f"{method}: missing rdataset") + else: + self._delete_rdataset(name, rdtype, covers) + return + else: + rdataset = self._rdataset_from_args(method, True, args) + elif isinstance(arg, dns.rrset.RRset): + rdataset = arg # rrsets are also rdatasets + 
name = rdataset.name + else: + raise TypeError( + f"{method} requires a name or RRset as the first argument" + ) + self._raise_if_not_empty(method, args) + if rdataset: + if rdataset.rdclass != self.manager.get_class(): + raise ValueError(f"{method} has objects of wrong RdataClass") + existing = self._get_rdataset(name, rdataset.rdtype, rdataset.covers) + if existing is not None: + if exact: + intersection = existing.intersection(rdataset) + if intersection != rdataset: + raise DeleteNotExact(f"{method}: missing rdatas") + rdataset = existing.difference(rdataset) + if len(rdataset) == 0: + self._checked_delete_rdataset( + name, rdataset.rdtype, rdataset.covers + ) + else: + self._checked_put_rdataset(name, rdataset) + elif exact: + raise DeleteNotExact(f"{method}: missing rdataset") + else: + if exact and not self._name_exists(name): + raise DeleteNotExact(f"{method}: name not known") + self._checked_delete_name(name) + except IndexError: + raise TypeError(f"not enough parameters to {method}") + + def _check_ended(self): + if self._ended: + raise AlreadyEnded + + def _end(self, commit): + self._check_ended() + if self._ended: + raise AlreadyEnded + try: + self._end_transaction(commit) + finally: + self._ended = True + + def _checked_put_rdataset(self, name, rdataset): + for check in self._check_put_rdataset: + check(self, name, rdataset) + self._put_rdataset(name, rdataset) + + def _checked_delete_rdataset(self, name, rdtype, covers): + for check in self._check_delete_rdataset: + check(self, name, rdtype, covers) + self._delete_rdataset(name, rdtype, covers) + + def _checked_delete_name(self, name): + for check in self._check_delete_name: + check(self, name) + self._delete_name(name) + + # + # Transactions are context managers. + # + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if not self._ended: + if exc_type is None: + self.commit() + else: + self.rollback() + return False + + # + # This is the low level API, which must be implemented by subclasses + # of Transaction. + # + + def _get_rdataset(self, name, rdtype, covers): + """Return the rdataset associated with *name*, *rdtype*, and *covers*, + or `None` if not found. + """ + raise NotImplementedError # pragma: no cover + + def _put_rdataset(self, name, rdataset): + """Store the rdataset.""" + raise NotImplementedError # pragma: no cover + + def _delete_name(self, name): + """Delete all data associated with *name*. + + It is not an error if the name does not exist. + """ + raise NotImplementedError # pragma: no cover + + def _delete_rdataset(self, name, rdtype, covers): + """Delete all data associated with *name*, *rdtype*, and *covers*. + + It is not an error if the rdataset does not exist. + """ + raise NotImplementedError # pragma: no cover + + def _name_exists(self, name): + """Does name exist? + + Returns a bool. + """ + raise NotImplementedError # pragma: no cover + + def _changed(self): + """Has this transaction changed anything?""" + raise NotImplementedError # pragma: no cover + + def _end_transaction(self, commit): + """End the transaction. + + *commit*, a bool. If ``True``, commit the transaction, otherwise + roll it back. + + If committing and the commit fails, then roll back and raise an + exception. + """ + raise NotImplementedError # pragma: no cover + + def _set_origin(self, origin): + """Set the origin. + + This method is called when reading a possibly relativized + source, and an origin setting operation occurs (e.g. $ORIGIN + in a zone file). 
+ """ + raise NotImplementedError # pragma: no cover + + def _iterate_rdatasets(self): + """Return an iterator that yields (name, rdataset) tuples.""" + raise NotImplementedError # pragma: no cover + + def _iterate_names(self): + """Return an iterator that yields a name.""" + raise NotImplementedError # pragma: no cover + + def _get_node(self, name): + """Return the node at *name*, if any. + + Returns a node or ``None``. + """ + raise NotImplementedError # pragma: no cover + + # + # Low-level API with a default implementation, in case a subclass needs + # to override. + # + + def _origin_information(self): + # This is only used by _add() + return self.manager.origin_information() diff --git a/backend/test/lib/python3.8/site-packages/dns/tsig.py b/backend/test/lib/python3.8/site-packages/dns/tsig.py new file mode 100644 index 0000000000000000000000000000000000000000..58760f5f5dcc502f38b7741fd084408d857758a2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/tsig.py @@ -0,0 +1,361 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2001-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
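+
+# A minimal sketch of driving the low-level signing objects defined below
+# (hypothetical key material; Key, get_context, and HMACTSig follow):
+#
+#     key = Key("transfer-key.", "c2VjcmV0")  # name plus base64 secret
+#     ctx = get_context(key)                  # an HMACTSig for hmac-sha256
+#     ctx.update(b"wire-format data")
+#     mac = ctx.sign()                        # raw MAC bytes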
+ +"""DNS TSIG support.""" + +import base64 +import hashlib +import hmac +import struct + +import dns.exception +import dns.name +import dns.rcode +import dns.rdataclass + + +class BadTime(dns.exception.DNSException): + + """The current time is not within the TSIG's validity time.""" + + +class BadSignature(dns.exception.DNSException): + + """The TSIG signature fails to verify.""" + + +class BadKey(dns.exception.DNSException): + + """The TSIG record owner name does not match the key.""" + + +class BadAlgorithm(dns.exception.DNSException): + + """The TSIG algorithm does not match the key.""" + + +class PeerError(dns.exception.DNSException): + + """Base class for all TSIG errors generated by the remote peer""" + + +class PeerBadKey(PeerError): + + """The peer didn't know the key we used""" + + +class PeerBadSignature(PeerError): + + """The peer didn't like the signature we sent""" + + +class PeerBadTime(PeerError): + + """The peer didn't like the time we sent""" + + +class PeerBadTruncation(PeerError): + + """The peer didn't like amount of truncation in the TSIG we sent""" + + +# TSIG Algorithms + +HMAC_MD5 = dns.name.from_text("HMAC-MD5.SIG-ALG.REG.INT") +HMAC_SHA1 = dns.name.from_text("hmac-sha1") +HMAC_SHA224 = dns.name.from_text("hmac-sha224") +HMAC_SHA256 = dns.name.from_text("hmac-sha256") +HMAC_SHA256_128 = dns.name.from_text("hmac-sha256-128") +HMAC_SHA384 = dns.name.from_text("hmac-sha384") +HMAC_SHA384_192 = dns.name.from_text("hmac-sha384-192") +HMAC_SHA512 = dns.name.from_text("hmac-sha512") +HMAC_SHA512_256 = dns.name.from_text("hmac-sha512-256") +GSS_TSIG = dns.name.from_text("gss-tsig") + +default_algorithm = HMAC_SHA256 + +mac_sizes = { + HMAC_SHA1: 20, + HMAC_SHA224: 28, + HMAC_SHA256: 32, + HMAC_SHA256_128: 16, + HMAC_SHA384: 48, + HMAC_SHA384_192: 24, + HMAC_SHA512: 64, + HMAC_SHA512_256: 32, + HMAC_MD5: 16, + GSS_TSIG: 128, # This is what we assume to be the worst case! +} + + +class GSSTSig: + """ + GSS-TSIG TSIG implementation. This uses the GSS-API context established + in the TKEY message handshake to sign messages using GSS-API message + integrity codes, per the RFC. + + In order to avoid a direct GSSAPI dependency, the keyring holds a ref + to the GSSAPI object required, rather than the key itself. 
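+
+    Consequently, for a GSS-TSIG key the ``secret`` attribute is not raw
+    key bytes but an object providing ``get_signature()``,
+    ``verify_signature()``, and ``step()``, which this class and
+    ``GSSTSigAdapter`` call.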
+ """ + + def __init__(self, gssapi_context): + self.gssapi_context = gssapi_context + self.data = b"" + self.name = "gss-tsig" + + def update(self, data): + self.data += data + + def sign(self): + # defer to the GSSAPI function to sign + return self.gssapi_context.get_signature(self.data) + + def verify(self, expected): + try: + # defer to the GSSAPI function to verify + return self.gssapi_context.verify_signature(self.data, expected) + except Exception: + # note the usage of a bare exception + raise BadSignature + + +class GSSTSigAdapter: + def __init__(self, keyring): + self.keyring = keyring + + def __call__(self, message, keyname): + if keyname in self.keyring: + key = self.keyring[keyname] + if isinstance(key, Key) and key.algorithm == GSS_TSIG: + if message: + GSSTSigAdapter.parse_tkey_and_step(key, message, keyname) + return key + else: + return None + + @classmethod + def parse_tkey_and_step(cls, key, message, keyname): + # if the message is a TKEY type, absorb the key material + # into the context using step(); this is used to allow the + # client to complete the GSSAPI negotiation before attempting + # to verify the signed response to a TKEY message exchange + try: + rrset = message.find_rrset( + message.answer, keyname, dns.rdataclass.ANY, dns.rdatatype.TKEY + ) + if rrset: + token = rrset[0].key + gssapi_context = key.secret + return gssapi_context.step(token) + except KeyError: + pass + + +class HMACTSig: + """ + HMAC TSIG implementation. This uses the HMAC python module to handle the + sign/verify operations. + """ + + _hashes = { + HMAC_SHA1: hashlib.sha1, + HMAC_SHA224: hashlib.sha224, + HMAC_SHA256: hashlib.sha256, + HMAC_SHA256_128: (hashlib.sha256, 128), + HMAC_SHA384: hashlib.sha384, + HMAC_SHA384_192: (hashlib.sha384, 192), + HMAC_SHA512: hashlib.sha512, + HMAC_SHA512_256: (hashlib.sha512, 256), + HMAC_MD5: hashlib.md5, + } + + def __init__(self, key, algorithm): + try: + hashinfo = self._hashes[algorithm] + except KeyError: + raise NotImplementedError(f"TSIG algorithm {algorithm} is not supported") + + # create the HMAC context + if isinstance(hashinfo, tuple): + self.hmac_context = hmac.new(key, digestmod=hashinfo[0]) + self.size = hashinfo[1] + else: + self.hmac_context = hmac.new(key, digestmod=hashinfo) + self.size = None + self.name = self.hmac_context.name + if self.size: + self.name += f"-{self.size}" + + def update(self, data): + return self.hmac_context.update(data) + + def sign(self): + # defer to the HMAC digest() function for that digestmod + digest = self.hmac_context.digest() + if self.size: + digest = digest[: (self.size // 8)] + return digest + + def verify(self, expected): + # re-digest and compare the results + mac = self.sign() + if not hmac.compare_digest(mac, expected): + raise BadSignature + + +def _digest(wire, key, rdata, time=None, request_mac=None, ctx=None, multi=None): + """Return a context containing the TSIG rdata for the input parameters + @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object + @raises ValueError: I{other_data} is too long + @raises NotImplementedError: I{algorithm} is not supported + """ + + first = not (ctx and multi) + if first: + ctx = get_context(key) + if request_mac: + ctx.update(struct.pack("!H", len(request_mac))) + ctx.update(request_mac) + ctx.update(struct.pack("!H", rdata.original_id)) + ctx.update(wire[2:]) + if first: + ctx.update(key.name.to_digestable()) + ctx.update(struct.pack("!H", dns.rdataclass.ANY)) + ctx.update(struct.pack("!I", 0)) + if time is None: + time = rdata.time_signed + upper_time = 
(time >> 32) & 0xFFFF + lower_time = time & 0xFFFFFFFF + time_encoded = struct.pack("!HIH", upper_time, lower_time, rdata.fudge) + other_len = len(rdata.other) + if other_len > 65535: + raise ValueError("TSIG Other Data is > 65535 bytes") + if first: + ctx.update(key.algorithm.to_digestable() + time_encoded) + ctx.update(struct.pack("!HH", rdata.error, other_len) + rdata.other) + else: + ctx.update(time_encoded) + return ctx + + +def _maybe_start_digest(key, mac, multi): + """If this is the first message in a multi-message sequence, + start a new context. + @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object + """ + if multi: + ctx = get_context(key) + ctx.update(struct.pack("!H", len(mac))) + ctx.update(mac) + return ctx + else: + return None + + +def sign(wire, key, rdata, time=None, request_mac=None, ctx=None, multi=False): + """Return a (tsig_rdata, mac, ctx) tuple containing the HMAC TSIG rdata + for the input parameters, the HMAC MAC calculated by applying the + TSIG signature algorithm, and the TSIG digest context. + @rtype: (string, dns.tsig.HMACTSig or dns.tsig.GSSTSig object) + @raises ValueError: I{other_data} is too long + @raises NotImplementedError: I{algorithm} is not supported + """ + + ctx = _digest(wire, key, rdata, time, request_mac, ctx, multi) + mac = ctx.sign() + tsig = rdata.replace(time_signed=time, mac=mac) + + return (tsig, _maybe_start_digest(key, mac, multi)) + + +def validate( + wire, key, owner, rdata, now, request_mac, tsig_start, ctx=None, multi=False +): + """Validate the specified TSIG rdata against the other input parameters. + + @raises FormError: The TSIG is badly formed. + @raises BadTime: There is too much time skew between the client and the + server. + @raises BadSignature: The TSIG signature did not validate + @rtype: dns.tsig.HMACTSig or dns.tsig.GSSTSig object""" + + (adcount,) = struct.unpack("!H", wire[10:12]) + if adcount == 0: + raise dns.exception.FormError + adcount -= 1 + new_wire = wire[0:10] + struct.pack("!H", adcount) + wire[12:tsig_start] + if rdata.error != 0: + if rdata.error == dns.rcode.BADSIG: + raise PeerBadSignature + elif rdata.error == dns.rcode.BADKEY: + raise PeerBadKey + elif rdata.error == dns.rcode.BADTIME: + raise PeerBadTime + elif rdata.error == dns.rcode.BADTRUNC: + raise PeerBadTruncation + else: + raise PeerError("unknown TSIG error code %d" % rdata.error) + if abs(rdata.time_signed - now) > rdata.fudge: + raise BadTime + if key.name != owner: + raise BadKey + if key.algorithm != rdata.algorithm: + raise BadAlgorithm + ctx = _digest(new_wire, key, rdata, None, request_mac, ctx, multi) + ctx.verify(rdata.mac) + return _maybe_start_digest(key, rdata.mac, multi) + + +def get_context(key): + """Returns an HMAC context for the specified key. 
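+    (Despite the name, a GSSTSig context is returned when the key's
+    algorithm is gss-tsig.)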
+ + @rtype: HMAC context + @raises NotImplementedError: I{algorithm} is not supported + """ + + if key.algorithm == GSS_TSIG: + return GSSTSig(key.secret) + else: + return HMACTSig(key.secret, key.algorithm) + + +class Key: + def __init__(self, name, secret, algorithm=default_algorithm): + if isinstance(name, str): + name = dns.name.from_text(name) + self.name = name + if isinstance(secret, str): + secret = base64.decodebytes(secret.encode()) + self.secret = secret + if isinstance(algorithm, str): + algorithm = dns.name.from_text(algorithm) + self.algorithm = algorithm + + def __eq__(self, other): + return ( + isinstance(other, Key) + and self.name == other.name + and self.secret == other.secret + and self.algorithm == other.algorithm + ) + + def __repr__(self): + r = f"<DNS key name='{self.name}', " + f"algorithm='{self.algorithm}'" + if self.algorithm != GSS_TSIG: + r += f", secret='{base64.b64encode(self.secret).decode()}'" + r += ">" + return r diff --git a/backend/test/lib/python3.8/site-packages/dns/tsigkeyring.py b/backend/test/lib/python3.8/site-packages/dns/tsigkeyring.py new file mode 100644 index 0000000000000000000000000000000000000000..1010a79f8f3c1856b765fa11e01cb5b6e2f6ea64 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/tsigkeyring.py @@ -0,0 +1,68 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""A place to store TSIG keys.""" + +import base64 +from typing import Any, Dict + +import dns.name +import dns.tsig + + +def from_text(textring: Dict[str, Any]) -> Dict[dns.name.Name, dns.tsig.Key]: + """Convert a dictionary containing (textual DNS name, base64 secret) + pairs into a binary keyring which has (dns.name.Name, bytes) pairs, or + a dictionary containing (textual DNS name, (algorithm, base64 secret)) + pairs into a binary keyring which has (dns.name.Name, dns.tsig.Key) pairs. + @rtype: dict""" + + keyring = {} + for name, value in textring.items(): + kname = dns.name.from_text(name) + if isinstance(value, str): + keyring[kname] = dns.tsig.Key(kname, value).secret + else: + (algorithm, secret) = value + keyring[kname] = dns.tsig.Key(kname, secret, algorithm) + return keyring + + +def to_text(keyring: Dict[dns.name.Name, Any]) -> Dict[str, Any]: + """Convert a dictionary containing (dns.name.Name, dns.tsig.Key) pairs + into a text keyring which has (textual DNS name, (textual algorithm, + base64 secret)) pairs, or a dictionary containing (dns.name.Name, bytes) + pairs into a text keyring which has (textual DNS name, base64 secret) pairs. 
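+    This is essentially the inverse of from_text(); note that the algorithm
+    name is emitted in absolute form (e.g. 'hmac-sha256.').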
+ @rtype: dict""" + + textring = {} + + def b64encode(secret): + return base64.encodebytes(secret).decode().rstrip() + + for name, key in keyring.items(): + tname = name.to_text() + if isinstance(key, bytes): + textring[tname] = b64encode(key) + else: + if isinstance(key.secret, bytes): + text_secret = b64encode(key.secret) + else: + text_secret = str(key.secret) + + textring[tname] = (key.algorithm.to_text(), text_secret) + return textring diff --git a/backend/test/lib/python3.8/site-packages/dns/ttl.py b/backend/test/lib/python3.8/site-packages/dns/ttl.py new file mode 100644 index 0000000000000000000000000000000000000000..264b0338b64e827a2078ebb0edac7daf1a49cd0c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/ttl.py @@ -0,0 +1,92 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS TTL conversion.""" + +from typing import Union + +import dns.exception + +# Technically TTLs are supposed to be between 0 and 2**31 - 1, with values +# greater than that interpreted as 0, but we do not impose this policy here +# as values > 2**31 - 1 occur in real world data. +# +# We leave it to applications to impose tighter bounds if desired. +MAX_TTL = 2**32 - 1 + + +class BadTTL(dns.exception.SyntaxError): + """DNS TTL value is not well-formed.""" + + +def from_text(text: str) -> int: + """Convert the text form of a TTL to an integer. + + The BIND 8 units syntax for TTLs (e.g. '1w6d4h3m10s') is supported. + + *text*, a ``str``, the textual TTL. + + Raises ``dns.ttl.BadTTL`` if the TTL is not well-formed. + + Returns an ``int``. 
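+
+    For example, ``from_text('1h30m')`` returns ``5400`` (3600 + 1800
+    seconds), and ``from_text('300')`` returns ``300``.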
+ """ + + if text.isdigit(): + total = int(text) + elif len(text) == 0: + raise BadTTL + else: + total = 0 + current = 0 + need_digit = True + for c in text: + if c.isdigit(): + current *= 10 + current += int(c) + need_digit = False + else: + if need_digit: + raise BadTTL + c = c.lower() + if c == "w": + total += current * 604800 + elif c == "d": + total += current * 86400 + elif c == "h": + total += current * 3600 + elif c == "m": + total += current * 60 + elif c == "s": + total += current + else: + raise BadTTL("unknown unit '%s'" % c) + current = 0 + need_digit = True + if not current == 0: + raise BadTTL("trailing integer") + if total < 0 or total > MAX_TTL: + raise BadTTL("TTL should be between 0 and 2**32 - 1 (inclusive)") + return total + + +def make(value: Union[int, str]) -> int: + if isinstance(value, int): + return value + elif isinstance(value, str): + return dns.ttl.from_text(value) + else: + raise ValueError("cannot convert value to TTL") diff --git a/backend/test/lib/python3.8/site-packages/dns/update.py b/backend/test/lib/python3.8/site-packages/dns/update.py new file mode 100644 index 0000000000000000000000000000000000000000..bf1157acdfe7f4262afec600fd9a30691aa0f78d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/update.py @@ -0,0 +1,386 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Dynamic Update Support""" + +from typing import Any, List, Optional, Union + +import dns.message +import dns.name +import dns.opcode +import dns.rdata +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.tsig + + +class UpdateSection(dns.enum.IntEnum): + """Update sections""" + + ZONE = 0 + PREREQ = 1 + UPDATE = 2 + ADDITIONAL = 3 + + @classmethod + def _maximum(cls): + return 3 + + +class UpdateMessage(dns.message.Message): # lgtm[py/missing-equals] + # ignore the mypy error here as we mean to use a different enum + _section_enum = UpdateSection # type: ignore + + def __init__( + self, + zone: Optional[Union[dns.name.Name, str]] = None, + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + keyring: Optional[Any] = None, + keyname: Optional[dns.name.Name] = None, + keyalgorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, + id: Optional[int] = None, + ): + """Initialize a new DNS Update object. + + See the documentation of the Message class for a complete + description of the keyring dictionary. + + *zone*, a ``dns.name.Name``, ``str``, or ``None``, the zone + which is being updated. ``None`` should only be used by dnspython's + message constructors, as a zone is required for the convenience + methods like ``add()``, ``replace()``, etc. + + *rdclass*, an ``int`` or ``str``, the class of the zone. 
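+
+        For example (assuming an authoritative zone ``example.``)::
+
+            update = UpdateMessage('example.')
+            update.add('www', 300, 'A', '10.0.0.1')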
+ + The *keyring*, *keyname*, and *keyalgorithm* parameters are passed to + ``use_tsig()``; see its documentation for details. + """ + super().__init__(id=id) + self.flags |= dns.opcode.to_flags(dns.opcode.UPDATE) + if isinstance(zone, str): + zone = dns.name.from_text(zone) + self.origin = zone + rdclass = dns.rdataclass.RdataClass.make(rdclass) + self.zone_rdclass = rdclass + if self.origin: + self.find_rrset( + self.zone, + self.origin, + rdclass, + dns.rdatatype.SOA, + create=True, + force_unique=True, + ) + if keyring is not None: + self.use_tsig(keyring, keyname, algorithm=keyalgorithm) + + @property + def zone(self) -> List[dns.rrset.RRset]: + """The zone section.""" + return self.sections[0] + + @zone.setter + def zone(self, v): + self.sections[0] = v + + @property + def prerequisite(self) -> List[dns.rrset.RRset]: + """The prerequisite section.""" + return self.sections[1] + + @prerequisite.setter + def prerequisite(self, v): + self.sections[1] = v + + @property + def update(self) -> List[dns.rrset.RRset]: + """The update section.""" + return self.sections[2] + + @update.setter + def update(self, v): + self.sections[2] = v + + def _add_rr(self, name, ttl, rd, deleting=None, section=None): + """Add a single RR to the update section.""" + + if section is None: + section = self.update + covers = rd.covers() + rrset = self.find_rrset( + section, name, self.zone_rdclass, rd.rdtype, covers, deleting, True, True + ) + rrset.add(rd, ttl) + + def _add(self, replace, section, name, *args): + """Add records. + + *replace* is the replacement mode. If ``False``, + RRs are added to an existing RRset; if ``True``, the RRset + is replaced with the specified contents. The second + argument is the section to add to. The third argument + is always a name. The other arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... + """ + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if isinstance(args[0], dns.rdataset.Rdataset): + for rds in args: + if replace: + self.delete(name, rds.rdtype) + for rd in rds: + self._add_rr(name, rds.ttl, rd, section=section) + else: + args = list(args) + ttl = int(args.pop(0)) + if isinstance(args[0], dns.rdata.Rdata): + if replace: + self.delete(name, args[0].rdtype) + for rd in args: + self._add_rr(name, ttl, rd, section=section) + else: + rdtype = dns.rdatatype.RdataType.make(args.pop(0)) + if replace: + self.delete(name, rdtype) + for s in args: + rd = dns.rdata.from_text(self.zone_rdclass, rdtype, s, self.origin) + self._add_rr(name, ttl, rd, section=section) + + def add(self, name: Union[dns.name.Name, str], *args: Any) -> None: + """Add records. + + The first argument is always a name. The other + arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... + """ + + self._add(False, self.update, name, *args) + + def delete(self, name: Union[dns.name.Name, str], *args: Any) -> None: + """Delete records. + + The first argument is always a name. The other + arguments can be: + + - *empty* + + - rdataset... + + - rdata... + + - rdtype, [string...] 
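+
+        For example, ``update.delete('foo')`` removes every record at
+        *foo* (relative to the zone origin), while
+        ``update.delete('foo', 'A')`` removes only the A rdataset.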
+ """ + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if len(args) == 0: + self.find_rrset( + self.update, + name, + dns.rdataclass.ANY, + dns.rdatatype.ANY, + dns.rdatatype.NONE, + dns.rdataclass.ANY, + True, + True, + ) + elif isinstance(args[0], dns.rdataset.Rdataset): + for rds in args: + for rd in rds: + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + else: + largs = list(args) + if isinstance(largs[0], dns.rdata.Rdata): + for rd in largs: + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + else: + rdtype = dns.rdatatype.RdataType.make(largs.pop(0)) + if len(largs) == 0: + self.find_rrset( + self.update, + name, + self.zone_rdclass, + rdtype, + dns.rdatatype.NONE, + dns.rdataclass.ANY, + True, + True, + ) + else: + for s in largs: + rd = dns.rdata.from_text( + self.zone_rdclass, + rdtype, + s, # type: ignore[arg-type] + self.origin, + ) + self._add_rr(name, 0, rd, dns.rdataclass.NONE) + + def replace(self, name: Union[dns.name.Name, str], *args: Any) -> None: + """Replace records. + + The first argument is always a name. The other + arguments can be: + + - rdataset... + + - ttl, rdata... + + - ttl, rdtype, string... + + Note that if you want to replace the entire node, you should do + a delete of the name followed by one or more calls to add. + """ + + self._add(True, self.update, name, *args) + + def present(self, name: Union[dns.name.Name, str], *args: Any) -> None: + """Require that an owner name (and optionally an rdata type, + or specific rdataset) exists as a prerequisite to the + execution of the update. + + The first argument is always a name. + The other arguments can be: + + - rdataset... + + - rdata... + + - rdtype, string... + """ + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if len(args) == 0: + self.find_rrset( + self.prerequisite, + name, + dns.rdataclass.ANY, + dns.rdatatype.ANY, + dns.rdatatype.NONE, + None, + True, + True, + ) + elif ( + isinstance(args[0], dns.rdataset.Rdataset) + or isinstance(args[0], dns.rdata.Rdata) + or len(args) > 1 + ): + if not isinstance(args[0], dns.rdataset.Rdataset): + # Add a 0 TTL + largs = list(args) + largs.insert(0, 0) # type: ignore[arg-type] + self._add(False, self.prerequisite, name, *largs) + else: + self._add(False, self.prerequisite, name, *args) + else: + rdtype = dns.rdatatype.RdataType.make(args[0]) + self.find_rrset( + self.prerequisite, + name, + dns.rdataclass.ANY, + rdtype, + dns.rdatatype.NONE, + None, + True, + True, + ) + + def absent( + self, + name: Union[dns.name.Name, str], + rdtype: Optional[Union[dns.rdatatype.RdataType, str]] = None, + ) -> None: + """Require that an owner name (and optionally an rdata type) does + not exist as a prerequisite to the execution of the update.""" + + if isinstance(name, str): + name = dns.name.from_text(name, None) + if rdtype is None: + self.find_rrset( + self.prerequisite, + name, + dns.rdataclass.NONE, + dns.rdatatype.ANY, + dns.rdatatype.NONE, + None, + True, + True, + ) + else: + rdtype = dns.rdatatype.RdataType.make(rdtype) + self.find_rrset( + self.prerequisite, + name, + dns.rdataclass.NONE, + rdtype, + dns.rdatatype.NONE, + None, + True, + True, + ) + + def _get_one_rr_per_rrset(self, value): + # Updates are always one_rr_per_rrset + return True + + def _parse_rr_header(self, section, name, rdclass, rdtype): + deleting = None + empty = False + if section == UpdateSection.ZONE: + if ( + dns.rdataclass.is_metaclass(rdclass) + or rdtype != dns.rdatatype.SOA + or self.zone + ): + raise dns.exception.FormError + else: + if not 
self.zone: + raise dns.exception.FormError + if rdclass in (dns.rdataclass.ANY, dns.rdataclass.NONE): + deleting = rdclass + rdclass = self.zone[0].rdclass + empty = ( + deleting == dns.rdataclass.ANY or section == UpdateSection.PREREQ + ) + return (rdclass, rdtype, deleting, empty) + + +# backwards compatibility +Update = UpdateMessage + +### BEGIN generated UpdateSection constants + +ZONE = UpdateSection.ZONE +PREREQ = UpdateSection.PREREQ +UPDATE = UpdateSection.UPDATE +ADDITIONAL = UpdateSection.ADDITIONAL + +### END generated UpdateSection constants diff --git a/backend/test/lib/python3.8/site-packages/dns/version.py b/backend/test/lib/python3.8/site-packages/dns/version.py new file mode 100644 index 0000000000000000000000000000000000000000..1f1fbf2d5789ae0126dabc430e1a6dcbee2300f8 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/version.py @@ -0,0 +1,58 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""dnspython release version information.""" + +#: MAJOR +MAJOR = 2 +#: MINOR +MINOR = 4 +#: MICRO +MICRO = 2 +#: RELEASELEVEL +RELEASELEVEL = 0x0F +#: SERIAL +SERIAL = 0 + +if RELEASELEVEL == 0x0F: # pragma: no cover lgtm[py/unreachable-statement] + #: version + version = "%d.%d.%d" % (MAJOR, MINOR, MICRO) # lgtm[py/unreachable-statement] +elif RELEASELEVEL == 0x00: # pragma: no cover lgtm[py/unreachable-statement] + version = "%d.%d.%ddev%d" % ( + MAJOR, + MINOR, + MICRO, + SERIAL, + ) # lgtm[py/unreachable-statement] +elif RELEASELEVEL == 0x0C: # pragma: no cover lgtm[py/unreachable-statement] + version = "%d.%d.%drc%d" % ( + MAJOR, + MINOR, + MICRO, + SERIAL, + ) # lgtm[py/unreachable-statement] +else: # pragma: no cover lgtm[py/unreachable-statement] + version = "%d.%d.%d%x%d" % ( + MAJOR, + MINOR, + MICRO, + RELEASELEVEL, + SERIAL, + ) # lgtm[py/unreachable-statement] + +#: hexversion +hexversion = MAJOR << 24 | MINOR << 16 | MICRO << 8 | RELEASELEVEL << 4 | SERIAL diff --git a/backend/test/lib/python3.8/site-packages/dns/versioned.py b/backend/test/lib/python3.8/site-packages/dns/versioned.py new file mode 100644 index 0000000000000000000000000000000000000000..fd78e674e6edbb0dc2dcab6bbc9515b4b2103520 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/versioned.py @@ -0,0 +1,318 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""DNS Versioned Zones.""" + +import collections +import threading +from typing import Callable, Deque, Optional, Set, Union + +import dns.exception +import dns.immutable +import dns.name +import dns.node +import dns.rdataclass +import dns.rdataset +import dns.rdatatype +import dns.rdtypes.ANY.SOA +import dns.zone + + +class UseTransaction(dns.exception.DNSException): + """To alter a 
versioned zone, use a transaction."""
+
+
+# Backwards compatibility
+Node = dns.zone.VersionedNode
+ImmutableNode = dns.zone.ImmutableVersionedNode
+Version = dns.zone.Version
+WritableVersion = dns.zone.WritableVersion
+ImmutableVersion = dns.zone.ImmutableVersion
+Transaction = dns.zone.Transaction
+
+
+class Zone(dns.zone.Zone):  # lgtm[py/missing-equals]
+    __slots__ = [
+        "_versions",
+        "_version_lock",
+        "_write_txn",
+        "_write_waiters",
+        "_write_event",
+        "_pruning_policy",
+        "_readers",
+    ]
+
+    node_factory = Node
+
+    def __init__(
+        self,
+        origin: Optional[Union[dns.name.Name, str]],
+        rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN,
+        relativize: bool = True,
+        pruning_policy: Optional[Callable[["Zone", Version], Optional[bool]]] = None,
+    ):
+        """Initialize a versioned zone object.
+
+        *origin* is the origin of the zone. It may be a ``dns.name.Name``,
+        a ``str``, or ``None``. If ``None``, then the zone's origin will
+        be set by the first ``$ORIGIN`` line in a zone file.
+
+        *rdclass*, an ``int``, the zone's rdata class; the default is class IN.
+
+        *relativize*, a ``bool``, determines whether domain names are
+        relativized to the zone's origin. The default is ``True``.
+
+        *pruning_policy*, a function taking a ``Zone`` and a ``Version`` and
+        returning a ``bool``, or ``None``. Should the version be pruned? If
+        ``None``, the default policy, which retains one version, is used.
+        """
+        super().__init__(origin, rdclass, relativize)
+        self._versions: Deque[Version] = collections.deque()
+        self._version_lock = threading.Lock()
+        if pruning_policy is None:
+            self._pruning_policy = self._default_pruning_policy
+        else:
+            self._pruning_policy = pruning_policy
+        self._write_txn: Optional[Transaction] = None
+        self._write_event: Optional[threading.Event] = None
+        self._write_waiters: Deque[threading.Event] = collections.deque()
+        self._readers: Set[Transaction] = set()
+        self._commit_version_unlocked(
+            None, WritableVersion(self, replacement=True), origin
+        )
+
+    def reader(
+        self, id: Optional[int] = None, serial: Optional[int] = None
+    ) -> Transaction:  # pylint: disable=arguments-differ
+        if id is not None and serial is not None:
+            raise ValueError("cannot specify both id and serial")
+        with self._version_lock:
+            if id is not None:
+                version = None
+                for v in reversed(self._versions):
+                    if v.id == id:
+                        version = v
+                        break
+                if version is None:
+                    raise KeyError("version not found")
+            elif serial is not None:
+                if self.relativize:
+                    oname = dns.name.empty
+                else:
+                    assert self.origin is not None
+                    oname = self.origin
+                version = None
+                for v in reversed(self._versions):
+                    n = v.nodes.get(oname)
+                    if n:
+                        rds = n.get_rdataset(self.rdclass, dns.rdatatype.SOA)
+                        if rds and rds[0].serial == serial:
+                            version = v
+                            break
+                if version is None:
+                    raise KeyError("serial not found")
+            else:
+                version = self._versions[-1]
+            txn = Transaction(self, False, version)
+            self._readers.add(txn)
+            return txn
+
+    def writer(self, replacement: bool = False) -> Transaction:
+        event = None
+        while True:
+            with self._version_lock:
+                # Checking event == self._write_event ensures that either
+                # no one was waiting before we got lucky and found no write
+                # txn, or we were the one who was waiting and got woken up.
+                # This prevents "taking cuts" when creating a write txn.
+                if self._write_txn is None and event == self._write_event:
+                    # Creating the transaction defers version setup
+                    # (i.e. 
copying the nodes dictionary) until we + # give up the lock, so that we hold the lock as + # short a time as possible. This is why we call + # _setup_version() below. + self._write_txn = Transaction( + self, replacement, make_immutable=True + ) + # give up our exclusive right to make a Transaction + self._write_event = None + break + # Someone else is writing already, so we will have to + # wait, but we want to do the actual wait outside the + # lock. + event = threading.Event() + self._write_waiters.append(event) + # wait (note we gave up the lock!) + # + # We only wake one sleeper at a time, so it's important + # that no event waiter can exit this method (e.g. via + # cancellation) without returning a transaction or waking + # someone else up. + # + # This is not a problem with Threading module threads as + # they cannot be canceled, but could be an issue with trio + # tasks when we do the async version of writer(). + # I.e. we'd need to do something like: + # + # try: + # event.wait() + # except trio.Cancelled: + # with self._version_lock: + # self._maybe_wakeup_one_waiter_unlocked() + # raise + # + event.wait() + # Do the deferred version setup. + self._write_txn._setup_version() + return self._write_txn + + def _maybe_wakeup_one_waiter_unlocked(self): + if len(self._write_waiters) > 0: + self._write_event = self._write_waiters.popleft() + self._write_event.set() + + # pylint: disable=unused-argument + def _default_pruning_policy(self, zone, version): + return True + + # pylint: enable=unused-argument + + def _prune_versions_unlocked(self): + assert len(self._versions) > 0 + # Don't ever prune a version greater than or equal to one that + # a reader has open. This pins versions in memory while the + # reader is open, and importantly lets the reader open a txn on + # a successor version (e.g. if generating an IXFR). + # + # Note our definition of least_kept also ensures we do not try to + # delete the greatest version. + if len(self._readers) > 0: + least_kept = min(txn.version.id for txn in self._readers) + else: + least_kept = self._versions[-1].id + while self._versions[0].id < least_kept and self._pruning_policy( + self, self._versions[0] + ): + self._versions.popleft() + + def set_max_versions(self, max_versions: Optional[int]) -> None: + """Set a pruning policy that retains up to the specified number + of versions + """ + if max_versions is not None and max_versions < 1: + raise ValueError("max versions must be at least 1") + if max_versions is None: + + def policy(zone, _): # pylint: disable=unused-argument + return False + + else: + + def policy(zone, _): + return len(zone._versions) > max_versions + + self.set_pruning_policy(policy) + + def set_pruning_policy( + self, policy: Optional[Callable[["Zone", Version], Optional[bool]]] + ) -> None: + """Set the pruning policy for the zone. + + The *policy* function takes a `Version` and returns `True` if + the version should be pruned, and `False` otherwise. `None` + may also be specified for policy, in which case the default policy + is used. + + Pruning checking proceeds from the least version and the first + time the function returns `False`, the checking stops. I.e. the + retained versions are always a consecutive sequence. 
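+
+        For example, ``set_max_versions(n)`` above installs the policy
+        ``lambda zone, _: len(zone._versions) > n``.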
+ """ + if policy is None: + policy = self._default_pruning_policy + with self._version_lock: + self._pruning_policy = policy + self._prune_versions_unlocked() + + def _end_read(self, txn): + with self._version_lock: + self._readers.remove(txn) + self._prune_versions_unlocked() + + def _end_write_unlocked(self, txn): + assert self._write_txn == txn + self._write_txn = None + self._maybe_wakeup_one_waiter_unlocked() + + def _end_write(self, txn): + with self._version_lock: + self._end_write_unlocked(txn) + + def _commit_version_unlocked(self, txn, version, origin): + self._versions.append(version) + self._prune_versions_unlocked() + self.nodes = version.nodes + if self.origin is None: + self.origin = origin + # txn can be None in __init__ when we make the empty version. + if txn is not None: + self._end_write_unlocked(txn) + + def _commit_version(self, txn, version, origin): + with self._version_lock: + self._commit_version_unlocked(txn, version, origin) + + def _get_next_version_id(self): + if len(self._versions) > 0: + id = self._versions[-1].id + 1 + else: + id = 1 + return id + + def find_node( + self, name: Union[dns.name.Name, str], create: bool = False + ) -> dns.node.Node: + if create: + raise UseTransaction + return super().find_node(name) + + def delete_node(self, name: Union[dns.name.Name, str]) -> None: + raise UseTransaction + + def find_rdataset( + self, + name: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset: + if create: + raise UseTransaction + rdataset = super().find_rdataset(name, rdtype, covers) + return dns.rdataset.ImmutableRdataset(rdataset) + + def get_rdataset( + self, + name: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + create: bool = False, + ) -> Optional[dns.rdataset.Rdataset]: + if create: + raise UseTransaction + rdataset = super().get_rdataset(name, rdtype, covers) + if rdataset is not None: + return dns.rdataset.ImmutableRdataset(rdataset) + else: + return None + + def delete_rdataset( + self, + name: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + ) -> None: + raise UseTransaction + + def replace_rdataset( + self, name: Union[dns.name.Name, str], replacement: dns.rdataset.Rdataset + ) -> None: + raise UseTransaction diff --git a/backend/test/lib/python3.8/site-packages/dns/win32util.py b/backend/test/lib/python3.8/site-packages/dns/win32util.py new file mode 100644 index 0000000000000000000000000000000000000000..b2ca61dae9a0a48bdf7ae73720b4154891cd46b2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/win32util.py @@ -0,0 +1,249 @@ +import sys + +if sys.platform == "win32": + from typing import Any + + import dns.name + + _prefer_wmi = True + + import winreg # pylint: disable=import-error + + # Keep pylint quiet on non-windows. + try: + WindowsError is None # pylint: disable=used-before-assignment + except KeyError: + WindowsError = Exception + + try: + import threading + + import pythoncom # pylint: disable=import-error + import wmi # pylint: disable=import-error + + _have_wmi = True + except Exception: + _have_wmi = False + + def _config_domain(domain): + # Sometimes DHCP servers add a '.' prefix to the default domain, and + # Windows just stores such values in the registry (see #687). 
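+        # (For example, ".example.com" may be stored where "example.com"
+        # is meant.)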
+ # Check for this and fix it. + if domain.startswith("."): + domain = domain[1:] + return dns.name.from_text(domain) + + class DnsInfo: + def __init__(self): + self.domain = None + self.nameservers = [] + self.search = [] + + if _have_wmi: + + class _WMIGetter(threading.Thread): + def __init__(self): + super().__init__() + self.info = DnsInfo() + + def run(self): + pythoncom.CoInitialize() + try: + system = wmi.WMI() + for interface in system.Win32_NetworkAdapterConfiguration(): + if interface.IPEnabled and interface.DNSDomain: + self.info.domain = _config_domain(interface.DNSDomain) + self.info.nameservers = list(interface.DNSServerSearchOrder) + if interface.DNSDomainSuffixSearchOrder: + self.info.search = [ + _config_domain(x) + for x in interface.DNSDomainSuffixSearchOrder + ] + break + finally: + pythoncom.CoUninitialize() + + def get(self): + # We always run in a separate thread to avoid any issues with + # the COM threading model. + self.start() + self.join() + return self.info + + else: + + class _WMIGetter: # type: ignore + pass + + class _RegistryGetter: + def __init__(self): + self.info = DnsInfo() + + def _determine_split_char(self, entry): + # + # The windows registry irritatingly changes the list element + # delimiter in between ' ' and ',' (and vice-versa) in various + # versions of windows. + # + if entry.find(" ") >= 0: + split_char = " " + elif entry.find(",") >= 0: + split_char = "," + else: + # probably a singleton; treat as a space-separated list. + split_char = " " + return split_char + + def _config_nameservers(self, nameservers): + split_char = self._determine_split_char(nameservers) + ns_list = nameservers.split(split_char) + for ns in ns_list: + if ns not in self.info.nameservers: + self.info.nameservers.append(ns) + + def _config_search(self, search): + split_char = self._determine_split_char(search) + search_list = search.split(split_char) + for s in search_list: + s = _config_domain(s) + if s not in self.info.search: + self.info.search.append(s) + + def _config_fromkey(self, key, always_try_domain): + try: + servers, _ = winreg.QueryValueEx(key, "NameServer") + except WindowsError: + servers = None + if servers: + self._config_nameservers(servers) + if servers or always_try_domain: + try: + dom, _ = winreg.QueryValueEx(key, "Domain") + if dom: + self.info.domain = _config_domain(dom) + except WindowsError: + pass + else: + try: + servers, _ = winreg.QueryValueEx(key, "DhcpNameServer") + except WindowsError: + servers = None + if servers: + self._config_nameservers(servers) + try: + dom, _ = winreg.QueryValueEx(key, "DhcpDomain") + if dom: + self.info.domain = _config_domain(dom) + except WindowsError: + pass + try: + search, _ = winreg.QueryValueEx(key, "SearchList") + except WindowsError: + search = None + if search is None: + try: + search, _ = winreg.QueryValueEx(key, "DhcpSearchList") + except WindowsError: + search = None + if search: + self._config_search(search) + + def _is_nic_enabled(self, lm, guid): + # Look in the Windows Registry to determine whether the network + # interface corresponding to the given guid is enabled. + # + # (Code contributed by Paul Marks, thanks!) + # + try: + # This hard-coded location seems to be consistent, at least + # from Windows 2000 through Vista. 
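+            # ({4D36E972-E325-11CE-BFC1-08002BE10318} is the GUID of the
+            # network adapters device class; each interface has a
+            # Connection subkey beneath it.)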
+ connection_key = winreg.OpenKey( + lm, + r"SYSTEM\CurrentControlSet\Control\Network" + r"\{4D36E972-E325-11CE-BFC1-08002BE10318}" + r"\%s\Connection" % guid, + ) + + try: + # The PnpInstanceID points to a key inside Enum + (pnp_id, ttype) = winreg.QueryValueEx( + connection_key, "PnpInstanceID" + ) + + if ttype != winreg.REG_SZ: + raise ValueError # pragma: no cover + + device_key = winreg.OpenKey( + lm, r"SYSTEM\CurrentControlSet\Enum\%s" % pnp_id + ) + + try: + # Get ConfigFlags for this device + (flags, ttype) = winreg.QueryValueEx(device_key, "ConfigFlags") + + if ttype != winreg.REG_DWORD: + raise ValueError # pragma: no cover + + # Based on experimentation, bit 0x1 indicates that the + # device is disabled. + # + # XXXRTH I suspect we really want to & with 0x03 so + # that CONFIGFLAGS_REMOVED devices are also ignored, + # but we're shifting to WMI as ConfigFlags is not + # supposed to be used. + return not flags & 0x1 + + finally: + device_key.Close() + finally: + connection_key.Close() + except Exception: # pragma: no cover + return False + + def get(self): + """Extract resolver configuration from the Windows registry.""" + + lm = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE) + try: + tcp_params = winreg.OpenKey( + lm, r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters" + ) + try: + self._config_fromkey(tcp_params, True) + finally: + tcp_params.Close() + interfaces = winreg.OpenKey( + lm, + r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters\Interfaces", + ) + try: + i = 0 + while True: + try: + guid = winreg.EnumKey(interfaces, i) + i += 1 + key = winreg.OpenKey(interfaces, guid) + try: + if not self._is_nic_enabled(lm, guid): + continue + self._config_fromkey(key, False) + finally: + key.Close() + except EnvironmentError: + break + finally: + interfaces.Close() + finally: + lm.Close() + return self.info + + _getter_class: Any + if _have_wmi and _prefer_wmi: + _getter_class = _WMIGetter + else: + _getter_class = _RegistryGetter + + def get_dns_info(): + """Extract resolver configuration.""" + getter = _getter_class() + return getter.get() diff --git a/backend/test/lib/python3.8/site-packages/dns/wire.py b/backend/test/lib/python3.8/site-packages/dns/wire.py new file mode 100644 index 0000000000000000000000000000000000000000..9f9b1573d521a924a43dde6c18a59912612798d8 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/wire.py @@ -0,0 +1,89 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +import contextlib +import struct +from typing import Iterator, Optional, Tuple + +import dns.exception +import dns.name + + +class Parser: + def __init__(self, wire: bytes, current: int = 0): + self.wire = wire + self.current = 0 + self.end = len(self.wire) + if current: + self.seek(current) + self.furthest = current + + def remaining(self) -> int: + return self.end - self.current + + def get_bytes(self, size: int) -> bytes: + assert size >= 0 + if size > self.remaining(): + raise dns.exception.FormError + output = self.wire[self.current : self.current + size] + self.current += size + self.furthest = max(self.furthest, self.current) + return output + + def get_counted_bytes(self, length_size: int = 1) -> bytes: + length = int.from_bytes(self.get_bytes(length_size), "big") + return self.get_bytes(length) + + def get_remaining(self) -> bytes: + return self.get_bytes(self.remaining()) + + def get_uint8(self) -> int: + return struct.unpack("!B", self.get_bytes(1))[0] + + def get_uint16(self) -> int: + return struct.unpack("!H", 
self.get_bytes(2))[0] + + def get_uint32(self) -> int: + return struct.unpack("!I", self.get_bytes(4))[0] + + def get_uint48(self) -> int: + return int.from_bytes(self.get_bytes(6), "big") + + def get_struct(self, format: str) -> Tuple: + return struct.unpack(format, self.get_bytes(struct.calcsize(format))) + + def get_name(self, origin: Optional["dns.name.Name"] = None) -> "dns.name.Name": + name = dns.name.from_wire_parser(self) + if origin: + name = name.relativize(origin) + return name + + def seek(self, where: int) -> None: + # Note that seeking to the end is OK! (If you try to read + # after such a seek, you'll get an exception as expected.) + if where < 0 or where > self.end: + raise dns.exception.FormError + self.current = where + + @contextlib.contextmanager + def restrict_to(self, size: int) -> Iterator: + assert size >= 0 + if size > self.remaining(): + raise dns.exception.FormError + saved_end = self.end + try: + self.end = self.current + size + yield + # We make this check here and not in the finally as we + # don't want to raise if we're already raising for some + # other reason. + if self.current != self.end: + raise dns.exception.FormError + finally: + self.end = saved_end + + @contextlib.contextmanager + def restore_furthest(self) -> Iterator: + try: + yield None + finally: + self.current = self.furthest diff --git a/backend/test/lib/python3.8/site-packages/dns/xfr.py b/backend/test/lib/python3.8/site-packages/dns/xfr.py new file mode 100644 index 0000000000000000000000000000000000000000..dd247d33db4b6e827e5c540cf0e23965b0b0e10b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/xfr.py @@ -0,0 +1,343 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2017 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +from typing import Any, List, Optional, Tuple, Union + +import dns.exception +import dns.message +import dns.name +import dns.rcode +import dns.rdataset +import dns.rdatatype +import dns.serial +import dns.transaction +import dns.tsig +import dns.zone + + +class TransferError(dns.exception.DNSException): + """A zone transfer response got a non-zero rcode.""" + + def __init__(self, rcode): + message = "Zone transfer error: %s" % dns.rcode.to_text(rcode) + super().__init__(message) + self.rcode = rcode + + +class SerialWentBackwards(dns.exception.FormError): + """The current serial number is less than the serial we know.""" + + +class UseTCP(dns.exception.DNSException): + """This IXFR cannot be completed with UDP.""" + + +class Inbound: + """ + State machine for zone transfers. 
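+
+    Feed each response message to process_message() until it returns
+    True; the accumulated changes are written through the supplied
+    transaction manager.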
+    """
+
+    def __init__(
+        self,
+        txn_manager: dns.transaction.TransactionManager,
+        rdtype: dns.rdatatype.RdataType = dns.rdatatype.AXFR,
+        serial: Optional[int] = None,
+        is_udp: bool = False,
+    ):
+        """Initialize an inbound zone transfer.
+
+        *txn_manager* is a :py:class:`dns.transaction.TransactionManager`.
+
+        *rdtype* can be `dns.rdatatype.AXFR` or `dns.rdatatype.IXFR`.
+
+        *serial* is the base serial number for IXFRs, and is required in
+        that case.
+
+        *is_udp*, a ``bool``, indicates if UDP is being used for this
+        XFR.
+        """
+        self.txn_manager = txn_manager
+        self.txn: Optional[dns.transaction.Transaction] = None
+        self.rdtype = rdtype
+        if rdtype == dns.rdatatype.IXFR:
+            if serial is None:
+                raise ValueError("a starting serial must be supplied for IXFRs")
+        elif is_udp:
+            raise ValueError("is_udp specified for AXFR")
+        self.serial = serial
+        self.is_udp = is_udp
+        (_, _, self.origin) = txn_manager.origin_information()
+        self.soa_rdataset: Optional[dns.rdataset.Rdataset] = None
+        self.done = False
+        self.expecting_SOA = False
+        self.delete_mode = False
+
+    def process_message(self, message: dns.message.Message) -> bool:
+        """Process one message in the transfer.
+
+        The message should have the same relativization as was specified when
+        the `dns.xfr.Inbound` was created. The message should also have been
+        created with `one_rr_per_rrset=True` because order matters.
+
+        Returns `True` if the transfer is complete, and `False` otherwise.
+        """
+        if self.txn is None:
+            replacement = self.rdtype == dns.rdatatype.AXFR
+            self.txn = self.txn_manager.writer(replacement)
+        rcode = message.rcode()
+        if rcode != dns.rcode.NOERROR:
+            raise TransferError(rcode)
+        #
+        # We don't require a question section, but if one is present it
+        # should be correct.
+        #
+        if len(message.question) > 0:
+            if message.question[0].name != self.origin:
+                raise dns.exception.FormError("wrong question name")
+            if message.question[0].rdtype != self.rdtype:
+                raise dns.exception.FormError("wrong question rdatatype")
+        answer_index = 0
+        if self.soa_rdataset is None:
+            #
+            # This is the first message. We're expecting an SOA at
+            # the origin.
+            #
+            if not message.answer or message.answer[0].name != self.origin:
+                raise dns.exception.FormError("No answer or RRset not for zone origin")
+            rrset = message.answer[0]
+            rdataset = rrset
+            if rdataset.rdtype != dns.rdatatype.SOA:
+                raise dns.exception.FormError("first RRset is not an SOA")
+            answer_index = 1
+            self.soa_rdataset = rdataset.copy()
+            if self.rdtype == dns.rdatatype.IXFR:
+                if self.soa_rdataset[0].serial == self.serial:
+                    #
+                    # We're already up-to-date.
+                    #
+                    self.done = True
+                elif dns.serial.Serial(self.soa_rdataset[0].serial) < self.serial:
+                    # It went backwards!
+                    raise SerialWentBackwards
+                else:
+                    if self.is_udp and len(message.answer[answer_index:]) == 0:
+                        #
+                        # There are no more records, so this is the
+                        # "truncated" response. Say to use TCP.
+                        #
+                        raise UseTCP
+                    #
+                    # Note we're expecting another SOA so we can detect
+                    # if this IXFR response is an AXFR-style response.
+                    #
+                    self.expecting_SOA = True
+        #
+        # Process the answer section (other than the initial SOA in
+        # the first message).
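+        # For IXFR, each origin SOA toggles between a deletion subsequence
+        # and an addition subsequence; for AXFR everything is an addition.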
+ # + for rrset in message.answer[answer_index:]: + name = rrset.name + rdataset = rrset + if self.done: + raise dns.exception.FormError("answers after final SOA") + assert self.txn is not None # for mypy + if rdataset.rdtype == dns.rdatatype.SOA and name == self.origin: + # + # Every time we see an origin SOA delete_mode inverts + # + if self.rdtype == dns.rdatatype.IXFR: + self.delete_mode = not self.delete_mode + # + # If this SOA Rdataset is equal to the first we saw + # then we're finished. If this is an IXFR we also + # check that we're seeing the record in the expected + # part of the response. + # + if rdataset == self.soa_rdataset and ( + self.rdtype == dns.rdatatype.AXFR + or (self.rdtype == dns.rdatatype.IXFR and self.delete_mode) + ): + # + # This is the final SOA + # + if self.expecting_SOA: + # We got an empty IXFR sequence! + raise dns.exception.FormError("empty IXFR sequence") + if ( + self.rdtype == dns.rdatatype.IXFR + and self.serial != rdataset[0].serial + ): + raise dns.exception.FormError("unexpected end of IXFR sequence") + self.txn.replace(name, rdataset) + self.txn.commit() + self.txn = None + self.done = True + else: + # + # This is not the final SOA + # + self.expecting_SOA = False + if self.rdtype == dns.rdatatype.IXFR: + if self.delete_mode: + # This is the start of an IXFR deletion set + if rdataset[0].serial != self.serial: + raise dns.exception.FormError( + "IXFR base serial mismatch" + ) + else: + # This is the start of an IXFR addition set + self.serial = rdataset[0].serial + self.txn.replace(name, rdataset) + else: + # We saw a non-final SOA for the origin in an AXFR. + raise dns.exception.FormError("unexpected origin SOA in AXFR") + continue + if self.expecting_SOA: + # + # We made an IXFR request and are expecting another + # SOA RR, but saw something else, so this must be an + # AXFR response. + # + self.rdtype = dns.rdatatype.AXFR + self.expecting_SOA = False + self.delete_mode = False + self.txn.rollback() + self.txn = self.txn_manager.writer(True) + # + # Note we are falling through into the code below + # so whatever rdataset this was gets written. + # + # Add or remove the data + if self.delete_mode: + self.txn.delete_exact(name, rdataset) + else: + self.txn.add(name, rdataset) + if self.is_udp and not self.done: + # + # This is a UDP IXFR and we didn't get to done, and we didn't + # get the proper "truncated" response + # + raise dns.exception.FormError("unexpected end of UDP IXFR") + return self.done + + # + # Inbounds are context managers. + # + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if self.txn: + self.txn.rollback() + return False + + +def make_query( + txn_manager: dns.transaction.TransactionManager, + serial: Optional[int] = 0, + use_edns: Optional[Union[int, bool]] = None, + ednsflags: Optional[int] = None, + payload: Optional[int] = None, + request_payload: Optional[int] = None, + options: Optional[List[dns.edns.Option]] = None, + keyring: Any = None, + keyname: Optional[dns.name.Name] = None, + keyalgorithm: Union[dns.name.Name, str] = dns.tsig.default_algorithm, +) -> Tuple[dns.message.QueryMessage, Optional[int]]: + """Make an AXFR or IXFR query. + + *txn_manager* is a ``dns.transaction.TransactionManager``, typically a + ``dns.zone.Zone``. + + *serial* is an ``int`` or ``None``. 
If 0, then IXFR will be + attempted using the most recent serial number from the + *txn_manager*; it is the caller's responsibility to ensure there + are no write transactions active that could invalidate the + retrieved serial. If a serial cannot be determined, AXFR will be + forced. Other integer values are the starting serial to use. + ``None`` forces an AXFR. + + Please see the documentation for :py:func:`dns.message.make_query` and + :py:func:`dns.message.Message.use_tsig` for details on the other parameters + to this function. + + Returns a `(query, serial)` tuple. + """ + (zone_origin, _, origin) = txn_manager.origin_information() + if zone_origin is None: + raise ValueError("no zone origin") + if serial is None: + rdtype = dns.rdatatype.AXFR + elif not isinstance(serial, int): + raise ValueError("serial is not an integer") + elif serial == 0: + with txn_manager.reader() as txn: + rdataset = txn.get(origin, "SOA") + if rdataset: + serial = rdataset[0].serial + rdtype = dns.rdatatype.IXFR + else: + serial = None + rdtype = dns.rdatatype.AXFR + elif serial > 0 and serial < 4294967296: + rdtype = dns.rdatatype.IXFR + else: + raise ValueError("serial out-of-range") + rdclass = txn_manager.get_class() + q = dns.message.make_query( + zone_origin, + rdtype, + rdclass, + use_edns, + False, + ednsflags, + payload, + request_payload, + options, + ) + if serial is not None: + rdata = dns.rdata.from_text(rdclass, "SOA", f". . {serial} 0 0 0 0") + rrset = q.find_rrset( + q.authority, zone_origin, rdclass, dns.rdatatype.SOA, create=True + ) + rrset.add(rdata, 0) + if keyring is not None: + q.use_tsig(keyring, keyname, algorithm=keyalgorithm) + return (q, serial) + + +def extract_serial_from_query(query: dns.message.Message) -> Optional[int]: + """Extract the SOA serial number from query if it is an IXFR and return + it, otherwise return None. + + *query* is a dns.message.QueryMessage that is an IXFR or AXFR request. + + Raises if the query is not an IXFR or AXFR, or if an IXFR doesn't have + an appropriate SOA RRset in the authority section. + """ + if not isinstance(query, dns.message.QueryMessage): + raise ValueError("query not a QueryMessage") + question = query.question[0] + if question.rdtype == dns.rdatatype.AXFR: + return None + elif question.rdtype != dns.rdatatype.IXFR: + raise ValueError("query is not an AXFR or IXFR") + soa = query.find_rrset( + query.authority, question.name, question.rdclass, dns.rdatatype.SOA + ) + return soa[0].serial diff --git a/backend/test/lib/python3.8/site-packages/dns/zone.py b/backend/test/lib/python3.8/site-packages/dns/zone.py new file mode 100644 index 0000000000000000000000000000000000000000..9e763f5f0cee92b99f397af83969fb2da8e28745 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/zone.py @@ -0,0 +1,1395 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. +# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL NOMINUM BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""DNS Zones."""
+
+import contextlib
+import io
+import os
+import struct
+from typing import Any, Dict, Iterable, Iterator, List, Optional, Set, Tuple, Union
+
+import dns.exception
+import dns.grange
+import dns.immutable
+import dns.name
+import dns.node
+import dns.rdata
+import dns.rdataclass
+import dns.rdataset
+import dns.rdatatype
+import dns.rdtypes.ANY.SOA
+import dns.rdtypes.ANY.ZONEMD
+import dns.rrset
+import dns.tokenizer
+import dns.transaction
+import dns.ttl
+import dns.zonefile
+from dns.zonetypes import DigestHashAlgorithm, DigestScheme, _digest_hashers
+
+
+class BadZone(dns.exception.DNSException):
+
+    """The DNS zone is malformed."""
+
+
+class NoSOA(BadZone):
+
+    """The DNS zone has no SOA RR at its origin."""
+
+
+class NoNS(BadZone):
+
+    """The DNS zone has no NS RRset at its origin."""
+
+
+class UnknownOrigin(BadZone):
+
+    """The DNS zone's origin is unknown."""
+
+
+class UnsupportedDigestScheme(dns.exception.DNSException):
+
+    """The zone digest's scheme is unsupported."""
+
+
+class UnsupportedDigestHashAlgorithm(dns.exception.DNSException):
+
+    """The zone digest's hash algorithm is unsupported."""
+
+
+class NoDigest(dns.exception.DNSException):
+
+    """The DNS zone has no ZONEMD RRset at its origin."""
+
+
+class DigestVerificationFailure(dns.exception.DNSException):
+
+    """The ZONEMD digest failed to verify."""
+
+
+class Zone(dns.transaction.TransactionManager):
+
+    """A DNS zone.
+
+    A ``Zone`` is a mapping from names to nodes. The zone object may be
+    treated like a Python dictionary, e.g. ``zone[name]`` will retrieve
+    the node associated with that name. The *name* may be a
+    ``dns.name.Name`` object, or it may be a string. In either case,
+    if the name is relative it is treated as relative to the origin of
+    the zone.
+    """
+
+    node_factory = dns.node.Node
+
+    __slots__ = ["rdclass", "origin", "nodes", "relativize"]
+
+    def __init__(
+        self,
+        origin: Optional[Union[dns.name.Name, str]],
+        rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN,
+        relativize: bool = True,
+    ):
+        """Initialize a zone object.
+
+        *origin* is the origin of the zone. It may be a ``dns.name.Name``,
+        a ``str``, or ``None``. If ``None``, then the zone's origin will
+        be set by the first ``$ORIGIN`` line in a zone file.
+
+        *rdclass*, an ``int``, the zone's rdata class; the default is class IN.
+
+        *relativize*, a ``bool``, determines whether domain names are
+        relativized to the zone's origin. The default is ``True``.
+        """
+
+        if origin is not None:
+            if isinstance(origin, str):
+                origin = dns.name.from_text(origin)
+            elif not isinstance(origin, dns.name.Name):
+                raise ValueError("origin parameter must be convertible to a DNS name")
+            if not origin.is_absolute():
+                raise ValueError("origin parameter must be an absolute name")
+        self.origin = origin
+        self.rdclass = rdclass
+        self.nodes: Dict[dns.name.Name, dns.node.Node] = {}
+        self.relativize = relativize
+
+    def __eq__(self, other):
+        """Two zones are equal if they have the same origin, class, and
+        nodes.
+
+        Returns a ``bool``.
+ """ + + if not isinstance(other, Zone): + return False + if ( + self.rdclass != other.rdclass + or self.origin != other.origin + or self.nodes != other.nodes + ): + return False + return True + + def __ne__(self, other): + """Are two zones not equal? + + Returns a ``bool``. + """ + + return not self.__eq__(other) + + def _validate_name(self, name: Union[dns.name.Name, str]) -> dns.name.Name: + if isinstance(name, str): + name = dns.name.from_text(name, None) + elif not isinstance(name, dns.name.Name): + raise KeyError("name parameter must be convertible to a DNS name") + if name.is_absolute(): + if self.origin is None: + # This should probably never happen as other code (e.g. + # _rr_line) will notice the lack of an origin before us, but + # we check just in case! + raise KeyError("no zone origin is defined") + if not name.is_subdomain(self.origin): + raise KeyError("name parameter must be a subdomain of the zone origin") + if self.relativize: + name = name.relativize(self.origin) + elif not self.relativize: + # We have a relative name in a non-relative zone, so derelativize. + if self.origin is None: + raise KeyError("no zone origin is defined") + name = name.derelativize(self.origin) + return name + + def __getitem__(self, key): + key = self._validate_name(key) + return self.nodes[key] + + def __setitem__(self, key, value): + key = self._validate_name(key) + self.nodes[key] = value + + def __delitem__(self, key): + key = self._validate_name(key) + del self.nodes[key] + + def __iter__(self): + return self.nodes.__iter__() + + def keys(self): + return self.nodes.keys() + + def values(self): + return self.nodes.values() + + def items(self): + return self.nodes.items() + + def get(self, key): + key = self._validate_name(key) + return self.nodes.get(key) + + def __contains__(self, key): + key = self._validate_name(key) + return key in self.nodes + + def find_node( + self, name: Union[dns.name.Name, str], create: bool = False + ) -> dns.node.Node: + """Find a node in the zone, possibly creating it. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.node.Node``. + """ + + name = self._validate_name(name) + node = self.nodes.get(name) + if node is None: + if not create: + raise KeyError + node = self.node_factory() + self.nodes[name] = node + return node + + def get_node( + self, name: Union[dns.name.Name, str], create: bool = False + ) -> Optional[dns.node.Node]: + """Get a node in the zone, possibly creating it. + + This method is like ``find_node()``, except it returns None instead + of raising an exception if the node does not exist and creation + has not been requested. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.node.Node`` or ``None``. 
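+
+        A minimal illustrative sketch (it assumes ``zone`` is an existing
+        ``dns.zone.Zone``; the owner name is hypothetical)::
+
+            node = zone.get_node('www')
+            if node is None:
+                ...  # no such name in the zone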
+ """ + + try: + node = self.find_node(name, create) + except KeyError: + node = None + return node + + def delete_node(self, name: Union[dns.name.Name, str]) -> None: + """Delete the specified node if it exists. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + It is not an error if the node does not exist. + """ + + name = self._validate_name(name) + if name in self.nodes: + del self.nodes[name] + + def find_rdataset( + self, + name: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset: + """Look for an rdataset with the specified name and type in the zone, + and return an rdataset encapsulating it. + + The rdataset returned is not a copy; changes to it will change + the zone. + + KeyError is raised if the name or type are not found. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. + + *covers*, a ``dns.rdatatype.RdataType`` or ``str`` the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.rdataset.Rdataset``. + """ + + name = self._validate_name(name) + rdtype = dns.rdatatype.RdataType.make(rdtype) + covers = dns.rdatatype.RdataType.make(covers) + node = self.find_node(name, create) + return node.find_rdataset(self.rdclass, rdtype, covers, create) + + def get_rdataset( + self, + name: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + create: bool = False, + ) -> Optional[dns.rdataset.Rdataset]: + """Look for an rdataset with the specified name and type in the zone. + + This method is like ``find_rdataset()``, except it returns None instead + of raising an exception if the rdataset does not exist and creation + has not been requested. + + The rdataset returned is not a copy; changes to it will change + the zone. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. + + *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type. 
+ Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.rdataset.Rdataset`` or ``None``. + """ + + try: + rdataset = self.find_rdataset(name, rdtype, covers, create) + except KeyError: + rdataset = None + return rdataset + + def delete_rdataset( + self, + name: Union[dns.name.Name, str], + rdtype: Union[dns.rdatatype.RdataType, str], + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + ) -> None: + """Delete the rdataset matching *rdtype* and *covers*, if it + exists at the node specified by *name*. + + It is not an error if the node does not exist, or if there is no matching + rdataset at the node. + + If the node has no rdatasets after the deletion, it will itself be deleted. + + *name*: the name of the node to find. The value may be a ``dns.name.Name`` or a + ``str``. If absolute, the name must be a subdomain of the zone's origin. If + ``zone.relativize`` is ``True``, then the name will be relativized. + + *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired. + + *covers*, a ``dns.rdatatype.RdataType`` or ``str`` or ``None``, the covered + type. Usually this value is ``dns.rdatatype.NONE``, but if the rdtype is + ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, then the covers value will be + the rdata type the SIG/RRSIG covers. The library treats the SIG and RRSIG types + as if they were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). This + makes RRSIGs much easier to work with than if RRSIGs covering different rdata + types were aggregated into a single RRSIG rdataset. + """ + + name = self._validate_name(name) + rdtype = dns.rdatatype.RdataType.make(rdtype) + covers = dns.rdatatype.RdataType.make(covers) + node = self.get_node(name) + if node is not None: + node.delete_rdataset(self.rdclass, rdtype, covers) + if len(node) == 0: + self.delete_node(name) + + def replace_rdataset( + self, name: Union[dns.name.Name, str], replacement: dns.rdataset.Rdataset + ) -> None: + """Replace an rdataset at name. + + It is not an error if there is no rdataset matching I{replacement}. + + Ownership of the *replacement* object is transferred to the zone; + in other words, this method does not store a copy of *replacement* + at the node, it stores *replacement* itself. + + If the node does not exist, it is created. + + *name*: the name of the node to find. + The value may be a ``dns.name.Name`` or a ``str``. If absolute, the + name must be a subdomain of the zone's origin. If ``zone.relativize`` + is ``True``, then the name will be relativized. + + *replacement*, a ``dns.rdataset.Rdataset``, the replacement rdataset. 
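+
+        A minimal illustrative sketch (it assumes ``zone`` is an existing
+        ``dns.zone.Zone``; the owner name and address are hypothetical)::
+
+            rds = dns.rdataset.from_text('IN', 'A', 300, '10.0.0.1')
+            zone.replace_rdataset('www', rds)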
+        """
+
+        if replacement.rdclass != self.rdclass:
+            raise ValueError("replacement.rdclass != zone.rdclass")
+        node = self.find_node(name, True)
+        node.replace_rdataset(replacement)
+
+    def find_rrset(
+        self,
+        name: Union[dns.name.Name, str],
+        rdtype: Union[dns.rdatatype.RdataType, str],
+        covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE,
+    ) -> dns.rrset.RRset:
+        """Look for an rdataset with the specified name and type in the zone,
+        and return an RRset encapsulating it.
+
+        This method is less efficient than the similar
+        ``find_rdataset()`` because it creates an RRset instead of
+        returning the matching rdataset. It may be more convenient
+        for some uses since it returns an object which binds the owner
+        name to the rdataset.
+
+        This method may not be used to create new nodes or rdatasets;
+        use ``find_rdataset()`` instead.
+
+        *name*: the name of the node to find.
+        The value may be a ``dns.name.Name`` or a ``str``. If absolute, the
+        name must be a subdomain of the zone's origin. If ``zone.relativize``
+        is ``True``, then the name will be relativized.
+
+        *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired.
+
+        *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type.
+        Usually this value is ``dns.rdatatype.NONE``, but if the
+        rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``,
+        then the covers value will be the rdata type the SIG/RRSIG
+        covers. The library treats the SIG and RRSIG types as if they
+        were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA).
+        This makes RRSIGs much easier to work with than if RRSIGs
+        covering different rdata types were aggregated into a single
+        RRSIG rdataset.
+
+        Raises ``KeyError`` if the name or the matching rdataset is not
+        found, or if the name was not a subdomain of the origin.
+
+        Returns a ``dns.rrset.RRset``.
+        """
+
+        vname = self._validate_name(name)
+        rdtype = dns.rdatatype.RdataType.make(rdtype)
+        covers = dns.rdatatype.RdataType.make(covers)
+        rdataset = self.nodes[vname].find_rdataset(self.rdclass, rdtype, covers)
+        rrset = dns.rrset.RRset(vname, self.rdclass, rdtype, covers)
+        rrset.update(rdataset)
+        return rrset
+
+    def get_rrset(
+        self,
+        name: Union[dns.name.Name, str],
+        rdtype: Union[dns.rdatatype.RdataType, str],
+        covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE,
+    ) -> Optional[dns.rrset.RRset]:
+        """Look for an rdataset with the specified name and type in the zone,
+        and return an RRset encapsulating it.
+
+        This method is less efficient than the similar ``get_rdataset()``
+        because it creates an RRset instead of returning the matching
+        rdataset. It may be more convenient for some uses since it
+        returns an object which binds the owner name to the rdataset.
+
+        This method may not be used to create new nodes or rdatasets;
+        use ``get_rdataset()`` instead.
+
+        *name*: the name of the node to find.
+        The value may be a ``dns.name.Name`` or a ``str``. If absolute, the
+        name must be a subdomain of the zone's origin. If ``zone.relativize``
+        is ``True``, then the name will be relativized.
+
+        *rdtype*, a ``dns.rdatatype.RdataType`` or ``str``, the rdata type desired.
+
+        *covers*, a ``dns.rdatatype.RdataType`` or ``str``, the covered type.
+        Usually this value is ``dns.rdatatype.NONE``, but if the
+        rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``,
+        then the covers value will be the rdata type the SIG/RRSIG
+        covers.
The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + + *create*, a ``bool``. If true, the node will be created if it does + not exist. + + Raises ``KeyError`` if the name is not known and create was + not specified, or if the name was not a subdomain of the origin. + + Returns a ``dns.rrset.RRset`` or ``None``. + """ + + try: + rrset = self.find_rrset(name, rdtype, covers) + except KeyError: + rrset = None + return rrset + + def iterate_rdatasets( + self, + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.ANY, + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + ) -> Iterator[Tuple[dns.name.Name, dns.rdataset.Rdataset]]: + """Return a generator which yields (name, rdataset) tuples for + all rdatasets in the zone which have the specified *rdtype* + and *covers*. If *rdtype* is ``dns.rdatatype.ANY``, the default, + then all rdatasets will be matched. + + *rdtype*, a ``dns.rdataset.Rdataset`` or ``str``, the rdata type desired. + + *covers*, a ``dns.rdataset.Rdataset`` or ``str``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + """ + + rdtype = dns.rdatatype.RdataType.make(rdtype) + covers = dns.rdatatype.RdataType.make(covers) + for name, node in self.items(): + for rds in node: + if rdtype == dns.rdatatype.ANY or ( + rds.rdtype == rdtype and rds.covers == covers + ): + yield (name, rds) + + def iterate_rdatas( + self, + rdtype: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.ANY, + covers: Union[dns.rdatatype.RdataType, str] = dns.rdatatype.NONE, + ) -> Iterator[Tuple[dns.name.Name, int, dns.rdata.Rdata]]: + """Return a generator which yields (name, ttl, rdata) tuples for + all rdatas in the zone which have the specified *rdtype* + and *covers*. If *rdtype* is ``dns.rdatatype.ANY``, the default, + then all rdatas will be matched. + + *rdtype*, a ``dns.rdataset.Rdataset`` or ``str``, the rdata type desired. + + *covers*, a ``dns.rdataset.Rdataset`` or ``str``, the covered type. + Usually this value is ``dns.rdatatype.NONE``, but if the + rdtype is ``dns.rdatatype.SIG`` or ``dns.rdatatype.RRSIG``, + then the covers value will be the rdata type the SIG/RRSIG + covers. The library treats the SIG and RRSIG types as if they + were a family of types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA). + This makes RRSIGs much easier to work with than if RRSIGs + covering different rdata types were aggregated into a single + RRSIG rdataset. + """ + + rdtype = dns.rdatatype.RdataType.make(rdtype) + covers = dns.rdatatype.RdataType.make(covers) + for name, node in self.items(): + for rds in node: + if rdtype == dns.rdatatype.ANY or ( + rds.rdtype == rdtype and rds.covers == covers + ): + for rdata in rds: + yield (name, rds.ttl, rdata) + + def to_file( + self, + f: Any, + sorted: bool = True, + relativize: bool = True, + nl: Optional[str] = None, + want_comments: bool = False, + want_origin: bool = False, + ) -> None: + """Write a zone to a file. 
+ + *f*, a file or `str`. If *f* is a string, it is treated + as the name of a file to open. + + *sorted*, a ``bool``. If True, the default, then the file + will be written with the names sorted in DNSSEC order from + least to greatest. Otherwise the names will be written in + whatever order they happen to have in the zone's dictionary. + + *relativize*, a ``bool``. If True, the default, then domain + names in the output will be relativized to the zone's origin + if possible. + + *nl*, a ``str`` or None. The end of line string. If not + ``None``, the output will use the platform's native + end-of-line marker (i.e. LF on POSIX, CRLF on Windows). + + *want_comments*, a ``bool``. If ``True``, emit end-of-line comments + as part of writing the file. If ``False``, the default, do not + emit them. + + *want_origin*, a ``bool``. If ``True``, emit a $ORIGIN line at + the start of the file. If ``False``, the default, do not emit + one. + """ + + if isinstance(f, str): + cm: contextlib.AbstractContextManager = open(f, "wb") + else: + cm = contextlib.nullcontext(f) + with cm as f: + # must be in this way, f.encoding may contain None, or even + # attribute may not be there + file_enc = getattr(f, "encoding", None) + if file_enc is None: + file_enc = "utf-8" + + if nl is None: + # binary mode, '\n' is not enough + nl_b = os.linesep.encode(file_enc) + nl = "\n" + elif isinstance(nl, str): + nl_b = nl.encode(file_enc) + else: + nl_b = nl + nl = nl.decode() + + if want_origin: + assert self.origin is not None + l = "$ORIGIN " + self.origin.to_text() + l_b = l.encode(file_enc) + try: + f.write(l_b) + f.write(nl_b) + except TypeError: # textual mode + f.write(l) + f.write(nl) + + if sorted: + names = list(self.keys()) + names.sort() + else: + names = self.keys() + for n in names: + l = self[n].to_text( + n, + origin=self.origin, + relativize=relativize, + want_comments=want_comments, + ) + l_b = l.encode(file_enc) + + try: + f.write(l_b) + f.write(nl_b) + except TypeError: # textual mode + f.write(l) + f.write(nl) + + def to_text( + self, + sorted: bool = True, + relativize: bool = True, + nl: Optional[str] = None, + want_comments: bool = False, + want_origin: bool = False, + ) -> str: + """Return a zone's text as though it were written to a file. + + *sorted*, a ``bool``. If True, the default, then the file + will be written with the names sorted in DNSSEC order from + least to greatest. Otherwise the names will be written in + whatever order they happen to have in the zone's dictionary. + + *relativize*, a ``bool``. If True, the default, then domain + names in the output will be relativized to the zone's origin + if possible. + + *nl*, a ``str`` or None. The end of line string. If not + ``None``, the output will use the platform's native + end-of-line marker (i.e. LF on POSIX, CRLF on Windows). + + *want_comments*, a ``bool``. If ``True``, emit end-of-line comments + as part of writing the file. If ``False``, the default, do not + emit them. + + *want_origin*, a ``bool``. If ``True``, emit a $ORIGIN line at + the start of the output. If ``False``, the default, do not emit + one. + + Returns a ``str``. + """ + temp_buffer = io.StringIO() + self.to_file(temp_buffer, sorted, relativize, nl, want_comments, want_origin) + return_value = temp_buffer.getvalue() + temp_buffer.close() + return return_value + + def check_origin(self) -> None: + """Do some simple checking of the zone's origin. + + Raises ``dns.zone.NoSOA`` if there is no SOA RRset. + + Raises ``dns.zone.NoNS`` if there is no NS RRset. 
+ + Raises ``KeyError`` if there is no origin node. + """ + if self.relativize: + name = dns.name.empty + else: + assert self.origin is not None + name = self.origin + if self.get_rdataset(name, dns.rdatatype.SOA) is None: + raise NoSOA + if self.get_rdataset(name, dns.rdatatype.NS) is None: + raise NoNS + + def get_soa( + self, txn: Optional[dns.transaction.Transaction] = None + ) -> dns.rdtypes.ANY.SOA.SOA: + """Get the zone SOA rdata. + + Raises ``dns.zone.NoSOA`` if there is no SOA RRset. + + Returns a ``dns.rdtypes.ANY.SOA.SOA`` Rdata. + """ + if self.relativize: + origin_name = dns.name.empty + else: + if self.origin is None: + # get_soa() has been called very early, and there must not be + # an SOA if there is no origin. + raise NoSOA + origin_name = self.origin + soa: Optional[dns.rdataset.Rdataset] + if txn: + soa = txn.get(origin_name, dns.rdatatype.SOA) + else: + soa = self.get_rdataset(origin_name, dns.rdatatype.SOA) + if soa is None: + raise NoSOA + return soa[0] + + def _compute_digest( + self, + hash_algorithm: DigestHashAlgorithm, + scheme: DigestScheme = DigestScheme.SIMPLE, + ) -> bytes: + hashinfo = _digest_hashers.get(hash_algorithm) + if not hashinfo: + raise UnsupportedDigestHashAlgorithm + if scheme != DigestScheme.SIMPLE: + raise UnsupportedDigestScheme + + if self.relativize: + origin_name = dns.name.empty + else: + assert self.origin is not None + origin_name = self.origin + hasher = hashinfo() + for name, node in sorted(self.items()): + rrnamebuf = name.to_digestable(self.origin) + for rdataset in sorted(node, key=lambda rds: (rds.rdtype, rds.covers)): + if name == origin_name and dns.rdatatype.ZONEMD in ( + rdataset.rdtype, + rdataset.covers, + ): + continue + rrfixed = struct.pack( + "!HHI", rdataset.rdtype, rdataset.rdclass, rdataset.ttl + ) + rdatas = [rdata.to_digestable(self.origin) for rdata in rdataset] + for rdata in sorted(rdatas): + rrlen = struct.pack("!H", len(rdata)) + hasher.update(rrnamebuf + rrfixed + rrlen + rdata) + return hasher.digest() + + def compute_digest( + self, + hash_algorithm: DigestHashAlgorithm, + scheme: DigestScheme = DigestScheme.SIMPLE, + ) -> dns.rdtypes.ANY.ZONEMD.ZONEMD: + serial = self.get_soa().serial + digest = self._compute_digest(hash_algorithm, scheme) + return dns.rdtypes.ANY.ZONEMD.ZONEMD( + self.rdclass, dns.rdatatype.ZONEMD, serial, scheme, hash_algorithm, digest + ) + + def verify_digest( + self, zonemd: Optional[dns.rdtypes.ANY.ZONEMD.ZONEMD] = None + ) -> None: + digests: Union[dns.rdataset.Rdataset, List[dns.rdtypes.ANY.ZONEMD.ZONEMD]] + if zonemd: + digests = [zonemd] + else: + assert self.origin is not None + rds = self.get_rdataset(self.origin, dns.rdatatype.ZONEMD) + if rds is None: + raise NoDigest + digests = rds + for digest in digests: + try: + computed = self._compute_digest(digest.hash_algorithm, digest.scheme) + if computed == digest.digest: + return + except Exception: + pass + raise DigestVerificationFailure + + # TransactionManager methods + + def reader(self) -> "Transaction": + return Transaction(self, False, Version(self, 1, self.nodes, self.origin)) + + def writer(self, replacement: bool = False) -> "Transaction": + txn = Transaction(self, replacement) + txn._setup_version() + return txn + + def origin_information( + self, + ) -> Tuple[Optional[dns.name.Name], bool, Optional[dns.name.Name]]: + effective: Optional[dns.name.Name] + if self.relativize: + effective = dns.name.empty + else: + effective = self.origin + return (self.origin, self.relativize, effective) + + def get_class(self): + 
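+        # Return the zone's rdata class (e.g. dns.rdataclass.IN).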
return self.rdclass + + # Transaction methods + + def _end_read(self, txn): + pass + + def _end_write(self, txn): + pass + + def _commit_version(self, _, version, origin): + self.nodes = version.nodes + if self.origin is None: + self.origin = origin + + def _get_next_version_id(self): + # Versions are ephemeral and all have id 1 + return 1 + + +# These classes used to be in dns.versioned, but have moved here so we can use +# the copy-on-write transaction mechanism for both kinds of zones. In a +# regular zone, the version only exists during the transaction, and the nodes +# are regular dns.node.Nodes. + +# A node with a version id. + + +class VersionedNode(dns.node.Node): # lgtm[py/missing-equals] + __slots__ = ["id"] + + def __init__(self): + super().__init__() + # A proper id will get set by the Version + self.id = 0 + + +@dns.immutable.immutable +class ImmutableVersionedNode(VersionedNode): + def __init__(self, node): + super().__init__() + self.id = node.id + self.rdatasets = tuple( + [dns.rdataset.ImmutableRdataset(rds) for rds in node.rdatasets] + ) + + def find_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> dns.rdataset.Rdataset: + if create: + raise TypeError("immutable") + return super().find_rdataset(rdclass, rdtype, covers, False) + + def get_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + create: bool = False, + ) -> Optional[dns.rdataset.Rdataset]: + if create: + raise TypeError("immutable") + return super().get_rdataset(rdclass, rdtype, covers, False) + + def delete_rdataset( + self, + rdclass: dns.rdataclass.RdataClass, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType = dns.rdatatype.NONE, + ) -> None: + raise TypeError("immutable") + + def replace_rdataset(self, replacement: dns.rdataset.Rdataset) -> None: + raise TypeError("immutable") + + def is_immutable(self) -> bool: + return True + + +class Version: + def __init__( + self, + zone: Zone, + id: int, + nodes: Optional[Dict[dns.name.Name, dns.node.Node]] = None, + origin: Optional[dns.name.Name] = None, + ): + self.zone = zone + self.id = id + if nodes is not None: + self.nodes = nodes + else: + self.nodes = {} + self.origin = origin + + def _validate_name(self, name: dns.name.Name) -> dns.name.Name: + if name.is_absolute(): + if self.origin is None: + # This should probably never happen as other code (e.g. + # _rr_line) will notice the lack of an origin before us, but + # we check just in case! + raise KeyError("no zone origin is defined") + if not name.is_subdomain(self.origin): + raise KeyError("name is not a subdomain of the zone origin") + if self.zone.relativize: + name = name.relativize(self.origin) + elif not self.zone.relativize: + # We have a relative name in a non-relative zone, so derelativize. 
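+            # Derelativizing requires a known origin to append.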
+ if self.origin is None: + raise KeyError("no zone origin is defined") + name = name.derelativize(self.origin) + return name + + def get_node(self, name: dns.name.Name) -> Optional[dns.node.Node]: + name = self._validate_name(name) + return self.nodes.get(name) + + def get_rdataset( + self, + name: dns.name.Name, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType, + ) -> Optional[dns.rdataset.Rdataset]: + node = self.get_node(name) + if node is None: + return None + return node.get_rdataset(self.zone.rdclass, rdtype, covers) + + def keys(self): + return self.nodes.keys() + + def items(self): + return self.nodes.items() + + +class WritableVersion(Version): + def __init__(self, zone: Zone, replacement: bool = False): + # The zone._versions_lock must be held by our caller in a versioned + # zone. + id = zone._get_next_version_id() + super().__init__(zone, id) + if not replacement: + # We copy the map, because that gives us a simple and thread-safe + # way of doing versions, and we have a garbage collector to help + # us. We only make new node objects if we actually change the + # node. + self.nodes.update(zone.nodes) + # We have to copy the zone origin as it may be None in the first + # version, and we don't want to mutate the zone until we commit. + self.origin = zone.origin + self.changed: Set[dns.name.Name] = set() + + def _maybe_cow(self, name: dns.name.Name) -> dns.node.Node: + name = self._validate_name(name) + node = self.nodes.get(name) + if node is None or name not in self.changed: + new_node = self.zone.node_factory() + if hasattr(new_node, "id"): + # We keep doing this for backwards compatibility, as earlier + # code used new_node.id != self.id for the "do we need to CoW?" + # test. Now we use the changed set as this works with both + # regular zones and versioned zones. + # + # We ignore the mypy error as this is safe but it doesn't see it. + new_node.id = self.id # type: ignore + if node is not None: + # moo! copy on write! + new_node.rdatasets.extend(node.rdatasets) + self.nodes[name] = new_node + self.changed.add(name) + return new_node + else: + return node + + def delete_node(self, name: dns.name.Name) -> None: + name = self._validate_name(name) + if name in self.nodes: + del self.nodes[name] + self.changed.add(name) + + def put_rdataset( + self, name: dns.name.Name, rdataset: dns.rdataset.Rdataset + ) -> None: + node = self._maybe_cow(name) + node.replace_rdataset(rdataset) + + def delete_rdataset( + self, + name: dns.name.Name, + rdtype: dns.rdatatype.RdataType, + covers: dns.rdatatype.RdataType, + ) -> None: + node = self._maybe_cow(name) + node.delete_rdataset(self.zone.rdclass, rdtype, covers) + if len(node) == 0: + del self.nodes[name] + + +@dns.immutable.immutable +class ImmutableVersion(Version): + def __init__(self, version: WritableVersion): + # We tell super() that it's a replacement as we don't want it + # to copy the nodes, as we're about to do that with an + # immutable Dict. + super().__init__(version.zone, True) + # set the right id! + self.id = version.id + # keep the origin + self.origin = version.origin + # Make changed nodes immutable + for name in version.changed: + node = version.nodes.get(name) + # it might not exist if we deleted it in the version + if node: + version.nodes[name] = ImmutableVersionedNode(node) + # We're changing the type of the nodes dictionary here on purpose, so + # we ignore the mypy error. 
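+        # Freezing the map keeps the committed version safe to share
+        # between readers without further copying.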
+ self.nodes = dns.immutable.Dict(version.nodes, True) # type: ignore + + +class Transaction(dns.transaction.Transaction): + def __init__(self, zone, replacement, version=None, make_immutable=False): + read_only = version is not None + super().__init__(zone, replacement, read_only) + self.version = version + self.make_immutable = make_immutable + + @property + def zone(self): + return self.manager + + def _setup_version(self): + assert self.version is None + self.version = WritableVersion(self.zone, self.replacement) + + def _get_rdataset(self, name, rdtype, covers): + return self.version.get_rdataset(name, rdtype, covers) + + def _put_rdataset(self, name, rdataset): + assert not self.read_only + self.version.put_rdataset(name, rdataset) + + def _delete_name(self, name): + assert not self.read_only + self.version.delete_node(name) + + def _delete_rdataset(self, name, rdtype, covers): + assert not self.read_only + self.version.delete_rdataset(name, rdtype, covers) + + def _name_exists(self, name): + return self.version.get_node(name) is not None + + def _changed(self): + if self.read_only: + return False + else: + return len(self.version.changed) > 0 + + def _end_transaction(self, commit): + if self.read_only: + self.zone._end_read(self) + elif commit and len(self.version.changed) > 0: + if self.make_immutable: + version = ImmutableVersion(self.version) + else: + version = self.version + self.zone._commit_version(self, version, self.version.origin) + else: + # rollback + self.zone._end_write(self) + + def _set_origin(self, origin): + if self.version.origin is None: + self.version.origin = origin + + def _iterate_rdatasets(self): + for name, node in self.version.items(): + for rdataset in node: + yield (name, rdataset) + + def _iterate_names(self): + return self.version.keys() + + def _get_node(self, name): + return self.version.get_node(name) + + def _origin_information(self): + (absolute, relativize, effective) = self.manager.origin_information() + if absolute is None and self.version.origin is not None: + # No origin has been committed yet, but we've learned one as part of + # this txn. Use it. + absolute = self.version.origin + if relativize: + effective = dns.name.empty + else: + effective = absolute + return (absolute, relativize, effective) + + +def from_text( + text: str, + origin: Optional[Union[dns.name.Name, str]] = None, + rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN, + relativize: bool = True, + zone_factory: Any = Zone, + filename: Optional[str] = None, + allow_include: bool = False, + check_origin: bool = True, + idna_codec: Optional[dns.name.IDNACodec] = None, + allow_directives: Union[bool, Iterable[str]] = True, +) -> Zone: + """Build a zone object from a zone file format string. + + *text*, a ``str``, the zone file format input. + + *origin*, a ``dns.name.Name``, a ``str``, or ``None``. The origin + of the zone; if not specified, the first ``$ORIGIN`` statement in the + zone file will determine the origin of the zone. + + *rdclass*, a ``dns.rdataclass.RdataClass``, the zone's rdata class; the default is + class IN. + + *relativize*, a ``bool``, determine's whether domain names are + relativized to the zone's origin. The default is ``True``. + + *zone_factory*, the zone factory to use or ``None``. If ``None``, then + ``dns.zone.Zone`` will be used. The value may be any class or callable + that returns a subclass of ``dns.zone.Zone``. 
+
+    *filename*, a ``str`` or ``None``, the filename to emit when
+    describing where an error occurred; the default is ``'<string>'``.
+
+    *allow_include*, a ``bool``. If ``True``, the default, then ``$INCLUDE``
+    directives are permitted. If ``False``, then encountering a ``$INCLUDE``
+    will raise a ``SyntaxError`` exception.
+
+    *check_origin*, a ``bool``. If ``True``, the default, then sanity
+    checks of the origin node will be made by calling the zone's
+    ``check_origin()`` method.
+
+    *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA
+    encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder
+    is used.
+
+    *allow_directives*, a ``bool`` or an iterable of ``str``. If ``True``, the default,
+    then directives are permitted, and the *allow_include* parameter controls whether
+    ``$INCLUDE`` is permitted. If ``False`` or an empty iterable, then no directive
+    processing is done and any directive-like text will be treated as a regular owner
+    name. If a non-empty iterable, then only the listed directives (including the
+    ``$``) are allowed.
+
+    Raises ``dns.zone.NoSOA`` if there is no SOA RRset.
+
+    Raises ``dns.zone.NoNS`` if there is no NS RRset.
+
+    Raises ``KeyError`` if there is no origin node.
+
+    Returns a subclass of ``dns.zone.Zone``.
+    """
+
+    # 'text' can also be a file, but we don't publish that fact
+    # since it's an implementation detail. The official file
+    # interface is from_file().
+
+    if filename is None:
+        filename = "<string>"
+    zone = zone_factory(origin, rdclass, relativize=relativize)
+    with zone.writer(True) as txn:
+        tok = dns.tokenizer.Tokenizer(text, filename, idna_codec=idna_codec)
+        reader = dns.zonefile.Reader(
+            tok,
+            rdclass,
+            txn,
+            allow_include=allow_include,
+            allow_directives=allow_directives,
+        )
+        try:
+            reader.read()
+        except dns.zonefile.UnknownOrigin:
+            # for backwards compatibility
+            raise dns.zone.UnknownOrigin
+    # Now that we're done reading, do some basic checking of the zone.
+    if check_origin:
+        zone.check_origin()
+    return zone
+
+
+def from_file(
+    f: Any,
+    origin: Optional[Union[dns.name.Name, str]] = None,
+    rdclass: dns.rdataclass.RdataClass = dns.rdataclass.IN,
+    relativize: bool = True,
+    zone_factory: Any = Zone,
+    filename: Optional[str] = None,
+    allow_include: bool = True,
+    check_origin: bool = True,
+    idna_codec: Optional[dns.name.IDNACodec] = None,
+    allow_directives: Union[bool, Iterable[str]] = True,
+) -> Zone:
+    """Read a zone file and build a zone object.
+
+    *f*, a file or ``str``. If *f* is a string, it is treated
+    as the name of a file to open.
+
+    *origin*, a ``dns.name.Name``, a ``str``, or ``None``. The origin
+    of the zone; if not specified, the first ``$ORIGIN`` statement in the
+    zone file will determine the origin of the zone.
+
+    *rdclass*, an ``int``, the zone's rdata class; the default is class IN.
+
+    *relativize*, a ``bool``, determines whether domain names are
+    relativized to the zone's origin. The default is ``True``.
+
+    *zone_factory*, the zone factory to use or ``None``. If ``None``, then
+    ``dns.zone.Zone`` will be used. The value may be any class or callable
+    that returns a subclass of ``dns.zone.Zone``.
+
+    *filename*, a ``str`` or ``None``, the filename to emit when
+    describing where an error occurred; the default is ``'<string>'``.
+
+    *allow_include*, a ``bool``. If ``True``, the default, then ``$INCLUDE``
+    directives are permitted. If ``False``, then encountering a ``$INCLUDE``
+    will raise a ``SyntaxError`` exception.
+
+    *check_origin*, a ``bool``.
If ``True``, the default, then sanity + checks of the origin node will be made by calling the zone's + ``check_origin()`` method. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. + + *allow_directives*, a ``bool`` or an iterable of `str`. If ``True``, the default, + then directives are permitted, and the *allow_include* parameter controls whether + ``$INCLUDE`` is permitted. If ``False`` or an empty iterable, then no directive + processing is done and any directive-like text will be treated as a regular owner + name. If a non-empty iterable, then only the listed directives (including the + ``$``) are allowed. + + Raises ``dns.zone.NoSOA`` if there is no SOA RRset. + + Raises ``dns.zone.NoNS`` if there is no NS RRset. + + Raises ``KeyError`` if there is no origin node. + + Returns a subclass of ``dns.zone.Zone``. + """ + + if isinstance(f, str): + if filename is None: + filename = f + cm: contextlib.AbstractContextManager = open(f) + else: + cm = contextlib.nullcontext(f) + with cm as f: + return from_text( + f, + origin, + rdclass, + relativize, + zone_factory, + filename, + allow_include, + check_origin, + idna_codec, + allow_directives, + ) + assert False # make mypy happy lgtm[py/unreachable-statement] + + +def from_xfr( + xfr: Any, + zone_factory: Any = Zone, + relativize: bool = True, + check_origin: bool = True, +) -> Zone: + """Convert the output of a zone transfer generator into a zone object. + + *xfr*, a generator of ``dns.message.Message`` objects, typically + ``dns.query.xfr()``. + + *relativize*, a ``bool``, determine's whether domain names are + relativized to the zone's origin. The default is ``True``. + It is essential that the relativize setting matches the one specified + to the generator. + + *check_origin*, a ``bool``. If ``True``, the default, then sanity + checks of the origin node will be made by calling the zone's + ``check_origin()`` method. + + Raises ``dns.zone.NoSOA`` if there is no SOA RRset. + + Raises ``dns.zone.NoNS`` if there is no NS RRset. + + Raises ``KeyError`` if there is no origin node. + + Raises ``ValueError`` if no messages are yielded by the generator. + + Returns a subclass of ``dns.zone.Zone``. + """ + + z = None + for r in xfr: + if z is None: + if relativize: + origin = r.origin + else: + origin = r.answer[0].name + rdclass = r.answer[0].rdclass + z = zone_factory(origin, rdclass, relativize=relativize) + for rrset in r.answer: + znode = z.nodes.get(rrset.name) + if not znode: + znode = z.node_factory() + z.nodes[rrset.name] = znode + zrds = znode.find_rdataset(rrset.rdclass, rrset.rdtype, rrset.covers, True) + zrds.update_ttl(rrset.ttl) + for rd in rrset: + zrds.add(rd) + if z is None: + raise ValueError("empty transfer") + if check_origin: + z.check_origin() + return z diff --git a/backend/test/lib/python3.8/site-packages/dns/zonefile.py b/backend/test/lib/python3.8/site-packages/dns/zonefile.py new file mode 100644 index 0000000000000000000000000000000000000000..27f04924ff0f977e5496d1baa1c8f50481464a91 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/zonefile.py @@ -0,0 +1,747 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose with or without fee is hereby granted, +# provided that the above copyright notice and this permission notice +# appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""DNS Zones.""" + +import re +import sys +from typing import Any, Iterable, List, Optional, Set, Tuple, Union + +import dns.exception +import dns.grange +import dns.name +import dns.node +import dns.rdata +import dns.rdataclass +import dns.rdatatype +import dns.rdtypes.ANY.SOA +import dns.rrset +import dns.tokenizer +import dns.transaction +import dns.ttl + + +class UnknownOrigin(dns.exception.DNSException): + """Unknown origin""" + + +class CNAMEAndOtherData(dns.exception.DNSException): + """A node has a CNAME and other data""" + + +def _check_cname_and_other_data(txn, name, rdataset): + rdataset_kind = dns.node.NodeKind.classify_rdataset(rdataset) + node = txn.get_node(name) + if node is None: + # empty nodes are neutral. + return + node_kind = node.classify() + if ( + node_kind == dns.node.NodeKind.CNAME + and rdataset_kind == dns.node.NodeKind.REGULAR + ): + raise CNAMEAndOtherData("rdataset type is not compatible with a CNAME node") + elif ( + node_kind == dns.node.NodeKind.REGULAR + and rdataset_kind == dns.node.NodeKind.CNAME + ): + raise CNAMEAndOtherData( + "CNAME rdataset is not compatible with a regular data node" + ) + # Otherwise at least one of the node and the rdataset is neutral, so + # adding the rdataset is ok + + +SavedStateType = Tuple[ + dns.tokenizer.Tokenizer, + Optional[dns.name.Name], # current_origin + Optional[dns.name.Name], # last_name + Optional[Any], # current_file + int, # last_ttl + bool, # last_ttl_known + int, # default_ttl + bool, +] # default_ttl_known + + +def _upper_dollarize(s): + s = s.upper() + if not s.startswith("$"): + s = "$" + s + return s + + +class Reader: + + """Read a DNS zone file into a transaction.""" + + def __init__( + self, + tok: dns.tokenizer.Tokenizer, + rdclass: dns.rdataclass.RdataClass, + txn: dns.transaction.Transaction, + allow_include: bool = False, + allow_directives: Union[bool, Iterable[str]] = True, + force_name: Optional[dns.name.Name] = None, + force_ttl: Optional[int] = None, + force_rdclass: Optional[dns.rdataclass.RdataClass] = None, + force_rdtype: Optional[dns.rdatatype.RdataType] = None, + default_ttl: Optional[int] = None, + ): + self.tok = tok + (self.zone_origin, self.relativize, _) = txn.manager.origin_information() + self.current_origin = self.zone_origin + self.last_ttl = 0 + self.last_ttl_known = False + if force_ttl is not None: + default_ttl = force_ttl + if default_ttl is None: + self.default_ttl = 0 + self.default_ttl_known = False + else: + self.default_ttl = default_ttl + self.default_ttl_known = True + self.last_name = self.current_origin + self.zone_rdclass = rdclass + self.txn = txn + self.saved_state: List[SavedStateType] = [] + self.current_file: Optional[Any] = None + self.allowed_directives: Set[str] + if allow_directives is True: + self.allowed_directives 
= {"$GENERATE", "$ORIGIN", "$TTL"} + if allow_include: + self.allowed_directives.add("$INCLUDE") + elif allow_directives is False: + # allow_include was ignored in earlier releases if allow_directives was + # False, so we continue that. + self.allowed_directives = set() + else: + # Note that if directives are explicitly specified, then allow_include + # is ignored. + self.allowed_directives = set(_upper_dollarize(d) for d in allow_directives) + self.force_name = force_name + self.force_ttl = force_ttl + self.force_rdclass = force_rdclass + self.force_rdtype = force_rdtype + self.txn.check_put_rdataset(_check_cname_and_other_data) + + def _eat_line(self): + while 1: + token = self.tok.get() + if token.is_eol_or_eof(): + break + + def _get_identifier(self): + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + return token + + def _rr_line(self): + """Process one line from a DNS zone file.""" + token = None + # Name + if self.force_name is not None: + name = self.force_name + else: + if self.current_origin is None: + raise UnknownOrigin + token = self.tok.get(want_leading=True) + if not token.is_whitespace(): + self.last_name = self.tok.as_name(token, self.current_origin) + else: + token = self.tok.get() + if token.is_eol_or_eof(): + # treat leading WS followed by EOL/EOF as if they were EOL/EOF. + return + self.tok.unget(token) + name = self.last_name + if not name.is_subdomain(self.zone_origin): + self._eat_line() + return + if self.relativize: + name = name.relativize(self.zone_origin) + + # TTL + if self.force_ttl is not None: + ttl = self.force_ttl + self.last_ttl = ttl + self.last_ttl_known = True + else: + token = self._get_identifier() + ttl = None + try: + ttl = dns.ttl.from_text(token.value) + self.last_ttl = ttl + self.last_ttl_known = True + token = None + except dns.ttl.BadTTL: + self.tok.unget(token) + + # Class + if self.force_rdclass is not None: + rdclass = self.force_rdclass + else: + token = self._get_identifier() + try: + rdclass = dns.rdataclass.from_text(token.value) + except dns.exception.SyntaxError: + raise + except Exception: + rdclass = self.zone_rdclass + self.tok.unget(token) + if rdclass != self.zone_rdclass: + raise dns.exception.SyntaxError("RR class is not zone's class") + + if ttl is None: + # support for <class> <ttl> <type> syntax + token = self._get_identifier() + ttl = None + try: + ttl = dns.ttl.from_text(token.value) + self.last_ttl = ttl + self.last_ttl_known = True + token = None + except dns.ttl.BadTTL: + if self.default_ttl_known: + ttl = self.default_ttl + elif self.last_ttl_known: + ttl = self.last_ttl + self.tok.unget(token) + + # Type + if self.force_rdtype is not None: + rdtype = self.force_rdtype + else: + token = self._get_identifier() + try: + rdtype = dns.rdatatype.from_text(token.value) + except Exception: + raise dns.exception.SyntaxError("unknown rdatatype '%s'" % token.value) + + try: + rd = dns.rdata.from_text( + rdclass, + rdtype, + self.tok, + self.current_origin, + self.relativize, + self.zone_origin, + ) + except dns.exception.SyntaxError: + # Catch and reraise. + raise + except Exception: + # All exceptions that occur in the processing of rdata + # are treated as syntax errors. This is not strictly + # correct, but it is correct almost all of the time. + # We convert them to syntax errors so that we can emit + # helpful filename:line info. 
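+            # Capture the original exception type and value for the message.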
+ (ty, va) = sys.exc_info()[:2] + raise dns.exception.SyntaxError( + "caught exception {}: {}".format(str(ty), str(va)) + ) + + if not self.default_ttl_known and rdtype == dns.rdatatype.SOA: + # The pre-RFC2308 and pre-BIND9 behavior inherits the zone default + # TTL from the SOA minttl if no $TTL statement is present before the + # SOA is parsed. + self.default_ttl = rd.minimum + self.default_ttl_known = True + if ttl is None: + # if we didn't have a TTL on the SOA, set it! + ttl = rd.minimum + + # TTL check. We had to wait until now to do this as the SOA RR's + # own TTL can be inferred from its minimum. + if ttl is None: + raise dns.exception.SyntaxError("Missing default TTL value") + + self.txn.add(name, ttl, rd) + + def _parse_modify(self, side: str) -> Tuple[str, str, int, int, str]: + # Here we catch everything in '{' '}' in a group so we can replace it + # with ''. + is_generate1 = re.compile(r"^.*\$({(\+|-?)(\d+),(\d+),(.)}).*$") + is_generate2 = re.compile(r"^.*\$({(\+|-?)(\d+)}).*$") + is_generate3 = re.compile(r"^.*\$({(\+|-?)(\d+),(\d+)}).*$") + # Sometimes there are modifiers in the hostname. These come after + # the dollar sign. They are in the form: ${offset[,width[,base]]}. + # Make names + g1 = is_generate1.match(side) + if g1: + mod, sign, offset, width, base = g1.groups() + if sign == "": + sign = "+" + g2 = is_generate2.match(side) + if g2: + mod, sign, offset = g2.groups() + if sign == "": + sign = "+" + width = 0 + base = "d" + g3 = is_generate3.match(side) + if g3: + mod, sign, offset, width = g3.groups() + if sign == "": + sign = "+" + base = "d" + + if not (g1 or g2 or g3): + mod = "" + sign = "+" + offset = 0 + width = 0 + base = "d" + + offset = int(offset) + width = int(width) + + if sign not in ["+", "-"]: + raise dns.exception.SyntaxError("invalid offset sign %s" % sign) + if base not in ["d", "o", "x", "X", "n", "N"]: + raise dns.exception.SyntaxError("invalid type %s" % base) + + return mod, sign, offset, width, base + + def _generate_line(self): + # range lhs [ttl] [class] type rhs [ comment ] + """Process one line containing the GENERATE statement from a DNS + zone file.""" + if self.current_origin is None: + raise UnknownOrigin + + token = self.tok.get() + # Range (required) + try: + start, stop, step = dns.grange.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except Exception: + raise dns.exception.SyntaxError + + # lhs (required) + try: + lhs = token.value + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except Exception: + raise dns.exception.SyntaxError + + # TTL + try: + ttl = dns.ttl.from_text(token.value) + self.last_ttl = ttl + self.last_ttl_known = True + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.ttl.BadTTL: + if not (self.last_ttl_known or self.default_ttl_known): + raise dns.exception.SyntaxError("Missing default TTL value") + if self.default_ttl_known: + ttl = self.default_ttl + elif self.last_ttl_known: + ttl = self.last_ttl + # Class + try: + rdclass = dns.rdataclass.from_text(token.value) + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except dns.exception.SyntaxError: + raise dns.exception.SyntaxError + except Exception: + rdclass = self.zone_rdclass + if rdclass != self.zone_rdclass: + raise dns.exception.SyntaxError("RR class is not zone's class") + # Type + try: + rdtype = dns.rdatatype.from_text(token.value) + token = 
self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError + except Exception: + raise dns.exception.SyntaxError("unknown rdatatype '%s'" % token.value) + + # rhs (required) + rhs = token.value + + def _calculate_index(counter: int, offset_sign: str, offset: int) -> int: + """Calculate the index from the counter and offset.""" + if offset_sign == "-": + offset *= -1 + return counter + offset + + def _format_index(index: int, base: str, width: int) -> str: + """Format the index with the given base, and zero-fill it + to the given width.""" + if base in ["d", "o", "x", "X"]: + return format(index, base).zfill(width) + + # base can only be n or N here + hexa = _format_index(index, "x", width) + nibbles = ".".join(hexa[::-1])[:width] + if base == "N": + nibbles = nibbles.upper() + return nibbles + + lmod, lsign, loffset, lwidth, lbase = self._parse_modify(lhs) + rmod, rsign, roffset, rwidth, rbase = self._parse_modify(rhs) + for i in range(start, stop + 1, step): + # +1 because bind is inclusive and python is exclusive + + lindex = _calculate_index(i, lsign, loffset) + rindex = _calculate_index(i, rsign, roffset) + + lzfindex = _format_index(lindex, lbase, lwidth) + rzfindex = _format_index(rindex, rbase, rwidth) + + name = lhs.replace("$%s" % (lmod), lzfindex) + rdata = rhs.replace("$%s" % (rmod), rzfindex) + + self.last_name = dns.name.from_text( + name, self.current_origin, self.tok.idna_codec + ) + name = self.last_name + if not name.is_subdomain(self.zone_origin): + self._eat_line() + return + if self.relativize: + name = name.relativize(self.zone_origin) + + try: + rd = dns.rdata.from_text( + rdclass, + rdtype, + rdata, + self.current_origin, + self.relativize, + self.zone_origin, + ) + except dns.exception.SyntaxError: + # Catch and reraise. + raise + except Exception: + # All exceptions that occur in the processing of rdata + # are treated as syntax errors. This is not strictly + # correct, but it is correct almost all of the time. + # We convert them to syntax errors so that we can emit + # helpful filename:line info. + (ty, va) = sys.exc_info()[:2] + raise dns.exception.SyntaxError( + "caught exception %s: %s" % (str(ty), str(va)) + ) + + self.txn.add(name, ttl, rd) + + def read(self) -> None: + """Read a DNS zone file and build a zone object. 
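+
+        An illustrative sketch of typical use (``tok``, ``rdclass`` and
+        ``txn`` are assumed to be set up as in ``dns.zone.from_text()``,
+        which is the usual entry point rather than calling this directly)::
+
+            reader = Reader(tok, rdclass, txn)
+            reader.read()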
+ + @raises dns.zone.NoSOA: No SOA RR was found at the zone origin + @raises dns.zone.NoNS: No NS RRset was found at the zone origin + """ + + try: + while 1: + token = self.tok.get(True, True) + if token.is_eof(): + if self.current_file is not None: + self.current_file.close() + if len(self.saved_state) > 0: + ( + self.tok, + self.current_origin, + self.last_name, + self.current_file, + self.last_ttl, + self.last_ttl_known, + self.default_ttl, + self.default_ttl_known, + ) = self.saved_state.pop(-1) + continue + break + elif token.is_eol(): + continue + elif token.is_comment(): + self.tok.get_eol() + continue + elif token.value[0] == "$" and len(self.allowed_directives) > 0: + # Note that we only run directive processing code if at least + # one directive is allowed in order to be backwards compatible + c = token.value.upper() + if c not in self.allowed_directives: + raise dns.exception.SyntaxError( + f"zone file directive '{c}' is not allowed" + ) + if c == "$TTL": + token = self.tok.get() + if not token.is_identifier(): + raise dns.exception.SyntaxError("bad $TTL") + self.default_ttl = dns.ttl.from_text(token.value) + self.default_ttl_known = True + self.tok.get_eol() + elif c == "$ORIGIN": + self.current_origin = self.tok.get_name() + self.tok.get_eol() + if self.zone_origin is None: + self.zone_origin = self.current_origin + self.txn._set_origin(self.current_origin) + elif c == "$INCLUDE": + token = self.tok.get() + filename = token.value + token = self.tok.get() + new_origin: Optional[dns.name.Name] + if token.is_identifier(): + new_origin = dns.name.from_text( + token.value, self.current_origin, self.tok.idna_codec + ) + self.tok.get_eol() + elif not token.is_eol_or_eof(): + raise dns.exception.SyntaxError("bad origin in $INCLUDE") + else: + new_origin = self.current_origin + self.saved_state.append( + ( + self.tok, + self.current_origin, + self.last_name, + self.current_file, + self.last_ttl, + self.last_ttl_known, + self.default_ttl, + self.default_ttl_known, + ) + ) + self.current_file = open(filename, "r") + self.tok = dns.tokenizer.Tokenizer(self.current_file, filename) + self.current_origin = new_origin + elif c == "$GENERATE": + self._generate_line() + else: + raise dns.exception.SyntaxError( + f"Unknown zone file directive '{c}'" + ) + continue + self.tok.unget(token) + self._rr_line() + except dns.exception.SyntaxError as detail: + (filename, line_number) = self.tok.where() + if detail is None: + detail = "syntax error" + ex = dns.exception.SyntaxError( + "%s:%d: %s" % (filename, line_number, detail) + ) + tb = sys.exc_info()[2] + raise ex.with_traceback(tb) from None + + +class RRsetsReaderTransaction(dns.transaction.Transaction): + def __init__(self, manager, replacement, read_only): + assert not read_only + super().__init__(manager, replacement, read_only) + self.rdatasets = {} + + def _get_rdataset(self, name, rdtype, covers): + return self.rdatasets.get((name, rdtype, covers)) + + def _get_node(self, name): + rdatasets = [] + for (rdataset_name, _, _), rdataset in self.rdatasets.items(): + if name == rdataset_name: + rdatasets.append(rdataset) + if len(rdatasets) == 0: + return None + node = dns.node.Node() + node.rdatasets = rdatasets + return node + + def _put_rdataset(self, name, rdataset): + self.rdatasets[(name, rdataset.rdtype, rdataset.covers)] = rdataset + + def _delete_name(self, name): + # First remove any changes involving the name + remove = [] + for key in self.rdatasets: + if key[0] == name: + remove.append(key) + if len(remove) > 0: + for key in remove: 
+ del self.rdatasets[key] + + def _delete_rdataset(self, name, rdtype, covers): + try: + del self.rdatasets[(name, rdtype, covers)] + except KeyError: + pass + + def _name_exists(self, name): + for n, _, _ in self.rdatasets: + if n == name: + return True + return False + + def _changed(self): + return len(self.rdatasets) > 0 + + def _end_transaction(self, commit): + if commit and self._changed(): + rrsets = [] + for (name, _, _), rdataset in self.rdatasets.items(): + rrset = dns.rrset.RRset( + name, rdataset.rdclass, rdataset.rdtype, rdataset.covers + ) + rrset.update(rdataset) + rrsets.append(rrset) + self.manager.set_rrsets(rrsets) + + def _set_origin(self, origin): + pass + + def _iterate_rdatasets(self): + raise NotImplementedError # pragma: no cover + + def _iterate_names(self): + raise NotImplementedError # pragma: no cover + + +class RRSetsReaderManager(dns.transaction.TransactionManager): + def __init__( + self, origin=dns.name.root, relativize=False, rdclass=dns.rdataclass.IN + ): + self.origin = origin + self.relativize = relativize + self.rdclass = rdclass + self.rrsets = [] + + def reader(self): # pragma: no cover + raise NotImplementedError + + def writer(self, replacement=False): + assert replacement is True + return RRsetsReaderTransaction(self, True, False) + + def get_class(self): + return self.rdclass + + def origin_information(self): + if self.relativize: + effective = dns.name.empty + else: + effective = self.origin + return (self.origin, self.relativize, effective) + + def set_rrsets(self, rrsets): + self.rrsets = rrsets + + +def read_rrsets( + text: Any, + name: Optional[Union[dns.name.Name, str]] = None, + ttl: Optional[int] = None, + rdclass: Optional[Union[dns.rdataclass.RdataClass, str]] = dns.rdataclass.IN, + default_rdclass: Union[dns.rdataclass.RdataClass, str] = dns.rdataclass.IN, + rdtype: Optional[Union[dns.rdatatype.RdataType, str]] = None, + default_ttl: Optional[Union[int, str]] = None, + idna_codec: Optional[dns.name.IDNACodec] = None, + origin: Optional[Union[dns.name.Name, str]] = dns.name.root, + relativize: bool = False, +) -> List[dns.rrset.RRset]: + """Read one or more rrsets from the specified text, possibly subject + to restrictions. + + *text*, a file object or a string, is the input to process. + + *name*, a string, ``dns.name.Name``, or ``None``, is the owner name of + the rrset. If not ``None``, then the owner name is "forced", and the + input must not specify an owner name. If ``None``, then any owner names + are allowed and must be present in the input. + + *ttl*, an ``int``, string, or None. If not ``None``, then the TTL is + forced to be the specified value and the input must not specify a TTL. + If ``None``, then a TTL may be specified in the input. If it is not + specified, then the *default_ttl* will be used. + + *rdclass*, a ``dns.rdataclass.RdataClass``, string, or ``None``. If + not ``None``, then the class is forced to the specified value, and the + input must not specify a class. If ``None``, then the input may specify + a class that matches *default_rdclass*. Note that it is not possible to + return rrsets with differing classes; specifying ``None`` for the class + simply allows the user to optionally type a class as that may be convenient + when cutting and pasting. + + *default_rdclass*, a ``dns.rdataclass.RdataClass`` or string. The class + of the returned rrsets. + + *rdtype*, a ``dns.rdatatype.RdataType``, string, or ``None``.
If not + ``None``, then the type is forced to the specified value, and the + input must not specify a type. If ``None``, then a type must be present + for each RR. + + *default_ttl*, an ``int``, string, or ``None``. If not ``None``, then if + the TTL is not forced and is not specified, then this value will be used. + If ``None``, then if the TTL is not forced an error will occur if the TTL + is not specified. + + *idna_codec*, a ``dns.name.IDNACodec``, specifies the IDNA + encoder/decoder. If ``None``, the default IDNA 2003 encoder/decoder + is used. Note that codecs only apply to the owner name; dnspython does + not do IDNA for names in rdata, as there is no IDNA zonefile format. + + *origin*, a string, ``dns.name.Name``, or ``None``, is the origin for any + relative names in the input, and also the origin to relativize to if + *relativize* is ``True``. + + *relativize*, a bool. If ``True``, names are relativized to the *origin*; + if ``False`` then any relative names in the input are made absolute by + appending the *origin*. + """ + if isinstance(origin, str): + origin = dns.name.from_text(origin, dns.name.root, idna_codec) + if isinstance(name, str): + name = dns.name.from_text(name, origin, idna_codec) + if isinstance(ttl, str): + ttl = dns.ttl.from_text(ttl) + if isinstance(default_ttl, str): + default_ttl = dns.ttl.from_text(default_ttl) + if rdclass is not None: + rdclass = dns.rdataclass.RdataClass.make(rdclass) + else: + rdclass = None + default_rdclass = dns.rdataclass.RdataClass.make(default_rdclass) + if rdtype is not None: + rdtype = dns.rdatatype.RdataType.make(rdtype) + else: + rdtype = None + manager = RRSetsReaderManager(origin, relativize, default_rdclass) + with manager.writer(True) as txn: + tok = dns.tokenizer.Tokenizer(text, "<input>", idna_codec=idna_codec) + reader = Reader( + tok, + default_rdclass, + txn, + allow_directives=False, + force_name=name, + force_ttl=ttl, + force_rdclass=rdclass, + force_rdtype=rdtype, + default_ttl=default_ttl, + ) + reader.read() + return manager.rrsets diff --git a/backend/test/lib/python3.8/site-packages/dns/zonetypes.py b/backend/test/lib/python3.8/site-packages/dns/zonetypes.py new file mode 100644 index 0000000000000000000000000000000000000000..195ee2ec9b5f62e15d27f196d5f4244f4290f0b4 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dns/zonetypes.py @@ -0,0 +1,37 @@ +# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license + +"""Common zone-related types.""" + +# This is a separate file to avoid import circularity between dns.zone and +# the implementation of the ZONEMD type.
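+#
+# A usage sketch (not part of the module itself): ZONEMD code can look up a
+# hash constructor by algorithm number from the table defined below, roughly:
+#
+#     hasher = _digest_hashers[DigestHashAlgorithm.SHA384]()
+#     hasher.update(zone_wire_data)  # stand-in for the zone's canonical wire form
+#     digest = hasher.digest()
+#
+# _digest_hashers is module-private and is intended for dnspython's own
+# ZONEMD support (e.g. in dns.zone) rather than for direct use by callers.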
+ +import hashlib + +import dns.enum + + +class DigestScheme(dns.enum.IntEnum): + """ZONEMD Scheme""" + + SIMPLE = 1 + + @classmethod + def _maximum(cls): + return 255 + + +class DigestHashAlgorithm(dns.enum.IntEnum): + """ZONEMD Hash Algorithm""" + + SHA384 = 1 + SHA512 = 2 + + @classmethod + def _maximum(cls): + return 255 + + +_digest_hashers = { + DigestHashAlgorithm.SHA384: hashlib.sha384, + DigestHashAlgorithm.SHA512: hashlib.sha512, +} diff --git a/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/INSTALLER b/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/LICENSE b/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..390a726dceb02e67bae01849000eb44632f01703 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/LICENSE @@ -0,0 +1,35 @@ +ISC License + +Copyright (C) Dnspython Contributors + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all +copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL +DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR +PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + + +Copyright (C) 2001-2017 Nominum, Inc. +Copyright (C) Google Inc. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose with or without fee is hereby granted, +provided that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
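A note on the `read_rrsets()` helper added in `dns/zonefile.py` above, since it is the function most likely to be called directly: a minimal sketch of its use (the owner name, TTL, and addresses here are illustrative):

```python
import dns.zonefile

# Owner names, TTLs, and types come from the text itself; the class
# defaults to IN and therefore must not appear in the input here.
rrsets = dns.zonefile.read_rrsets(
    "www 300 A 10.0.0.1\nwww 300 A 10.0.0.2\n",
    origin="example.",
)
for rrset in rrsets:
    # Both rdatas share (name, type), so they arrive as a single RRset,
    # printed one line per rdata, e.g. "www.example. 300 IN A 10.0.0.1".
    print(rrset)
```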
diff --git a/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/METADATA b/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..a69819a3fa370e610e5abeca707d2d6f9ef30190 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/METADATA @@ -0,0 +1,128 @@ +Metadata-Version: 2.1 +Name: dnspython +Version: 2.4.2 +Summary: DNS toolkit +Home-page: https://www.dnspython.org +License: ISC +Author: Bob Halley +Author-email: halley@dnspython.org +Requires-Python: >=3.8,<4.0 +Classifier: License :: OSI Approved +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Provides-Extra: dnssec +Provides-Extra: doh +Provides-Extra: doq +Provides-Extra: idna +Provides-Extra: trio +Provides-Extra: wmi +Requires-Dist: aioquic (>=0.9.20) ; extra == "doq" +Requires-Dist: cryptography (>=2.6,<42.0) ; extra == "dnssec" +Requires-Dist: h2 (>=4.1.0) ; extra == "doh" +Requires-Dist: httpcore (>=0.17.3) ; extra == "doh" +Requires-Dist: httpx (>=0.24.1) ; extra == "doh" +Requires-Dist: idna (>=2.1,<4.0) ; extra == "idna" +Requires-Dist: trio (>=0.14,<0.23) ; extra == "trio" +Requires-Dist: wmi (>=1.5.1,<2.0.0) ; extra == "wmi" +Project-URL: Bug Tracker, https://github.com/rthalley/dnspython/issues +Project-URL: Documentation, https://dnspython.readthedocs.io/en/stable/ +Project-URL: Repository, https://github.com/rthalley/dnspython.git +Description-Content-Type: text/markdown + +# dnspython + +[](https://github.com/rthalley/dnspython/actions/) +[](https://dnspython.readthedocs.io/en/latest/?badge=latest) +[](https://badge.fury.io/py/dnspython) +[](https://opensource.org/licenses/ISC) +[](https://codecov.io/github/rthalley/dnspython) +[](https://github.com/psf/black) + +## INTRODUCTION + +dnspython is a DNS toolkit for Python. It supports almost all record types. It +can be used for queries, zone transfers, and dynamic updates. It supports TSIG +authenticated messages and EDNS0. + +dnspython provides both high and low level access to DNS. The high level classes +perform queries for data of a given name, type, and class, and return an answer +set. The low level classes allow direct manipulation of DNS zones, messages, +names, and records. + +To see a few of the ways dnspython can be used, look in the `examples/` +directory. + +dnspython is a utility for working with DNS; it does not consult `/etc/hosts`. For +simple forward DNS lookups, it's better to use `socket.getaddrinfo()` or +`socket.gethostbyname()`. + +dnspython originated at Nominum where it was developed +to facilitate the testing of DNS software. + +## ABOUT THIS RELEASE + +This is dnspython 2.4.2. +Please read +[What's New](https://dnspython.readthedocs.io/en/stable/whatsnew.html) for +information about the changes in this release. + +## INSTALLATION + +* Many distributions have dnspython packaged for you, so you should + check there first.
+* To use a wheel downloaded from PyPI, run: + + pip install dnspython + +* To install from the source code, go into the top-level of the source code + and run: + +``` + pip install --upgrade pip build + python -m build + pip install dist/*.whl +``` + +* To install the latest from the master branch, run `pip install git+https://github.com/rthalley/dnspython.git` + +Dnspython's default installation does not depend on any modules other than +those in the Python standard library. To use some features, additional modules +must be installed. For convenience, pip options are defined for the +requirements. + +If you want to use DNS-over-HTTPS, run +`pip install dnspython[doh]`. + +If you want to use DNSSEC functionality, run +`pip install dnspython[dnssec]`. + +If you want to use internationalized domain names (IDNA) +functionality, you must run +`pip install dnspython[idna]`. + +If you want to use the Trio asynchronous I/O package, run +`pip install dnspython[trio]`. + +If you want to use WMI on Windows to determine the active DNS settings +instead of the default registry scanning method, run +`pip install dnspython[wmi]`. + +If you want to try the experimental DNS-over-QUIC code, run +`pip install dnspython[doq]`. + +Note that you can install any combination of the above, e.g.: +`pip install dnspython[doh,dnssec,idna]` + +### Notices + +Python 2.x support ended with the release of 1.16.0. Dnspython 2.0.0 through +2.2.x support Python 3.6 and later. For dnspython 2.3.x, the minimum +supported Python version is 3.7, and for 2.4.x the minimum supported version is 3.8. +We plan to align future support with the lifetime of the Python 3 versions. + +Documentation has moved to +[dnspython.readthedocs.io](https://dnspython.readthedocs.io). + diff --git a/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/RECORD b/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..562387f17efdd290cb9f0110266433dd9d616982 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/RECORD @@ -0,0 +1,288 @@ +dns/__init__.py,sha256=YJZtDG14Idw5ui3h1nWooSwPM9gsxQgB8M0GBZ3aly0,1663 +dns/__pycache__/__init__.cpython-38.pyc,, +dns/__pycache__/_asyncbackend.cpython-38.pyc,, +dns/__pycache__/_asyncio_backend.cpython-38.pyc,, +dns/__pycache__/_ddr.cpython-38.pyc,, +dns/__pycache__/_immutable_ctx.cpython-38.pyc,, +dns/__pycache__/_trio_backend.cpython-38.pyc,, +dns/__pycache__/asyncbackend.cpython-38.pyc,, +dns/__pycache__/asyncquery.cpython-38.pyc,, +dns/__pycache__/asyncresolver.cpython-38.pyc,, +dns/__pycache__/dnssec.cpython-38.pyc,, +dns/__pycache__/dnssectypes.cpython-38.pyc,, +dns/__pycache__/e164.cpython-38.pyc,, +dns/__pycache__/edns.cpython-38.pyc,, +dns/__pycache__/entropy.cpython-38.pyc,, +dns/__pycache__/enum.cpython-38.pyc,, +dns/__pycache__/exception.cpython-38.pyc,, +dns/__pycache__/flags.cpython-38.pyc,, +dns/__pycache__/grange.cpython-38.pyc,, +dns/__pycache__/immutable.cpython-38.pyc,, +dns/__pycache__/inet.cpython-38.pyc,, +dns/__pycache__/ipv4.cpython-38.pyc,, +dns/__pycache__/ipv6.cpython-38.pyc,, +dns/__pycache__/message.cpython-38.pyc,, +dns/__pycache__/name.cpython-38.pyc,, +dns/__pycache__/namedict.cpython-38.pyc,, +dns/__pycache__/nameserver.cpython-38.pyc,, +dns/__pycache__/node.cpython-38.pyc,, +dns/__pycache__/opcode.cpython-38.pyc,, +dns/__pycache__/query.cpython-38.pyc,, +dns/__pycache__/rcode.cpython-38.pyc,, +dns/__pycache__/rdata.cpython-38.pyc,,
+dns/__pycache__/rdataclass.cpython-38.pyc,, +dns/__pycache__/rdataset.cpython-38.pyc,, +dns/__pycache__/rdatatype.cpython-38.pyc,, +dns/__pycache__/renderer.cpython-38.pyc,, +dns/__pycache__/resolver.cpython-38.pyc,, +dns/__pycache__/reversename.cpython-38.pyc,, +dns/__pycache__/rrset.cpython-38.pyc,, +dns/__pycache__/serial.cpython-38.pyc,, +dns/__pycache__/set.cpython-38.pyc,, +dns/__pycache__/tokenizer.cpython-38.pyc,, +dns/__pycache__/transaction.cpython-38.pyc,, +dns/__pycache__/tsig.cpython-38.pyc,, +dns/__pycache__/tsigkeyring.cpython-38.pyc,, +dns/__pycache__/ttl.cpython-38.pyc,, +dns/__pycache__/update.cpython-38.pyc,, +dns/__pycache__/version.cpython-38.pyc,, +dns/__pycache__/versioned.cpython-38.pyc,, +dns/__pycache__/win32util.cpython-38.pyc,, +dns/__pycache__/wire.cpython-38.pyc,, +dns/__pycache__/xfr.cpython-38.pyc,, +dns/__pycache__/zone.cpython-38.pyc,, +dns/__pycache__/zonefile.cpython-38.pyc,, +dns/__pycache__/zonetypes.cpython-38.pyc,, +dns/_asyncbackend.py,sha256=Ny0kGesm9wbLBnt-0u-tANOKsxcYt2jbMuRoRz_JZUA,2360 +dns/_asyncio_backend.py,sha256=Il3f2OGH0Q2EnToE4F52p-FQFrIw3WgpsyNZG1JNUFQ,8970 +dns/_ddr.py,sha256=rHXKC8kncCTT9N4KBh1flicl79nyDjQ-DDvq30MJ3B8,5247 +dns/_immutable_ctx.py,sha256=gtoCLMmdHXI23zt5lRSIS3A4Ca3jZJngebdoFFOtiwU,2459 +dns/_trio_backend.py,sha256=GVVnbYqhUyvKbwMiDH_8YSZ2B_XYyjlZ6dRoGAYD5vw,8150 +dns/asyncbackend.py,sha256=F8YnEkFKXAhJ-2DM3_EQ1o7UGq-8FpEj4arSyMvqkO8,2794 +dns/asyncquery.py,sha256=AZ4nIpOMzZJfIb-Y8Ba0We6VQ7kTU7qFQtS5NP_lazw,26057 +dns/asyncresolver.py,sha256=GD86dCyW9YGKs6SggWXwBKEXifW7Qdx4cEAGFKY6fA4,17852 +dns/dnssec.py,sha256=uk3eiNbhHR8OmZJpqh2NLrBRV2329RukbuFNSC0whpA,40663 +dns/dnssecalgs/__init__.py,sha256=qU3J8IH7DiGMGHOoXLM7cPdbm9sXVkUcHjK9rnPtSPM,4270 +dns/dnssecalgs/__pycache__/__init__.cpython-38.pyc,, +dns/dnssecalgs/__pycache__/base.cpython-38.pyc,, +dns/dnssecalgs/__pycache__/cryptography.cpython-38.pyc,, +dns/dnssecalgs/__pycache__/dsa.cpython-38.pyc,, +dns/dnssecalgs/__pycache__/ecdsa.cpython-38.pyc,, +dns/dnssecalgs/__pycache__/eddsa.cpython-38.pyc,, +dns/dnssecalgs/__pycache__/rsa.cpython-38.pyc,, +dns/dnssecalgs/base.py,sha256=hsFHFr_eCYeDcI0eU6_WiLlOYL0GR4QJ__sXoMrIAfE,2446 +dns/dnssecalgs/cryptography.py,sha256=3uqMfRm-zCkJPOrxUqlu9CmdxIMy71dVor9eAHi0wZM,2425 +dns/dnssecalgs/dsa.py,sha256=hklh_HkT_ZffQBHQ7t6pKUStTH4x5nXlz8R9RUP72aY,3497 +dns/dnssecalgs/ecdsa.py,sha256=GWrJgEXAK08MCdbLk7LQcD2ajKqW_dbONWXh3wieLzw,3016 +dns/dnssecalgs/eddsa.py,sha256=9lQQZ92f2PiIhhylieInO-19aSTDQiyoY8X2kTkGlcs,1914 +dns/dnssecalgs/rsa.py,sha256=jWkhWKByylIo7Y9gAiiO8t8bowF8IZ0siVjgZpdhLSE,3555 +dns/dnssectypes.py,sha256=CyeuGTS_rM3zXr8wD9qMT9jkzvVfTY2JWckUcogG83E,1799 +dns/e164.py,sha256=EsK8cnOtOx7kQ0DmSwibcwkzp6efMWjbRiTyHZO8Q-M,3978 +dns/edns.py,sha256=zV-_hBJXyo573G5RuADpiKcJPAEM_bO-Rhbi1rW9iKM,14004 +dns/entropy.py,sha256=qkG8hXDLzrJS6R5My26iA59c0RhPwJNzuOhOCAZU5Bw,4242 +dns/enum.py,sha256=EepaunPKixTSrascy7iAe9UQEXXxP_MB5Gx4jUpHIhg,3691 +dns/exception.py,sha256=FphWy-JLRG06UUUq2VmUGwdPA1xWja_8YfrcffRFlQs,5957 +dns/flags.py,sha256=cQ3kTFyvcKiWHAxI5AwchNqxVOrsIrgJ6brgrH42Wq8,2750 +dns/grange.py,sha256=HA623Mv2mZDmOK_BZNDDakT0L6EHsMQU9lFFkE8dKr0,2148 +dns/immutable.py,sha256=vvdKWL9jeOCWGb3vBHcjdJqO7ZqY7Cc3jlG7alFFFOo,1836 +dns/inet.py,sha256=pSjZ7qnGQ3sfqcnHuwBaX3NezrOwNRrDV3pZV73Je-w,5278 +dns/ipv4.py,sha256=E3P-j6Hq40CROhx5TPJHyL2hZwheLXzEuo4oJRFFqeg,2064 +dns/ipv6.py,sha256=5EgxQ3q_LNNPqXETkueZQQUl6LYj0HK8FRofwtILfR0,6192 +dns/message.py,sha256=6OXRaNfhSJ_xrTuhUZJ3CdN_kLJ_s78J3kh4O5GHJ_s,63161 
+dns/name.py,sha256=sdI8_WQizVfBGUEg_oS0a7qHdCpiNEX9mGbwXF3uxAY,34424 +dns/namedict.py,sha256=hJRYpKeQv6Bd2LaUOPV0L_a0eXEIuqgggPXaH4c3Tow,4000 +dns/nameserver.py,sha256=NqCanrWjj3kQWWsjHgCQ5B3xVOlg5URhYeS1rc7RPZI,9096 +dns/node.py,sha256=58G51FNCWcXqJMUMefs9K1dYPAIpFDS-s-1YAVoqFkM,12664 +dns/opcode.py,sha256=I6JyuFUL0msja_BYm6bzXHfbbfqUod_69Ss4xcv8xWQ,2730 +dns/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +dns/query.py,sha256=9HRjCP8qHMoUCP6F6GS-ASv_sVXEEoCanP-Vvx8FFuE,51935 +dns/quic/__init__.py,sha256=cV_5B_I8JjgfuVUuTItyYyEqhMNl1RDK6abTal1RuBY,2163 +dns/quic/__pycache__/__init__.cpython-38.pyc,, +dns/quic/__pycache__/_asyncio.cpython-38.pyc,, +dns/quic/__pycache__/_common.cpython-38.pyc,, +dns/quic/__pycache__/_sync.cpython-38.pyc,, +dns/quic/__pycache__/_trio.cpython-38.pyc,, +dns/quic/_asyncio.py,sha256=1MqoikjGT4be9-aZG1Vq_ZMOY44oAKwLMoQdGkbW0yE,8003 +dns/quic/_common.py,sha256=7GORU3PVSMD8GpN4oR36HHjSHwAGz2tAqtXLF93GxNo,5503 +dns/quic/_sync.py,sha256=VWFW5ssX6O-g8Nipc_RzPSc5lFv9Tnv88CqbME1Uaes,7707 +dns/quic/_trio.py,sha256=hnz1WCPU8FYWZdbq97_Grq4nmDlOm1qSUrbk0CTkjxc,6544 +dns/rcode.py,sha256=N6JjrIQjCdJy0boKIp8Hcky5tm__LSDscpDz3rE_sgU,4156 +dns/rdata.py,sha256=brq_h01u_bHuLJ_xT1iue8HYXtNjdDqqTELLDRt5QYc,29557 +dns/rdataclass.py,sha256=TK4W4ywB1L_X7EZqk2Gmwnu7vdQpolQF5DtQWyNk5xo,2984 +dns/rdataset.py,sha256=pHywgIeDtJtTTnSb7x5bXzbNv3ORCBuw-ks74EW8qeo,17049 +dns/rdatatype.py,sha256=gIdYZ0iHRlgiTEO-ftobUANmaAmjTnNc4JljMaP1OnQ,7339 +dns/rdtypes/ANY/AFSDB.py,sha256=BpmfO1dxhDxaMCG2kejYQQVNvVmS-PKgwuZpDWBdWTc,1662 +dns/rdtypes/ANY/AMTRELAY.py,sha256=YO4zGhp8SBFibhNYu12Z3xZImvGicWfTcwS3ISiPgYc,3382 +dns/rdtypes/ANY/AVC.py,sha256=D6d0Ex0BThmFXmCWvN2Spa84siVZPiQZeXjuv0TLZrY,1025 +dns/rdtypes/ANY/CAA.py,sha256=fqSrgAtNrWeJMbQTLiJuCKyUGuUMeyrI0dHxRcG0NYk,2512 +dns/rdtypes/ANY/CDNSKEY.py,sha256=behtB7NJWa2I05fLoHrLktqNNjRvBxm5q84GQf3DkLM,1226 +dns/rdtypes/ANY/CDS.py,sha256=BSB8MHej1iN8Qbr2mY2hjJ4o8wXZxJ9yS_rIHCs_pBI,1164 +dns/rdtypes/ANY/CERT.py,sha256=w5tC8POadB_WesP9x2Ir7FhHusZ2WprI-nKPjedgJhc,3534 +dns/rdtypes/ANY/CNAME.py,sha256=dnligzy3ItR3cAh-BW8CkBOIN1iMUuuVwfkhah87G_c,1207 +dns/rdtypes/ANY/CSYNC.py,sha256=cvOarNvap-_4YxdvpkWplwWKoGgkB1trlmvKOp5yiQo,2440 +dns/rdtypes/ANY/DLV.py,sha256=CypU0r_3pU3f1_-lSBywvXjNXu1IFCEDvKELDP-5_qk,987 +dns/rdtypes/ANY/DNAME.py,sha256=0UalrV9mwXfTNRm1CBwu2KywSdchKVJ3w02dY8xXsg0,1151 +dns/rdtypes/ANY/DNSKEY.py,sha256=W47nY1dSxcO3TaXrEo8cWDcsYgPwQDT8h9s9PBKfANU,1224 +dns/rdtypes/ANY/DS.py,sha256=ZkA31leBr2FbB4HmuWK9yuLzkMFqSY9s4Q_8eEmBDvY,996 +dns/rdtypes/ANY/EUI48.py,sha256=j_VNoznxW9legrRI21qHXOTUoKcN1prwtRBx0jfmorI,1152 +dns/rdtypes/ANY/EUI64.py,sha256=QX7_T0t9dmqje7NcQU55qlAV0k7ZnlfaTrEc8H41KRo,1162 +dns/rdtypes/ANY/GPOS.py,sha256=KO16H4VcRe7P2o67jD4eMV1MuVK8W_-xtJeZSUKer3E,4434 +dns/rdtypes/ANY/HINFO.py,sha256=uzMjQoc286m7jFcZ7Bc388NIyG_6lrUM4GrskN6wUMM,2250 +dns/rdtypes/ANY/HIP.py,sha256=JSMKOVkAdM2jV4IoxSnRlIcEtldimL5z_6J1kMiHnRk,3229 +dns/rdtypes/ANY/ISDN.py,sha256=mOI9rSD8BqkdwvNKWwS580Rp-yq13Z_Q3MVoI0aXn_k,2714 +dns/rdtypes/ANY/L32.py,sha256=dF4DNMVOubJysF1YuoIDqpGhzEC7MDIK8r5MJS5WVLw,1287 +dns/rdtypes/ANY/L64.py,sha256=BQC32j-ED1jgNJkZuQ6cP1qpmPQrJU_WwYN4wXAujoE,1593 +dns/rdtypes/ANY/LOC.py,sha256=rr9WaEDZaT9akesOxGeSscXzPVkQO8e9OpVm9CJTsEc,12025 +dns/rdtypes/ANY/LP.py,sha256=Gc9WDLUJDLCVBiBv9OHHsASELdd7IFJ0LXUN8Ud6a8A,1339 +dns/rdtypes/ANY/MX.py,sha256=RqJSBgnejy70cBmhNLrDiTZxVpZnomujroJBWiozSb4,996 +dns/rdtypes/ANY/NID.py,sha256=ezcvntoK3FQ_LbtzVDkUWP1bIqmqOq2COcQ1EEQf_Uc,1545 
+dns/rdtypes/ANY/NINFO.py,sha256=-WoY0aN8T4fOFQPj2xUGpAwL-0RpH08jDZ5y-CLlb2Q,1042 +dns/rdtypes/ANY/NS.py,sha256=Y97NCXGAWRi5o8wcsr_5HDXVrU67wfmPYAPRLzs--AY,996 +dns/rdtypes/ANY/NSEC.py,sha256=gasas9ITgOCnXpcq-GDzOfmgHX-K4XQJ00aAirovXGs,2476 +dns/rdtypes/ANY/NSEC3.py,sha256=7N1X8XENEMy8Eu-OS-Sv1iR2IML-LL4IFgfpZE_uBP0,4152 +dns/rdtypes/ANY/NSEC3PARAM.py,sha256=wbiKFzM-d43zd2Yuy7lGwObXDufy7P9fDCypYKQmbXE,2636 +dns/rdtypes/ANY/OPENPGPKEY.py,sha256=rhcB9knQVTcSoS3yOamXzNUzxNuSbYQP0WkEUNF3k_g,1852 +dns/rdtypes/ANY/OPT.py,sha256=W9rUHHVxYyKE0cQnrojh2UQnEz5H0TCXtN5ag-KEpZU,2562 +dns/rdtypes/ANY/PTR.py,sha256=cUwuFKXckmAjabsBT3lMaHyHzA_24QDHUi5goFMXXuc,998 +dns/rdtypes/ANY/RP.py,sha256=FmX8WQ_XaGXYUoArud1T4lTi2yXrSqqxaguHhB9y7MY,2185 +dns/rdtypes/ANY/RRSIG.py,sha256=oyYoYdjv421AQ_w7Kylq3Kp6BAO6sINqo3ZtcWZfHZ8,4924 +dns/rdtypes/ANY/RT.py,sha256=sErLRUOGk3H_jQ1-Pu2h30HqqFMqIQaWuasRx6OH-wU,1014 +dns/rdtypes/ANY/SMIMEA.py,sha256=6yjHuVDfIEodBU9wxbCGCDZ5cWYwyY6FCk-aq2VNU0s,222 +dns/rdtypes/ANY/SOA.py,sha256=mnfEFZZ-et0Euz1o3bDUxWXEtVmTzQ2VkPwVDUVIogM,3146 +dns/rdtypes/ANY/SPF.py,sha256=M1_Nm5l4pHPlhUT13oTnSRdpI-gpTpM5myUE0Ym00V0,1023 +dns/rdtypes/ANY/SSHFP.py,sha256=avOC-M3V3-ukuo1Ej7nNwjzLjrCLDGLRrvMgw_wwLWE,2531 +dns/rdtypes/ANY/TKEY.py,sha256=PxytD37IfJDH3jDI4Tvv6X3lrP5ldMFn0y_nO6WGxIY,4932 +dns/rdtypes/ANY/TLSA.py,sha256=EYP7AXBh4zMtBgnfz828qfoLkFLCm5g8vR-6oX_DTbE,219 +dns/rdtypes/ANY/TSIG.py,sha256=zrA3aWrgmL1Wf-svd1gHx4Us1QNA0qI1YcsVAybulSk,4751 +dns/rdtypes/ANY/TXT.py,sha256=jKTLk67pF0MaiGllLsBXgH_Z-PJjLFKTnI5mO36i7_0,1001 +dns/rdtypes/ANY/URI.py,sha256=wSGmh6pStbLtttwD196POVeHvGqcsEBKp5G4TYPyCwA,2922 +dns/rdtypes/ANY/X25.py,sha256=KLZT5BRBMor8GRlhqNSnjd5zVym0yihMt0MOuP97h2I,1945 +dns/rdtypes/ANY/ZONEMD.py,sha256=tiOqxbp_A3CSmeZ5rO4aixiWAfzgUzmPOEjfo1ecj40,2394 +dns/rdtypes/ANY/__init__.py,sha256=Pox71HfsEnGGB1PGU44pwrrmjxPLQlA-IbX6nQRoA2M,1497 +dns/rdtypes/ANY/__pycache__/AFSDB.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/AMTRELAY.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/AVC.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/CAA.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/CDNSKEY.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/CDS.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/CERT.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/CNAME.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/CSYNC.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/DLV.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/DNAME.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/DNSKEY.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/DS.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/EUI48.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/EUI64.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/GPOS.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/HINFO.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/HIP.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/ISDN.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/L32.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/L64.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/LOC.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/LP.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/MX.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/NID.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/NINFO.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/NS.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/NSEC.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/NSEC3.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/NSEC3PARAM.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/OPENPGPKEY.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/OPT.cpython-38.pyc,, 
+dns/rdtypes/ANY/__pycache__/PTR.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/RP.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/RRSIG.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/RT.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/SMIMEA.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/SOA.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/SPF.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/SSHFP.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/TKEY.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/TLSA.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/TSIG.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/TXT.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/URI.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/X25.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/ZONEMD.cpython-38.pyc,, +dns/rdtypes/ANY/__pycache__/__init__.cpython-38.pyc,, +dns/rdtypes/CH/A.py,sha256=hOLY9c7VkGMQZkKqjclUMj2s4M5wD1-M8F738FwyTFw,2217 +dns/rdtypes/CH/__init__.py,sha256=GD9YeDKb9VBDo-J5rrChX1MWEGyQXuR9Htnbhg_iYLc,923 +dns/rdtypes/CH/__pycache__/A.cpython-38.pyc,, +dns/rdtypes/CH/__pycache__/__init__.cpython-38.pyc,, +dns/rdtypes/IN/A.py,sha256=pq9G7ZZrCCEBWOFWvLmivtu8b_9ZAFIJU0oQUMHHudE,1815 +dns/rdtypes/IN/AAAA.py,sha256=GvQ1So05gExl1pw0HxO4ypfrT-EfnVCwBVVqgSvq5zk,1821 +dns/rdtypes/IN/APL.py,sha256=0GaLpxZ1jFI82NUJ2-t23PRd9q-678X9Z8iJZBVzxtA,5099 +dns/rdtypes/IN/DHCID.py,sha256=x-JxOiEbaOUREw2QXBLKvGTrljn7mrs8fO2jjlmc4AI,1857 +dns/rdtypes/IN/HTTPS.py,sha256=P-IjwcvDQMmtoBgsDHglXF7KgLX73G6jEDqCKsnaGpQ,220 +dns/rdtypes/IN/IPSECKEY.py,sha256=E1jjo99dVZ1GiB6sGioPZR9sG1aP9aZbifq7F91IjOI,3291 +dns/rdtypes/IN/KX.py,sha256=q8Ns4jiCuxqkYZzfkihS2WXaQ7qxQY7SSzibJu_oNQ0,1014 +dns/rdtypes/IN/NAPTR.py,sha256=MxuMrWCAR3p7vHPFddtWa2uQxvXTPKjkawbT_JxyZ0A,3751 +dns/rdtypes/IN/NSAP.py,sha256=bHxNkjZYbq5MrkWQC8ILoTrqpoNzRfCrWtOFgSjoO60,2166 +dns/rdtypes/IN/NSAP_PTR.py,sha256=HX32Hz7pop1a8amu8hLvi2-5YtzbROBUsbGmbOngcZ0,1016 +dns/rdtypes/IN/PX.py,sha256=g8OQekugan0lUTEjwRmApDKqSlEzFa_-FtM4Z9w7TnU,2757 +dns/rdtypes/IN/SRV.py,sha256=OTMa1rnbWtBf19ft7uAcjGAEtUwk-C-rDR9pAaUvBMA,2770 +dns/rdtypes/IN/SVCB.py,sha256=HeFmi2v01F00Hott8FlvQ4R7aPxFmT7RF-gt45R5K_M,218 +dns/rdtypes/IN/WKS.py,sha256=sE-jEDIrKzchRA2-KqChOTSRSDrueINDhD_hPH4IjR0,3653 +dns/rdtypes/IN/__init__.py,sha256=HbI8aw9HWroI6SgEvl8Sx6FdkDswCCXMbSRuJy5o8LQ,1083 +dns/rdtypes/IN/__pycache__/A.cpython-38.pyc,, +dns/rdtypes/IN/__pycache__/AAAA.cpython-38.pyc,, +dns/rdtypes/IN/__pycache__/APL.cpython-38.pyc,, +dns/rdtypes/IN/__pycache__/DHCID.cpython-38.pyc,, +dns/rdtypes/IN/__pycache__/HTTPS.cpython-38.pyc,, +dns/rdtypes/IN/__pycache__/IPSECKEY.cpython-38.pyc,, +dns/rdtypes/IN/__pycache__/KX.cpython-38.pyc,, +dns/rdtypes/IN/__pycache__/NAPTR.cpython-38.pyc,, +dns/rdtypes/IN/__pycache__/NSAP.cpython-38.pyc,, +dns/rdtypes/IN/__pycache__/NSAP_PTR.cpython-38.pyc,, +dns/rdtypes/IN/__pycache__/PX.cpython-38.pyc,, +dns/rdtypes/IN/__pycache__/SRV.cpython-38.pyc,, +dns/rdtypes/IN/__pycache__/SVCB.cpython-38.pyc,, +dns/rdtypes/IN/__pycache__/WKS.cpython-38.pyc,, +dns/rdtypes/IN/__pycache__/__init__.cpython-38.pyc,, +dns/rdtypes/__init__.py,sha256=NYizfGglJfhqt_GMtSSXf7YQXIEHHCiJ_Y_qaLVeiOI,1073 +dns/rdtypes/__pycache__/__init__.cpython-38.pyc,, +dns/rdtypes/__pycache__/dnskeybase.cpython-38.pyc,, +dns/rdtypes/__pycache__/dsbase.cpython-38.pyc,, +dns/rdtypes/__pycache__/euibase.cpython-38.pyc,, +dns/rdtypes/__pycache__/mxbase.cpython-38.pyc,, +dns/rdtypes/__pycache__/nsbase.cpython-38.pyc,, +dns/rdtypes/__pycache__/svcbbase.cpython-38.pyc,, +dns/rdtypes/__pycache__/tlsabase.cpython-38.pyc,, 
+dns/rdtypes/__pycache__/txtbase.cpython-38.pyc,, +dns/rdtypes/__pycache__/util.cpython-38.pyc,, +dns/rdtypes/dnskeybase.py,sha256=-KMVEf-AKcfaQo4GXkQYz68PH-3nqJYJsl31Gtv2Vv8,2857 +dns/rdtypes/dsbase.py,sha256=_LkKfZ5rA0eV-AVvkhWjZ8niiDQQRdvTPGKmmFeGDiM,3428 +dns/rdtypes/euibase.py,sha256=uPt1qsqMB32T7LOxMsSfEtakF4A4prWLoZR-_MU4Hp8,2631 +dns/rdtypes/mxbase.py,sha256=iPJcRxNljkrXN3oi29_VE6qkTrOqpeNsp6pRo_F9h9M,3199 +dns/rdtypes/nsbase.py,sha256=X52rY0FrKpg38TvUIN_0wQJJpBTMgbuBDMEIF1cFXF4,2325 +dns/rdtypes/svcbbase.py,sha256=hO3JFkPJ1YV60AfIhJy9qXu4glZagRTapuhV6sPyjNo,16952 +dns/rdtypes/tlsabase.py,sha256=QtyXSL4wxPU-1LfiU6Pj-wrvaFg4_qNjvrGX5g0JbVA,2597 +dns/rdtypes/txtbase.py,sha256=xdGVjo817CYrbpUsC00LsBIbnFCnXJZr6ENcdQpSqnA,3630 +dns/rdtypes/util.py,sha256=6AGQ-k3mLNlx4Ep_FiDABj1WVumUUGs3zQ6X-2iISec,9003 +dns/renderer.py,sha256=44AhGs5mkMDDck2WZ6ScTyCYGPh7UrOEiCQRsdqs0uI,10673 +dns/resolver.py,sha256=wagpUIu8Oh12O-zk48U30A6VQQOspjfibU4Ls2So-kM,73552 +dns/reversename.py,sha256=zoqXEbMZXm6R13nXbJHgTsf6L2C6uReODj6mqSHrTiE,3828 +dns/rrset.py,sha256=QMz3uoWt8X0tmbVHOCrOuUozL795A-aT3RyzaPyv98M,9168 +dns/serial.py,sha256=-t5rPW-TcJwzBMfIJo7Tl-uDtaYtpqOfCVYx9dMaDCY,3606 +dns/set.py,sha256=R8LN8_aUrvOfav5Za7VcSrdi0D10jJsSrNewdrc8Lwg,9089 +dns/tokenizer.py,sha256=Dcc3lQgEIHCVZBuO6FaKWEojtPSd3EuaUC4vQA-spnk,23583 +dns/transaction.py,sha256=b-jo7wxbkB1bxVRw0a0hE0lZRveKV6WcYKOUVctUpdw,22660 +dns/tsig.py,sha256=1YzDVByrQ_qpITVQjO4fIkZUqY9d6lMJwv_ePeQNPmE,11422 +dns/tsigkeyring.py,sha256=Z0xZemcU3XjZ9HlxBYv2E2PSuIhaFreqLDlD7HcmZDA,2633 +dns/ttl.py,sha256=fWFkw8qfk6saTp7lAPxZOuD3U3TRxVRvIpljQnG-01I,2979 +dns/update.py,sha256=y9d6LOO8xrUaH2UrZhy3ssnx8bJEsxqTArw5V8XqBRs,12243 +dns/version.py,sha256=tV2AtPHOnpAh4Tt6Xz7VjswJej9aIiOOAm-voiNqxro,1926 +dns/versioned.py,sha256=3YQj8mzGmZEsjnuVJJjcWopVmDKYLhEj4hEGTLEwzco,11765 +dns/win32util.py,sha256=l6OznV8cDXxuqB6yBFhYt_-P1v_ENnS-8DCQqNw8DWI,9005 +dns/wire.py,sha256=vy0SolgECbO1UXB4dnhXhDeFKOJT29nQxXvSfKOgA5s,2830 +dns/xfr.py,sha256=FKkKO-kSpyE1vHU5mnoPIP4YxiCl5gG7E5wOgY_4GO8,13273 +dns/zone.py,sha256=0PJLT0gqTJ4Woq37ucN80k_-xBm0oeqk7JBnJwpCca0,51093 +dns/zonefile.py,sha256=rJEY8rEvDG9BETsJsb-TozRe5ccAkgSNs0yb9vx-_uU,27930 +dns/zonetypes.py,sha256=HrQNZxZ_gWLWI9dskix71msi9wkYK5pgrBBbPb1T74Y,690 +dnspython-2.4.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +dnspython-2.4.2.dist-info/LICENSE,sha256=w-o_9WVLMpwZ07xfdIGvYjw93tSmFFWFSZ-EOtPXQc0,1526 +dnspython-2.4.2.dist-info/METADATA,sha256=aQdMbHIJB5rtUWHQyRB5EAPB7km6lV3ohDC5cpygnbs,4892 +dnspython-2.4.2.dist-info/RECORD,, +dnspython-2.4.2.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88 diff --git a/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/WHEEL b/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..258a6ff36f44a40ff24d2bbd6d4c934481da1402 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/dnspython-2.4.2.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: poetry-core 1.6.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/backend/test/lib/python3.8/site-packages/easy_install.py b/backend/test/lib/python3.8/site-packages/easy_install.py new file mode 100644 index 0000000000000000000000000000000000000000..d87e984034b6e6e9eb456ebcb2b3f420c07a48bc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/easy_install.py @@ -0,0 +1,5 @@ +"""Run the EasyInstall command""" + +if __name__ == '__main__': + from 
setuptools.command.easy_install import main + main() diff --git a/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/INSTALLER b/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/LICENSE.rst b/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/LICENSE.rst new file mode 100644 index 0000000000000000000000000000000000000000..9d227a0cc43c3268d15722b763bd94ad298645a1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2010 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/METADATA b/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..d7c314536239e2b54a4166f1edf315c2a31b7ea6 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/METADATA @@ -0,0 +1,116 @@ +Metadata-Version: 2.1 +Name: Flask +Version: 2.3.3 +Summary: A simple framework for building complex web applications. 
+Maintainer-email: Pallets <contact@palletsprojects.com> +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Framework :: Flask +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Requires-Dist: Werkzeug>=2.3.7 +Requires-Dist: Jinja2>=3.1.2 +Requires-Dist: itsdangerous>=2.1.2 +Requires-Dist: click>=8.1.3 +Requires-Dist: blinker>=1.6.2 +Requires-Dist: importlib-metadata>=3.6.0; python_version < '3.10' +Requires-Dist: asgiref>=3.2 ; extra == "async" +Requires-Dist: python-dotenv ; extra == "dotenv" +Project-URL: Changes, https://flask.palletsprojects.com/changes/ +Project-URL: Chat, https://discord.gg/pallets +Project-URL: Documentation, https://flask.palletsprojects.com/ +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Issue Tracker, https://github.com/pallets/flask/issues/ +Project-URL: Source Code, https://github.com/pallets/flask/ +Provides-Extra: async +Provides-Extra: dotenv + +Flask +===== + +Flask is a lightweight `WSGI`_ web application framework. It is designed +to make getting started quick and easy, with the ability to scale up to +complex applications. It began as a simple wrapper around `Werkzeug`_ +and `Jinja`_ and has become one of the most popular Python web +application frameworks. + +Flask offers suggestions, but doesn't enforce any dependencies or +project layout. It is up to the developer to choose the tools and +libraries they want to use. There are many extensions provided by the +community that make adding new functionality easy. + +.. _WSGI: https://wsgi.readthedocs.io/ +.. _Werkzeug: https://werkzeug.palletsprojects.com/ +.. _Jinja: https://jinja.palletsprojects.com/ + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + $ pip install -U Flask + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. code-block:: python + + # save this as app.py + from flask import Flask + + app = Flask(__name__) + + @app.route("/") + def hello(): + return "Hello, World!" + +.. code-block:: text + + $ flask run + * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) + + +Contributing +------------ + +For guidance on setting up a development environment and how to make a +contribution to Flask, see the `contributing guidelines`_. + +.. _contributing guidelines: https://github.com/pallets/flask/blob/main/CONTRIBUTING.rst + + +Donate +------ + +The Pallets organization develops and supports Flask and the libraries +it uses. In order to grow the community of contributors and users, and +allow the maintainers to devote more time to the projects, `please +donate today`_. + +.. 
_please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://flask.palletsprojects.com/ +- Changes: https://flask.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/Flask/ +- Source Code: https://github.com/pallets/flask/ +- Issue Tracker: https://github.com/pallets/flask/issues/ +- Chat: https://discord.gg/pallets + diff --git a/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/RECORD b/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..fd3baa1d4bebb2f8a78331231f70375087f037c6 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/RECORD @@ -0,0 +1,52 @@ +../../../bin/flask,sha256=ewUurHglAMBhSyshBUJn-ZHYdrRrR-XaO8bNdL4nLpw,245 +flask-2.3.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +flask-2.3.3.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 +flask-2.3.3.dist-info/METADATA,sha256=-BtXVsnPe7lNA3mcFZHJfsVIiVin1A8LUstChm8qiHo,3588 +flask-2.3.3.dist-info/RECORD,, +flask-2.3.3.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +flask-2.3.3.dist-info/entry_points.txt,sha256=bBP7hTOS5fz9zLtC7sPofBZAlMkEvBxu7KqS6l5lvc4,40 +flask/__init__.py,sha256=xq09XNKP-Y-fdv6BeGH7RlFaY006tUA3o_llGcl-dno,3731 +flask/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30 +flask/__pycache__/__init__.cpython-38.pyc,, +flask/__pycache__/__main__.cpython-38.pyc,, +flask/__pycache__/app.cpython-38.pyc,, +flask/__pycache__/blueprints.cpython-38.pyc,, +flask/__pycache__/cli.cpython-38.pyc,, +flask/__pycache__/config.cpython-38.pyc,, +flask/__pycache__/ctx.cpython-38.pyc,, +flask/__pycache__/debughelpers.cpython-38.pyc,, +flask/__pycache__/globals.cpython-38.pyc,, +flask/__pycache__/helpers.cpython-38.pyc,, +flask/__pycache__/logging.cpython-38.pyc,, +flask/__pycache__/scaffold.cpython-38.pyc,, +flask/__pycache__/sessions.cpython-38.pyc,, +flask/__pycache__/signals.cpython-38.pyc,, +flask/__pycache__/templating.cpython-38.pyc,, +flask/__pycache__/testing.cpython-38.pyc,, +flask/__pycache__/typing.cpython-38.pyc,, +flask/__pycache__/views.cpython-38.pyc,, +flask/__pycache__/wrappers.cpython-38.pyc,, +flask/app.py,sha256=ht3Qx9U9z0I1qUfLoS7bYhJcubdpk-i54eHq37LDlN8,87620 +flask/blueprints.py,sha256=ZpVrwa8UY-YnVDsX_1K10XQjDwCUp7Qn2hmKln5icEQ,24332 +flask/cli.py,sha256=PDwZCfPagi5GUzb-D6dEN7y20gWiVAg3ejRnxBKNHPA,33821 +flask/config.py,sha256=YZSZ-xpFj1iW1B1Kj1iDhpc5s7pHncloiRLqXhsU7Hs,12856 +flask/ctx.py,sha256=x2kGzUXtPzVyi2YSKrU_PV1AvtxTmh2iRdriJRTSPGM,14841 +flask/debughelpers.py,sha256=BR0xkd-sAyFuFW07D6NfrqNwSZxk1IrkG5n8zem-3sw,5547 +flask/globals.py,sha256=KUzVvSPh8v28kUasVDi_aQKB9hI2jZSYQHqaDU2P414,2945 +flask/helpers.py,sha256=uVhMwhhfwgjBt8b--zIZTjkfBRK28yPpmNhgVzhP444,25106 +flask/json/__init__.py,sha256=pdtpoK2b0b1u7Sxbx3feM7VWhsI20l1yGAvbYWxaxvc,5572 +flask/json/__pycache__/__init__.cpython-38.pyc,, +flask/json/__pycache__/provider.cpython-38.pyc,, +flask/json/__pycache__/tag.cpython-38.pyc,, +flask/json/provider.py,sha256=Os0frb8oGfyWKL-TDxb0Uy-MY6gDhPdJkRaUl5xAOXI,7637 +flask/json/tag.py,sha256=ihb7QWrNEr0YC3KD4TolZbftgSPCuLk7FAvK49huYC0,8871 +flask/logging.py,sha256=lArx2Bq9oTtUJ-DnZL9t88xU2zytzp4UWSM9Bd72NDQ,2327 +flask/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +flask/scaffold.py,sha256=ALGHLcy2qSbJ7ENd1H8dOnq5VDgH5XSFsOkDelcOKV8,33217 +flask/sessions.py,sha256=rFH2QKXG24dEazkKGxAHqUpAUh_30hDHrddhVYgAcY0,14169 
+flask/signals.py,sha256=s1H4yKjf3c5dgVr41V6sJpE9dLJvmTJMYuK0rkqx3sw,1146 +flask/templating.py,sha256=XdP2hMFnZ5FCZOG7HUaLjC2VC-b4uHSWlDjwv_1p3qc,7503 +flask/testing.py,sha256=h7AinggrMgGzKlDN66VfB0JjWW4Z1U_OD6FyjqBNiYM,10017 +flask/typing.py,sha256=4Lj-YTxUoYvPYofC9GKu-1o0Ht8lyjp9z3I336J13_o,3005 +flask/views.py,sha256=V5hOGZLx0Bn99QGcM6mh5x_uM-MypVT0-RysEFU84jc,6789 +flask/wrappers.py,sha256=PhMp3teK3SnEmIdog59cO_DHiZ9Btn0qI1EifrTdwP8,5709 diff --git a/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/WHEEL b/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..3b5e64b5e6c4a210201d1676a891fd57b15cda99 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/entry_points.txt b/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..eec6733e577feb9487435b9722713a820bd4ccc1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask-2.3.3.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +flask=flask.cli:main + diff --git a/backend/test/lib/python3.8/site-packages/flask/__init__.py b/backend/test/lib/python3.8/site-packages/flask/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bdca1b0149179a48a8e417b0c0ae912ebb2a0b57 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/__init__.py @@ -0,0 +1,102 @@ +from . import json as json +from .app import Flask as Flask +from .app import Request as Request +from .app import Response as Response +from .blueprints import Blueprint as Blueprint +from .config import Config as Config +from .ctx import after_this_request as after_this_request +from .ctx import copy_current_request_context as copy_current_request_context +from .ctx import has_app_context as has_app_context +from .ctx import has_request_context as has_request_context +from .globals import current_app as current_app +from .globals import g as g +from .globals import request as request +from .globals import session as session +from .helpers import abort as abort +from .helpers import flash as flash +from .helpers import get_flashed_messages as get_flashed_messages +from .helpers import get_template_attribute as get_template_attribute +from .helpers import make_response as make_response +from .helpers import redirect as redirect +from .helpers import send_file as send_file +from .helpers import send_from_directory as send_from_directory +from .helpers import stream_with_context as stream_with_context +from .helpers import url_for as url_for +from .json import jsonify as jsonify +from .signals import appcontext_popped as appcontext_popped +from .signals import appcontext_pushed as appcontext_pushed +from .signals import appcontext_tearing_down as appcontext_tearing_down +from .signals import before_render_template as before_render_template +from .signals import got_request_exception as got_request_exception +from .signals import message_flashed as message_flashed +from .signals import request_finished as request_finished +from .signals import request_started as request_started +from .signals import request_tearing_down as request_tearing_down +from .signals import template_rendered as template_rendered +from .templating import 
render_template as render_template +from .templating import render_template_string as render_template_string +from .templating import stream_template as stream_template +from .templating import stream_template_string as stream_template_string + +__version__ = "2.3.3" + + +def __getattr__(name): + if name == "_app_ctx_stack": + import warnings + from .globals import __app_ctx_stack + + warnings.warn( + "'_app_ctx_stack' is deprecated and will be removed in Flask 2.4.", + DeprecationWarning, + stacklevel=2, + ) + return __app_ctx_stack + + if name == "_request_ctx_stack": + import warnings + from .globals import __request_ctx_stack + + warnings.warn( + "'_request_ctx_stack' is deprecated and will be removed in Flask 2.4.", + DeprecationWarning, + stacklevel=2, + ) + return __request_ctx_stack + + if name == "escape": + import warnings + from markupsafe import escape + + warnings.warn( + "'flask.escape' is deprecated and will be removed in Flask 2.4. Import" + " 'markupsafe.escape' instead.", + DeprecationWarning, + stacklevel=2, + ) + return escape + + if name == "Markup": + import warnings + from markupsafe import Markup + + warnings.warn( + "'flask.Markup' is deprecated and will be removed in Flask 2.4. Import" + " 'markupsafe.Markup' instead.", + DeprecationWarning, + stacklevel=2, + ) + return Markup + + if name == "signals_available": + import warnings + + warnings.warn( + "'signals_available' is deprecated and will be removed in Flask 2.4." + " Signals are always available", + DeprecationWarning, + stacklevel=2, + ) + return True + + raise AttributeError(name) diff --git a/backend/test/lib/python3.8/site-packages/flask/__main__.py b/backend/test/lib/python3.8/site-packages/flask/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..4e28416e104515e90fca4b69cc60d0c61fd15d61 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/__main__.py @@ -0,0 +1,3 @@ +from .cli import main + +main() diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..949298972352f26f4c0aba9cd9b3ef61098a5b6b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/__main__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/__main__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dec9d0d57feafc7f168a0f976c0480c8ac755960 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/__main__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/app.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/app.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..33371a98397c400a24d438ba7517b838ea011294 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/app.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/blueprints.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/blueprints.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f1ec6c17073011f5b3f1c056d27e943743c85076 Binary files /dev/null and 
b/backend/test/lib/python3.8/site-packages/flask/__pycache__/blueprints.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/cli.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/cli.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5fcea108c8fa1ca77543ed6a04254c787bddaaac Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/cli.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/config.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/config.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..21ca230ab3d4af924fea79f544d601ca8b855300 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/config.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/ctx.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/ctx.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0ddcb72f88bbf827105efe3e7afec27baf98cd1e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/ctx.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/debughelpers.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/debughelpers.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..816ac5d90ad4ffc221c8e4da2adf29b728f19c1c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/debughelpers.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/globals.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/globals.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5f0869e3e390ec5580399027c741d124e74bd61f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/globals.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/helpers.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/helpers.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0ccf5aa784cbd784f5d62f017e3cb4e59d72a3a5 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/helpers.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/logging.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/logging.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b93d92e352bff7e71bf96266c17cd80076c00c42 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/logging.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/scaffold.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/scaffold.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a206a40ca392a70422a4867b58e6566f39ddfb72 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/scaffold.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/sessions.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/sessions.cpython-38.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..27babb84e2e164c88bb40cc6c773c06b0b70624b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/sessions.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/signals.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/signals.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b7cacdb299f61da9a5094fedc9b963fbc1a57dbd Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/signals.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/templating.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/templating.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..710f6c6005c5f563d8d4de0457e63d30d2486a96 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/templating.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/testing.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/testing.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eaa81b860f9a9284907f3c0813074f5a0b6e6266 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/testing.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/typing.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/typing.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6d19058fc4da781b6826519dc2fdbf05c6ed7656 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/typing.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/views.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/views.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ac67277b513eaeda23b22460ef69bb282e52b37b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/views.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/__pycache__/wrappers.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/__pycache__/wrappers.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..783df4154184b2d2e5834299acb27f98d06c1d83 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/__pycache__/wrappers.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/app.py b/backend/test/lib/python3.8/site-packages/flask/app.py new file mode 100644 index 0000000000000000000000000000000000000000..3b6b38d8ad7f32af70edfead740af1aa313b76f3 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/app.py @@ -0,0 +1,2213 @@ +from __future__ import annotations + +import logging +import os +import sys +import typing as t +import weakref +from collections.abc import Iterator as _abc_Iterator +from datetime import timedelta +from inspect import iscoroutinefunction +from itertools import chain +from types import TracebackType +from urllib.parse import quote as _url_quote + +import click +from werkzeug.datastructures import Headers +from werkzeug.datastructures import ImmutableDict +from werkzeug.exceptions import Aborter +from werkzeug.exceptions import BadRequest +from werkzeug.exceptions import 
BadRequestKeyError +from werkzeug.exceptions import HTTPException +from werkzeug.exceptions import InternalServerError +from werkzeug.routing import BuildError +from werkzeug.routing import Map +from werkzeug.routing import MapAdapter +from werkzeug.routing import RequestRedirect +from werkzeug.routing import RoutingException +from werkzeug.routing import Rule +from werkzeug.serving import is_running_from_reloader +from werkzeug.utils import cached_property +from werkzeug.utils import redirect as _wz_redirect +from werkzeug.wrappers import Response as BaseResponse + +from . import cli +from . import typing as ft +from .config import Config +from .config import ConfigAttribute +from .ctx import _AppCtxGlobals +from .ctx import AppContext +from .ctx import RequestContext +from .globals import _cv_app +from .globals import _cv_request +from .globals import g +from .globals import request +from .globals import request_ctx +from .globals import session +from .helpers import _split_blueprint_path +from .helpers import get_debug_flag +from .helpers import get_flashed_messages +from .helpers import get_load_dotenv +from .json.provider import DefaultJSONProvider +from .json.provider import JSONProvider +from .logging import create_logger +from .scaffold import _endpoint_from_view_func +from .scaffold import _sentinel +from .scaffold import find_package +from .scaffold import Scaffold +from .scaffold import setupmethod +from .sessions import SecureCookieSessionInterface +from .sessions import SessionInterface +from .signals import appcontext_tearing_down +from .signals import got_request_exception +from .signals import request_finished +from .signals import request_started +from .signals import request_tearing_down +from .templating import DispatchingJinjaLoader +from .templating import Environment +from .wrappers import Request +from .wrappers import Response + +if t.TYPE_CHECKING: # pragma: no cover + from .blueprints import Blueprint + from .testing import FlaskClient + from .testing import FlaskCliRunner + +T_shell_context_processor = t.TypeVar( + "T_shell_context_processor", bound=ft.ShellContextProcessorCallable +) +T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) +T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) +T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) +T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) + + +def _make_timedelta(value: timedelta | int | None) -> timedelta | None: + if value is None or isinstance(value, timedelta): + return value + + return timedelta(seconds=value) + + +class Flask(Scaffold): + """The flask object implements a WSGI application and acts as the central + object. It is passed the name of the module or package of the + application. Once it is created it will act as a central registry for + the view functions, the URL rules, template configuration and much more. + + The name of the package is used to resolve resources from inside the + package or the folder the module is contained in depending on if the + package parameter resolves to an actual python package (a folder with + an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). + + For more information about resource loading, see :func:`open_resource`. + + Usually you create a :class:`Flask` instance in your main module or + in the :file:`__init__.py` file of your package like this:: + + from flask import Flask + app = Flask(__name__) + + .. 
admonition:: About the First Parameter + + The idea of the first parameter is to give Flask an idea of what + belongs to your application. This name is used to find resources + on the filesystem, can be used by extensions to improve debugging + information and a lot more. + + So it's important what you provide there. If you are using a single + module, `__name__` is always the correct value. If you however are + using a package, it's usually recommended to hardcode the name of + your package there. + + For example if your application is defined in :file:`yourapplication/app.py` + you should create it with one of the two versions below:: + + app = Flask('yourapplication') + app = Flask(__name__.split('.')[0]) + + Why is that? The application will work even with `__name__`, thanks + to how resources are looked up. However it will make debugging more + painful. Certain extensions can make assumptions based on the + import name of your application. For example the Flask-SQLAlchemy + extension will look for the code in your application that triggered + an SQL query in debug mode. If the import name is not properly set + up, that debugging information is lost. (For example it would only + pick up SQL queries in `yourapplication.app` and not + `yourapplication.views.frontend`) + + .. versionadded:: 0.7 + The `static_url_path`, `static_folder`, and `template_folder` + parameters were added. + + .. versionadded:: 0.8 + The `instance_path` and `instance_relative_config` parameters were + added. + + .. versionadded:: 0.11 + The `root_path` parameter was added. + + .. versionadded:: 1.0 + The ``host_matching`` and ``static_host`` parameters were added. + + .. versionadded:: 1.0 + The ``subdomain_matching`` parameter was added. Subdomain + matching needs to be enabled manually now. Setting + :data:`SERVER_NAME` does not implicitly enable it. + + :param import_name: the name of the application package + :param static_url_path: can be used to specify a different path for the + static files on the web. Defaults to the name + of the `static_folder` folder. + :param static_folder: The folder with static files that is served at + ``static_url_path``. Relative to the application ``root_path`` + or an absolute path. Defaults to ``'static'``. + :param static_host: the host to use when adding the static route. + Defaults to None. Required when using ``host_matching=True`` + with a ``static_folder`` configured. + :param host_matching: set ``url_map.host_matching`` attribute. + Defaults to False. + :param subdomain_matching: consider the subdomain relative to + :data:`SERVER_NAME` when matching routes. Defaults to False. + :param template_folder: the folder that contains the templates that should + be used by the application. Defaults to + ``'templates'`` folder in the root path of the + application. + :param instance_path: An alternative instance path for the application. + By default the folder ``'instance'`` next to the + package or module is assumed to be the instance + path. + :param instance_relative_config: if set to ``True`` relative filenames + for loading the config are assumed to + be relative to the instance path instead + of the application root. + :param root_path: The path to the root of the application files. + This should only be set manually when it can't be detected + automatically, such as for namespace packages. + """ + + #: The class that is used for request objects. See :class:`~flask.Request` + #: for more information. 
+ request_class = Request + + #: The class that is used for response objects. See + #: :class:`~flask.Response` for more information. + response_class = Response + + #: The class of the object assigned to :attr:`aborter`, created by + #: :meth:`create_aborter`. That object is called by + #: :func:`flask.abort` to raise HTTP errors, and can be + #: called directly as well. + #: + #: Defaults to :class:`werkzeug.exceptions.Aborter`. + #: + #: .. versionadded:: 2.2 + aborter_class = Aborter + + #: The class that is used for the Jinja environment. + #: + #: .. versionadded:: 0.11 + jinja_environment = Environment + + #: The class that is used for the :data:`~flask.g` instance. + #: + #: Example use cases for a custom class: + #: + #: 1. Store arbitrary attributes on flask.g. + #: 2. Add a property for lazy per-request database connectors. + #: 3. Return None instead of AttributeError on unexpected attributes. + #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. + #: + #: In Flask 0.9 this property was called `request_globals_class` but it + #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the + #: flask.g object is now application context scoped. + #: + #: .. versionadded:: 0.10 + app_ctx_globals_class = _AppCtxGlobals + + #: The class that is used for the ``config`` attribute of this app. + #: Defaults to :class:`~flask.Config`. + #: + #: Example use cases for a custom class: + #: + #: 1. Default values for certain config options. + #: 2. Access to config values through attributes in addition to keys. + #: + #: .. versionadded:: 0.11 + config_class = Config + + #: The testing flag. Set this to ``True`` to enable the test mode of + #: Flask extensions (and in the future probably also Flask itself). + #: For example this might activate test helpers that have an + #: additional runtime cost which should not be enabled by default. + #: + #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the + #: default it's implicitly enabled. + #: + #: This attribute can also be configured from the config with the + #: ``TESTING`` configuration key. Defaults to ``False``. + testing = ConfigAttribute("TESTING") + + #: If a secret key is set, cryptographic components can use this to + #: sign cookies and other things. Set this to a complex random value + #: when you want to use the secure cookie for instance. + #: + #: This attribute can also be configured from the config with the + #: :data:`SECRET_KEY` configuration key. Defaults to ``None``. + secret_key = ConfigAttribute("SECRET_KEY") + + #: A :class:`~datetime.timedelta` which is used to set the expiration + #: date of a permanent session. The default is 31 days which makes a + #: permanent session survive for roughly one month. + #: + #: This attribute can also be configured from the config with the + #: ``PERMANENT_SESSION_LIFETIME`` configuration key. Defaults to + #: ``timedelta(days=31)`` + permanent_session_lifetime = ConfigAttribute( + "PERMANENT_SESSION_LIFETIME", get_converter=_make_timedelta + ) + + json_provider_class: type[JSONProvider] = DefaultJSONProvider + """A subclass of :class:`~flask.json.provider.JSONProvider`. An + instance is created and assigned to :attr:`app.json` when creating + the app. + + The default, :class:`~flask.json.provider.DefaultJSONProvider`, uses + Python's built-in :mod:`json` library. A different provider can use + a different JSON library. + + .. 
versionadded:: 2.2 + """ + + #: Options that are passed to the Jinja environment in + #: :meth:`create_jinja_environment`. Changing these options after + #: the environment is created (accessing :attr:`jinja_env`) will + #: have no effect. + #: + #: .. versionchanged:: 1.1.0 + #: This is a ``dict`` instead of an ``ImmutableDict`` to allow + #: easier configuration. + #: + jinja_options: dict = {} + + #: Default configuration parameters. + default_config = ImmutableDict( + { + "DEBUG": None, + "TESTING": False, + "PROPAGATE_EXCEPTIONS": None, + "SECRET_KEY": None, + "PERMANENT_SESSION_LIFETIME": timedelta(days=31), + "USE_X_SENDFILE": False, + "SERVER_NAME": None, + "APPLICATION_ROOT": "/", + "SESSION_COOKIE_NAME": "session", + "SESSION_COOKIE_DOMAIN": None, + "SESSION_COOKIE_PATH": None, + "SESSION_COOKIE_HTTPONLY": True, + "SESSION_COOKIE_SECURE": False, + "SESSION_COOKIE_SAMESITE": None, + "SESSION_REFRESH_EACH_REQUEST": True, + "MAX_CONTENT_LENGTH": None, + "SEND_FILE_MAX_AGE_DEFAULT": None, + "TRAP_BAD_REQUEST_ERRORS": None, + "TRAP_HTTP_EXCEPTIONS": False, + "EXPLAIN_TEMPLATE_LOADING": False, + "PREFERRED_URL_SCHEME": "http", + "TEMPLATES_AUTO_RELOAD": None, + "MAX_COOKIE_SIZE": 4093, + } + ) + + #: The rule object to use for URL rules created. This is used by + #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. + #: + #: .. versionadded:: 0.7 + url_rule_class = Rule + + #: The map object to use for storing the URL rules and routing + #: configuration parameters. Defaults to :class:`werkzeug.routing.Map`. + #: + #: .. versionadded:: 1.1.0 + url_map_class = Map + + #: The :meth:`test_client` method creates an instance of this test + #: client class. Defaults to :class:`~flask.testing.FlaskClient`. + #: + #: .. versionadded:: 0.7 + test_client_class: type[FlaskClient] | None = None + + #: The :class:`~click.testing.CliRunner` subclass, by default + #: :class:`~flask.testing.FlaskCliRunner` that is used by + #: :meth:`test_cli_runner`. Its ``__init__`` method should take a + #: Flask app object as the first argument. + #: + #: .. versionadded:: 1.0 + test_cli_runner_class: type[FlaskCliRunner] | None = None + + #: the session interface to use. By default an instance of + #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. + #: + #: .. versionadded:: 0.8 + session_interface: SessionInterface = SecureCookieSessionInterface() + + def __init__( + self, + import_name: str, + static_url_path: str | None = None, + static_folder: str | os.PathLike | None = "static", + static_host: str | None = None, + host_matching: bool = False, + subdomain_matching: bool = False, + template_folder: str | os.PathLike | None = "templates", + instance_path: str | None = None, + instance_relative_config: bool = False, + root_path: str | None = None, + ): + super().__init__( + import_name=import_name, + static_folder=static_folder, + static_url_path=static_url_path, + template_folder=template_folder, + root_path=root_path, + ) + + if instance_path is None: + instance_path = self.auto_find_instance_path() + elif not os.path.isabs(instance_path): + raise ValueError( + "If an instance path is provided it must be absolute." + " A relative path was given instead." + ) + + #: Holds the path to the instance folder. + #: + #: .. versionadded:: 0.8 + self.instance_path = instance_path + + #: The configuration dictionary as :class:`Config`. This behaves + #: exactly like a regular dictionary but supports additional methods + #: to load a config from files. 
+ self.config = self.make_config(instance_relative_config) + + #: An instance of :attr:`aborter_class` created by + #: :meth:`make_aborter`. This is called by :func:`flask.abort` + #: to raise HTTP errors, and can be called directly as well. + #: + #: .. versionadded:: 2.2 + #: Moved from ``flask.abort``, which calls this object. + self.aborter = self.make_aborter() + + self.json: JSONProvider = self.json_provider_class(self) + """Provides access to JSON methods. Functions in ``flask.json`` + will call methods on this provider when the application context + is active. Used for handling JSON requests and responses. + + An instance of :attr:`json_provider_class`. Can be customized by + changing that attribute on a subclass, or by assigning to this + attribute afterwards. + + The default, :class:`~flask.json.provider.DefaultJSONProvider`, + uses Python's built-in :mod:`json` library. A different provider + can use a different JSON library. + + .. versionadded:: 2.2 + """ + + #: A list of functions that are called by + #: :meth:`handle_url_build_error` when :meth:`.url_for` raises a + #: :exc:`~werkzeug.routing.BuildError`. Each function is called + #: with ``error``, ``endpoint`` and ``values``. If a function + #: returns ``None`` or raises a ``BuildError``, it is skipped. + #: Otherwise, its return value is returned by ``url_for``. + #: + #: .. versionadded:: 0.9 + self.url_build_error_handlers: list[ + t.Callable[[Exception, str, dict[str, t.Any]], str] + ] = [] + + #: A list of functions that are called when the application context + #: is destroyed. Since the application context is also torn down + #: if the request ends this is the place to store code that disconnects + #: from databases. + #: + #: .. versionadded:: 0.9 + self.teardown_appcontext_funcs: list[ft.TeardownCallable] = [] + + #: A list of shell context processor functions that should be run + #: when a shell context is created. + #: + #: .. versionadded:: 0.11 + self.shell_context_processors: list[ft.ShellContextProcessorCallable] = [] + + #: Maps registered blueprint names to blueprint objects. The + #: dict retains the order the blueprints were registered in. + #: Blueprints can be registered multiple times, this dict does + #: not track how often they were attached. + #: + #: .. versionadded:: 0.7 + self.blueprints: dict[str, Blueprint] = {} + + #: a place where extensions can store application specific state. For + #: example this is where an extension could store database engines and + #: similar things. + #: + #: The key must match the name of the extension module. For example in + #: case of a "Flask-Foo" extension in `flask_foo`, the key would be + #: ``'foo'``. + #: + #: .. versionadded:: 0.7 + self.extensions: dict = {} + + #: The :class:`~werkzeug.routing.Map` for this instance. You can use + #: this to change the routing converters after the class was created + #: but before any routes are connected. Example:: + #: + #: from werkzeug.routing import BaseConverter + #: + #: class ListConverter(BaseConverter): + #: def to_python(self, value): + #: return value.split(',') + #: def to_url(self, values): + #: return ','.join(super(ListConverter, self).to_url(value) + #: for value in values) + #: + #: app = Flask(__name__) + #: app.url_map.converters['list'] = ListConverter + self.url_map = self.url_map_class() + + self.url_map.host_matching = host_matching + self.subdomain_matching = subdomain_matching + + # tracks internally if the application already handled at least one + # request. 
+ self._got_first_request = False + + # Add a static route using the provided static_url_path, static_host, + # and static_folder if there is a configured static_folder. + # Note we do this without checking if static_folder exists. + # For one, it might be created while the server is running (e.g. during + # development). Also, Google App Engine stores static files somewhere + if self.has_static_folder: + assert ( + bool(static_host) == host_matching + ), "Invalid static_host/host_matching combination" + # Use a weakref to avoid creating a reference cycle between the app + # and the view function (see #3761). + self_ref = weakref.ref(self) + self.add_url_rule( + f"{self.static_url_path}/<path:filename>", + endpoint="static", + host=static_host, + view_func=lambda **kw: self_ref().send_static_file(**kw), # type: ignore # noqa: B950 + ) + + # Set the name of the Click group in case someone wants to add + # the app's commands to another CLI tool. + self.cli.name = self.name + + def _check_setup_finished(self, f_name: str) -> None: + if self._got_first_request: + raise AssertionError( + f"The setup method '{f_name}' can no longer be called" + " on the application. It has already handled its first" + " request, any changes will not be applied" + " consistently.\n" + "Make sure all imports, decorators, functions, etc." + " needed to set up the application are done before" + " running it." + ) + + @cached_property + def name(self) -> str: # type: ignore + """The name of the application. This is usually the import name + with the difference that it's guessed from the run file if the + import name is main. This name is used as a display name when + Flask needs the name of the application. It can be set and overridden + to change the value. + + .. versionadded:: 0.8 + """ + if self.import_name == "__main__": + fn = getattr(sys.modules["__main__"], "__file__", None) + if fn is None: + return "__main__" + return os.path.splitext(os.path.basename(fn))[0] + return self.import_name + + @cached_property + def logger(self) -> logging.Logger: + """A standard Python :class:`~logging.Logger` for the app, with + the same name as :attr:`name`. + + In debug mode, the logger's :attr:`~logging.Logger.level` will + be set to :data:`~logging.DEBUG`. + + If there are no handlers configured, a default handler will be + added. See :doc:`/logging` for more information. + + .. versionchanged:: 1.1.0 + The logger takes the same name as :attr:`name` rather than + hard-coding ``"flask.app"``. + + .. versionchanged:: 1.0.0 + Behavior was simplified. The logger is always named + ``"flask.app"``. The level is only set during configuration, + it doesn't check ``app.debug`` each time. Only one format is + used, not different ones depending on ``app.debug``. No + handlers are removed, and a handler is only added if no + handlers are already configured. + + .. versionadded:: 0.3 + """ + return create_logger(self) + + @cached_property + def jinja_env(self) -> Environment: + """The Jinja environment used to load templates. + + The environment is created the first time this property is + accessed. Changing :attr:`jinja_options` after that will have no + effect. + """ + return self.create_jinja_environment() + + @property + def got_first_request(self) -> bool: + """This attribute is set to ``True`` if the application started + handling the first request. + + .. deprecated:: 2.3 + Will be removed in Flask 2.4. + + .. 
versionadded:: 0.8 + """ + import warnings + + warnings.warn( + "'got_first_request' is deprecated and will be removed in Flask 2.4.", + DeprecationWarning, + stacklevel=2, + ) + return self._got_first_request + + def make_config(self, instance_relative: bool = False) -> Config: + """Used to create the config attribute by the Flask constructor. + The `instance_relative` parameter is passed in from the constructor + of Flask (there named `instance_relative_config`) and indicates if + the config should be relative to the instance path or the root path + of the application. + + .. versionadded:: 0.8 + """ + root_path = self.root_path + if instance_relative: + root_path = self.instance_path + defaults = dict(self.default_config) + defaults["DEBUG"] = get_debug_flag() + return self.config_class(root_path, defaults) + + def make_aborter(self) -> Aborter: + """Create the object to assign to :attr:`aborter`. That object + is called by :func:`flask.abort` to raise HTTP errors, and can + be called directly as well. + + By default, this creates an instance of :attr:`aborter_class`, + which defaults to :class:`werkzeug.exceptions.Aborter`. + + .. versionadded:: 2.2 + """ + return self.aborter_class() + + def auto_find_instance_path(self) -> str: + """Tries to locate the instance path if it was not provided to the + constructor of the application class. It will basically calculate + the path to a folder named ``instance`` next to your main file or + the package. + + .. versionadded:: 0.8 + """ + prefix, package_path = find_package(self.import_name) + if prefix is None: + return os.path.join(package_path, "instance") + return os.path.join(prefix, "var", f"{self.name}-instance") + + def open_instance_resource(self, resource: str, mode: str = "rb") -> t.IO[t.AnyStr]: + """Opens a resource from the application's instance folder + (:attr:`instance_path`). Otherwise works like + :meth:`open_resource`. Instance resources can also be opened for + writing. + + :param resource: the name of the resource. To access resources within + subfolders use forward slashes as separator. + :param mode: resource file opening mode, default is 'rb'. + """ + return open(os.path.join(self.instance_path, resource), mode) + + def create_jinja_environment(self) -> Environment: + """Create the Jinja environment based on :attr:`jinja_options` + and the various Jinja-related methods of the app. Changing + :attr:`jinja_options` after this will have no effect. Also adds + Flask-related globals and filters to the environment. + + .. versionchanged:: 0.11 + ``Environment.auto_reload`` set in accordance with + ``TEMPLATES_AUTO_RELOAD`` configuration option. + + .. versionadded:: 0.5 + """ + options = dict(self.jinja_options) + + if "autoescape" not in options: + options["autoescape"] = self.select_jinja_autoescape + + if "auto_reload" not in options: + auto_reload = self.config["TEMPLATES_AUTO_RELOAD"] + + if auto_reload is None: + auto_reload = self.debug + + options["auto_reload"] = auto_reload + + rv = self.jinja_environment(self, **options) + rv.globals.update( + url_for=self.url_for, + get_flashed_messages=get_flashed_messages, + config=self.config, + # request, session and g are normally added with the + # context processor for efficiency reasons but for imported + # templates we also want the proxies in there. 
+ request=request, + session=session, + g=g, + ) + rv.policies["json.dumps_function"] = self.json.dumps + return rv + + def create_global_jinja_loader(self) -> DispatchingJinjaLoader: + """Creates the loader for the Jinja2 environment. Can be used to + override just the loader while keeping the rest unchanged. Overriding + this function is discouraged; instead, override + the :meth:`jinja_loader` function. + + The global loader dispatches between the loaders of the application + and the individual blueprints. + + .. versionadded:: 0.7 + """ + return DispatchingJinjaLoader(self) + + def select_jinja_autoescape(self, filename: str) -> bool: + """Returns ``True`` if autoescaping should be active for the given + template name. If no template name is given, returns ``True``. + + .. versionchanged:: 2.2 + Autoescaping is now enabled by default for ``.svg`` files. + + .. versionadded:: 0.5 + """ + if filename is None: + return True + return filename.endswith((".html", ".htm", ".xml", ".xhtml", ".svg")) + + def update_template_context(self, context: dict) -> None: + """Update the template context with some commonly used variables. + This injects request, session, config and g into the template + context as well as everything template context processors want + to inject. Note that as of Flask 0.6, the original values + in the context will not be overridden if a context processor + decides to return a value with the same key. + + :param context: the context as a dictionary that is updated in place + to add extra variables. + """ + names: t.Iterable[str | None] = (None,) + + # A template may be rendered outside a request context. + if request: + names = chain(names, reversed(request.blueprints)) + + # The values passed to render_template take precedence. Keep a + # copy to re-apply after all context functions. + orig_ctx = context.copy() + + for name in names: + if name in self.template_context_processors: + for func in self.template_context_processors[name]: + context.update(func()) + + context.update(orig_ctx) + + def make_shell_context(self) -> dict: + """Returns the shell context for an interactive shell for this + application. This runs all the registered shell context + processors. + + .. versionadded:: 0.11 + """ + rv = {"app": self, "g": g} + for processor in self.shell_context_processors: + rv.update(processor()) + return rv + + @property + def debug(self) -> bool: + """Whether debug mode is enabled. When using ``flask run`` to start the + development server, an interactive debugger will be shown for unhandled + exceptions, and the server will be reloaded when code changes. This maps to the + :data:`DEBUG` config key. It may not behave as expected if set late. + + **Do not enable debug mode when deploying in production.** + + Default: ``False`` + """ + return self.config["DEBUG"] + + @debug.setter + def debug(self, value: bool) -> None: + self.config["DEBUG"] = value + + if self.config["TEMPLATES_AUTO_RELOAD"] is None: + self.jinja_env.auto_reload = value + + def run( + self, + host: str | None = None, + port: int | None = None, + debug: bool | None = None, + load_dotenv: bool = True, + **options: t.Any, + ) -> None: + """Runs the application on a local development server. + + Do not use ``run()`` in a production setting. It is not intended to + meet security and performance requirements for a production server. + Instead, see :doc:`/deploying/index` for WSGI server recommendations.
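# [Editor's note] A minimal sketch (not part of this diff) of the context
# processors that ``update_template_context`` above collects: a function
# registered with the standard ``@app.context_processor`` decorator returns
# a dict that is merged into every template's context. Names are illustrative.
from flask import Flask, render_template_string

app = Flask(__name__)

@app.context_processor
def inject_site_name():
    return {"site_name": "Example Site"}

with app.app_context():
    # the processor's keys are available in any rendered template
    assert render_template_string("{{ site_name }}") == "Example Site"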
+ + If the :attr:`debug` flag is set the server will automatically reload + for code changes and show a debugger in case an exception happened. + + If you want to run the application in debug mode, but disable the + code execution on the interactive debugger, you can pass + ``use_evalex=False`` as parameter. This will keep the debugger's + traceback screen active, but disable code execution. + + It is not recommended to use this function for development with + automatic reloading as this is badly supported. Instead you should + be using the :command:`flask` command line script's ``run`` support. + + .. admonition:: Keep in Mind + + Flask will suppress any server error with a generic error page + unless it is in debug mode. As such to enable just the + interactive debugger without the code reloading, you have to + invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. + Setting ``use_debugger`` to ``True`` without being in debug mode + won't catch any exceptions because there won't be any to + catch. + + :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to + have the server available externally as well. Defaults to + ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable + if present. + :param port: the port of the webserver. Defaults to ``5000`` or the + port defined in the ``SERVER_NAME`` config variable if present. + :param debug: if given, enable or disable debug mode. See + :attr:`debug`. + :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` + files to set environment variables. Will also change the working + directory to the directory containing the first file found. + :param options: the options to be forwarded to the underlying Werkzeug + server. See :func:`werkzeug.serving.run_simple` for more + information. + + .. versionchanged:: 1.0 + If installed, python-dotenv will be used to load environment + variables from :file:`.env` and :file:`.flaskenv` files. + + The :envvar:`FLASK_DEBUG` environment variable will override :attr:`debug`. + + Threaded mode is enabled by default. + + .. versionchanged:: 0.10 + The default port is now picked from the ``SERVER_NAME`` + variable. + """ + # Ignore this call so that it doesn't start another server if + # the 'flask run' command is used. 
+ if os.environ.get("FLASK_RUN_FROM_CLI") == "true": + if not is_running_from_reloader(): + click.secho( + " * Ignoring a call to 'app.run()' that would block" + " the current 'flask' CLI command.\n" + " Only call 'app.run()' in an 'if __name__ ==" + ' "__main__"\' guard.', + fg="red", + ) + + return + + if get_load_dotenv(load_dotenv): + cli.load_dotenv() + + # if set, env var overrides existing value + if "FLASK_DEBUG" in os.environ: + self.debug = get_debug_flag() + + # debug passed to method overrides all other sources + if debug is not None: + self.debug = bool(debug) + + server_name = self.config.get("SERVER_NAME") + sn_host = sn_port = None + + if server_name: + sn_host, _, sn_port = server_name.partition(":") + + if not host: + if sn_host: + host = sn_host + else: + host = "127.0.0.1" + + if port or port == 0: + port = int(port) + elif sn_port: + port = int(sn_port) + else: + port = 5000 + + options.setdefault("use_reloader", self.debug) + options.setdefault("use_debugger", self.debug) + options.setdefault("threaded", True) + + cli.show_server_banner(self.debug, self.name) + + from werkzeug.serving import run_simple + + try: + run_simple(t.cast(str, host), port, self, **options) + finally: + # reset the first request information if the development server + # reset normally. This makes it possible to restart the server + # without reloader and that stuff from an interactive shell. + self._got_first_request = False + + def test_client(self, use_cookies: bool = True, **kwargs: t.Any) -> FlaskClient: + """Creates a test client for this application. For information + about unit testing head over to :doc:`/testing`. + + Note that if you are testing for assertions or exceptions in your + application code, you must set ``app.testing = True`` in order for the + exceptions to propagate to the test client. Otherwise, the exception + will be handled by the application (not visible to the test client) and + the only indication of an AssertionError or other exception will be a + 500 status code response to the test client. See the :attr:`testing` + attribute. For example:: + + app.testing = True + client = app.test_client() + + The test client can be used in a ``with`` block to defer the closing down + of the context until the end of the ``with`` block. This is useful if + you want to access the context locals for testing:: + + with app.test_client() as c: + rv = c.get('/?vodka=42') + assert request.args['vodka'] == '42' + + Additionally, you may pass optional keyword arguments that will then + be passed to the application's :attr:`test_client_class` constructor. + For example:: + + from flask.testing import FlaskClient + + class CustomClient(FlaskClient): + def __init__(self, *args, **kwargs): + self._authentication = kwargs.pop("authentication") + super(CustomClient,self).__init__( *args, **kwargs) + + app.test_client_class = CustomClient + client = app.test_client(authentication='Basic ....') + + See :class:`~flask.testing.FlaskClient` for more information. + + .. versionchanged:: 0.4 + added support for ``with`` block usage for the client. + + .. versionadded:: 0.7 + The `use_cookies` parameter was added as well as the ability + to override the client to be used by setting the + :attr:`test_client_class` attribute. + + .. versionchanged:: 0.11 + Added `**kwargs` to support passing additional keyword arguments to + the constructor of :attr:`test_client_class`. 
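# [Editor's note] Illustrative ``run()`` call wiring up the parameters
# documented above; host/port values are placeholders, and this is for
# local development only (see the production warning in the docstring).
from flask import Flask

app = Flask(__name__)

@app.route("/")
def index():
    return "hello"

if __name__ == "__main__":
    # reloader, debugger and threaded mode follow ``debug`` and the defaults
    app.run(host="127.0.0.1", port=8000, debug=True)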
+ """ + cls = self.test_client_class + if cls is None: + from .testing import FlaskClient as cls + return cls( # type: ignore + self, self.response_class, use_cookies=use_cookies, **kwargs + ) + + def test_cli_runner(self, **kwargs: t.Any) -> FlaskCliRunner: + """Create a CLI runner for testing CLI commands. + See :ref:`testing-cli`. + + Returns an instance of :attr:`test_cli_runner_class`, by default + :class:`~flask.testing.FlaskCliRunner`. The Flask app object is + passed as the first argument. + + .. versionadded:: 1.0 + """ + cls = self.test_cli_runner_class + + if cls is None: + from .testing import FlaskCliRunner as cls + + return cls(self, **kwargs) # type: ignore + + @setupmethod + def register_blueprint(self, blueprint: Blueprint, **options: t.Any) -> None: + """Register a :class:`~flask.Blueprint` on the application. Keyword + arguments passed to this method will override the defaults set on the + blueprint. + + Calls the blueprint's :meth:`~flask.Blueprint.register` method after + recording the blueprint in the application's :attr:`blueprints`. + + :param blueprint: The blueprint to register. + :param url_prefix: Blueprint routes will be prefixed with this. + :param subdomain: Blueprint routes will match on this subdomain. + :param url_defaults: Blueprint routes will use these default values for + view arguments. + :param options: Additional keyword arguments are passed to + :class:`~flask.blueprints.BlueprintSetupState`. They can be + accessed in :meth:`~flask.Blueprint.record` callbacks. + + .. versionchanged:: 2.0.1 + The ``name`` option can be used to change the (pre-dotted) + name the blueprint is registered with. This allows the same + blueprint to be registered multiple times with unique names + for ``url_for``. + + .. versionadded:: 0.7 + """ + blueprint.register(self, options) + + def iter_blueprints(self) -> t.ValuesView[Blueprint]: + """Iterates over all blueprints by the order they were registered. + + .. versionadded:: 0.11 + """ + return self.blueprints.values() + + @setupmethod + def add_url_rule( + self, + rule: str, + endpoint: str | None = None, + view_func: ft.RouteCallable | None = None, + provide_automatic_options: bool | None = None, + **options: t.Any, + ) -> None: + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) # type: ignore + options["endpoint"] = endpoint + methods = options.pop("methods", None) + + # if the methods are not given and the view_func object knows its + # methods we can use that instead. If neither exists, we go with + # a tuple of only ``GET`` as default. + if methods is None: + methods = getattr(view_func, "methods", None) or ("GET",) + if isinstance(methods, str): + raise TypeError( + "Allowed methods must be a list of strings, for" + ' example: @app.route(..., methods=["POST"])' + ) + methods = {item.upper() for item in methods} + + # Methods that should always be added + required_methods = set(getattr(view_func, "required_methods", ())) + + # starting with Flask 0.8 the view_func object can disable and + # force-enable the automatic options handling. + if provide_automatic_options is None: + provide_automatic_options = getattr( + view_func, "provide_automatic_options", None + ) + + if provide_automatic_options is None: + if "OPTIONS" not in methods: + provide_automatic_options = True + required_methods.add("OPTIONS") + else: + provide_automatic_options = False + + # Add the required methods now. 
+ methods |= required_methods + + rule = self.url_rule_class(rule, methods=methods, **options) + rule.provide_automatic_options = provide_automatic_options # type: ignore + + self.url_map.add(rule) + if view_func is not None: + old_func = self.view_functions.get(endpoint) + if old_func is not None and old_func != view_func: + raise AssertionError( + "View function mapping is overwriting an existing" + f" endpoint function: {endpoint}" + ) + self.view_functions[endpoint] = view_func + + @setupmethod + def template_filter( + self, name: str | None = None + ) -> t.Callable[[T_template_filter], T_template_filter]: + """A decorator that is used to register custom template filter. + You can specify a name for the filter, otherwise the function + name will be used. Example:: + + @app.template_filter() + def reverse(s): + return s[::-1] + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + + def decorator(f: T_template_filter) -> T_template_filter: + self.add_template_filter(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_filter( + self, f: ft.TemplateFilterCallable, name: str | None = None + ) -> None: + """Register a custom template filter. Works exactly like the + :meth:`template_filter` decorator. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + self.jinja_env.filters[name or f.__name__] = f + + @setupmethod + def template_test( + self, name: str | None = None + ) -> t.Callable[[T_template_test], T_template_test]: + """A decorator that is used to register custom template test. + You can specify a name for the test, otherwise the function + name will be used. Example:: + + @app.template_test() + def is_prime(n): + if n == 2: + return True + for i in range(2, int(math.ceil(math.sqrt(n))) + 1): + if n % i == 0: + return False + return True + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def decorator(f: T_template_test) -> T_template_test: + self.add_template_test(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_test( + self, f: ft.TemplateTestCallable, name: str | None = None + ) -> None: + """Register a custom template test. Works exactly like the + :meth:`template_test` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + self.jinja_env.tests[name or f.__name__] = f + + @setupmethod + def template_global( + self, name: str | None = None + ) -> t.Callable[[T_template_global], T_template_global]: + """A decorator that is used to register a custom template global function. + You can specify a name for the global function, otherwise the function + name will be used. Example:: + + @app.template_global() + def double(n): + return 2 * n + + .. versionadded:: 0.10 + + :param name: the optional name of the global function, otherwise the + function name will be used. + """ + + def decorator(f: T_template_global) -> T_template_global: + self.add_template_global(f, name=name) + return f + + return decorator + + @setupmethod + def add_template_global( + self, f: ft.TemplateGlobalCallable, name: str | None = None + ) -> None: + """Register a custom template global function. Works exactly like the + :meth:`template_global` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the global function, otherwise the + function name will be used. 
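# [Editor's note] Hedged sketch of the registration helpers above: the
# ``add_*`` methods are the imperative equivalents of the decorators.
# ``shout`` is a hypothetical filter used only for illustration.
from flask import Flask, render_template_string

app = Flask(__name__)

def shout(s):
    return s.upper()

# same effect as decorating ``shout`` with @app.template_filter("shout")
app.add_template_filter(shout, name="shout")

with app.app_context():
    assert render_template_string("{{ 'hi' | shout }}") == "HI"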
+ """ + self.jinja_env.globals[name or f.__name__] = f + + @setupmethod + def teardown_appcontext(self, f: T_teardown) -> T_teardown: + """Registers a function to be called when the application + context is popped. The application context is typically popped + after the request context for each request, at the end of CLI + commands, or after a manually pushed context ends. + + .. code-block:: python + + with app.app_context(): + ... + + When the ``with`` block exits (or ``ctx.pop()`` is called), the + teardown functions are called just before the app context is + made inactive. Since a request context typically also manages an + application context it would also be called when you pop a + request context. + + When a teardown function was called because of an unhandled + exception it will be passed an error object. If an + :meth:`errorhandler` is registered, it will handle the exception + and the teardown will not receive it. + + Teardown functions must avoid raising exceptions. If they + execute code that might fail they must surround that code with a + ``try``/``except`` block and log any errors. + + The return values of teardown functions are ignored. + + .. versionadded:: 0.9 + """ + self.teardown_appcontext_funcs.append(f) + return f + + @setupmethod + def shell_context_processor( + self, f: T_shell_context_processor + ) -> T_shell_context_processor: + """Registers a shell context processor function. + + .. versionadded:: 0.11 + """ + self.shell_context_processors.append(f) + return f + + def _find_error_handler(self, e: Exception) -> ft.ErrorHandlerCallable | None: + """Return a registered error handler for an exception in this order: + blueprint handler for a specific code, app handler for a specific code, + blueprint handler for an exception class, app handler for an exception + class, or ``None`` if a suitable handler is not found. + """ + exc_class, code = self._get_exc_class_and_code(type(e)) + names = (*request.blueprints, None) + + for c in (code, None) if code is not None else (None,): + for name in names: + handler_map = self.error_handler_spec[name][c] + + if not handler_map: + continue + + for cls in exc_class.__mro__: + handler = handler_map.get(cls) + + if handler is not None: + return handler + return None + + def handle_http_exception( + self, e: HTTPException + ) -> HTTPException | ft.ResponseReturnValue: + """Handles an HTTP exception. By default this will invoke the + registered error handlers and fall back to returning the + exception as response. + + .. versionchanged:: 1.0.3 + ``RoutingException``, used internally for actions such as + slash redirects during routing, is not passed to error + handlers. + + .. versionchanged:: 1.0 + Exceptions are looked up by code *and* by MRO, so + ``HTTPException`` subclasses can be handled with a catch-all + handler for the base ``HTTPException``. + + .. versionadded:: 0.3 + """ + # Proxy exceptions don't have error codes. We want to always return + # those unchanged as errors + if e.code is None: + return e + + # RoutingExceptions are used internally to trigger routing + # actions, such as slash redirects raising RequestRedirect. They + # are not raised or handled in user code. + if isinstance(e, RoutingException): + return e + + handler = self._find_error_handler(e) + if handler is None: + return e + return self.ensure_sync(handler)(e) + + def trap_http_exception(self, e: Exception) -> bool: + """Checks if an HTTP exception should be trapped or not. 
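# [Editor's note] The canonical use of ``teardown_appcontext`` documented
# above: closing a per-context resource. ``sqlite3``, ``get_db`` and the
# ``g.db`` attribute are illustrative choices, not anything this diff prescribes.
import sqlite3

from flask import Flask, g

app = Flask(__name__)

def get_db():
    # lazily open one connection per application context
    if "db" not in g:
        g.db = sqlite3.connect("app.db")
    return g.db

@app.teardown_appcontext
def close_db(exc):
    # runs when the context pops; ``exc`` is the unhandled error, if any
    db = g.pop("db", None)
    if db is not None:
        db.close()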
By default + this will return ``False`` for all exceptions except for a bad request + key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``. It + also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``. + + This is called for all HTTP exceptions raised by a view function. + If it returns ``True`` for any exception the error handler for this + exception is not called and it shows up as regular exception in the + traceback. This is helpful for debugging implicitly raised HTTP + exceptions. + + .. versionchanged:: 1.0 + Bad request errors are not trapped by default in debug mode. + + .. versionadded:: 0.8 + """ + if self.config["TRAP_HTTP_EXCEPTIONS"]: + return True + + trap_bad_request = self.config["TRAP_BAD_REQUEST_ERRORS"] + + # if unset, trap key errors in debug mode + if ( + trap_bad_request is None + and self.debug + and isinstance(e, BadRequestKeyError) + ): + return True + + if trap_bad_request: + return isinstance(e, BadRequest) + + return False + + def handle_user_exception( + self, e: Exception + ) -> HTTPException | ft.ResponseReturnValue: + """This method is called whenever an exception occurs that + should be handled. A special case is :class:`~werkzeug + .exceptions.HTTPException` which is forwarded to the + :meth:`handle_http_exception` method. This function will either + return a response value or reraise the exception with the same + traceback. + + .. versionchanged:: 1.0 + Key errors raised from request data like ``form`` show the + bad key in debug mode rather than a generic bad request + message. + + .. versionadded:: 0.7 + """ + if isinstance(e, BadRequestKeyError) and ( + self.debug or self.config["TRAP_BAD_REQUEST_ERRORS"] + ): + e.show_exception = True + + if isinstance(e, HTTPException) and not self.trap_http_exception(e): + return self.handle_http_exception(e) + + handler = self._find_error_handler(e) + + if handler is None: + raise + + return self.ensure_sync(handler)(e) + + def handle_exception(self, e: Exception) -> Response: + """Handle an exception that did not have an error handler + associated with it, or that was raised from an error handler. + This always causes a 500 ``InternalServerError``. + + Always sends the :data:`got_request_exception` signal. + + If :data:`PROPAGATE_EXCEPTIONS` is ``True``, such as in debug + mode, the error will be re-raised so that the debugger can + display it. Otherwise, the original exception is logged, and + an :exc:`~werkzeug.exceptions.InternalServerError` is returned. + + If an error handler is registered for ``InternalServerError`` or + ``500``, it will be used. For consistency, the handler will + always receive the ``InternalServerError``. The original + unhandled exception is available as ``e.original_exception``. + + .. versionchanged:: 1.1.0 + Always passes the ``InternalServerError`` instance to the + handler, setting ``original_exception`` to the unhandled + error. + + .. versionchanged:: 1.1.0 + ``after_request`` functions and other finalization is done + even for the default 500 response when there is no handler. + + .. versionadded:: 0.3 + """ + exc_info = sys.exc_info() + got_request_exception.send(self, _async_wrapper=self.ensure_sync, exception=e) + propagate = self.config["PROPAGATE_EXCEPTIONS"] + + if propagate is None: + propagate = self.testing or self.debug + + if propagate: + # Re-raise if called with an active exception, otherwise + # raise the passed in exception. 
+ if exc_info[1] is e: + raise + + raise e + + self.log_exception(exc_info) + server_error: InternalServerError | ft.ResponseReturnValue + server_error = InternalServerError(original_exception=e) + handler = self._find_error_handler(server_error) + + if handler is not None: + server_error = self.ensure_sync(handler)(server_error) + + return self.finalize_request(server_error, from_error_handler=True) + + def log_exception( + self, + exc_info: (tuple[type, BaseException, TracebackType] | tuple[None, None, None]), + ) -> None: + """Logs an exception. This is called by :meth:`handle_exception` + if debugging is disabled and right before the handler is called. + The default implementation logs the exception as error on the + :attr:`logger`. + + .. versionadded:: 0.8 + """ + self.logger.error( + f"Exception on {request.path} [{request.method}]", exc_info=exc_info + ) + + def raise_routing_exception(self, request: Request) -> t.NoReturn: + """Intercept routing exceptions and possibly do something else. + + In debug mode, intercept a routing redirect and replace it with + an error if the body will be discarded. + + With modern Werkzeug this shouldn't occur, since it now uses a + 308 status which tells the browser to resend the method and + body. + + .. versionchanged:: 2.1 + Don't intercept 307 and 308 redirects. + + :meta private: + :internal: + """ + if ( + not self.debug + or not isinstance(request.routing_exception, RequestRedirect) + or request.routing_exception.code in {307, 308} + or request.method in {"GET", "HEAD", "OPTIONS"} + ): + raise request.routing_exception # type: ignore + + from .debughelpers import FormDataRoutingRedirect + + raise FormDataRoutingRedirect(request) + + def dispatch_request(self) -> ft.ResponseReturnValue: + """Does the request dispatching. Matches the URL and returns the + return value of the view or error handler. This does not have to + be a response object. In order to convert the return value to a + proper response object, call :func:`make_response`. + + .. versionchanged:: 0.7 + This no longer does the exception handling, this code was + moved to the new :meth:`full_dispatch_request`. + """ + req = request_ctx.request + if req.routing_exception is not None: + self.raise_routing_exception(req) + rule: Rule = req.url_rule # type: ignore[assignment] + # if we provide automatic options for this URL and the + # request came with the OPTIONS method, reply automatically + if ( + getattr(rule, "provide_automatic_options", False) + and req.method == "OPTIONS" + ): + return self.make_default_options_response() + # otherwise dispatch to the handler for that endpoint + view_args: dict[str, t.Any] = req.view_args # type: ignore[assignment] + return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args) + + def full_dispatch_request(self) -> Response: + """Dispatches the request and on top of that performs request + pre and postprocessing as well as HTTP exception catching and + error handling. + + .. 
versionadded:: 0.7 + """ + self._got_first_request = True + + try: + request_started.send(self, _async_wrapper=self.ensure_sync) + rv = self.preprocess_request() + if rv is None: + rv = self.dispatch_request() + except Exception as e: + rv = self.handle_user_exception(e) + return self.finalize_request(rv) + + def finalize_request( + self, + rv: ft.ResponseReturnValue | HTTPException, + from_error_handler: bool = False, + ) -> Response: + """Given the return value from a view function this finalizes + the request by converting it into a response and invoking the + postprocessing functions. This is invoked for both normal + request dispatching as well as error handlers. + + Because this means that it might be called as a result of a + failure a special safe mode is available which can be enabled + with the `from_error_handler` flag. If enabled, failures in + response processing will be logged and otherwise ignored. + + :internal: + """ + response = self.make_response(rv) + try: + response = self.process_response(response) + request_finished.send( + self, _async_wrapper=self.ensure_sync, response=response + ) + except Exception: + if not from_error_handler: + raise + self.logger.exception( + "Request finalizing failed with an error while handling an error" + ) + return response + + def make_default_options_response(self) -> Response: + """This method is called to create the default ``OPTIONS`` response. + This can be changed through subclassing to change the default + behavior of ``OPTIONS`` responses. + + .. versionadded:: 0.7 + """ + adapter = request_ctx.url_adapter + methods = adapter.allowed_methods() # type: ignore[union-attr] + rv = self.response_class() + rv.allow.update(methods) + return rv + + def should_ignore_error(self, error: BaseException | None) -> bool: + """This is called to figure out if an error should be ignored + or not as far as the teardown system is concerned. If this + function returns ``True`` then the teardown handlers will not be + passed the error. + + .. versionadded:: 0.10 + """ + return False + + def ensure_sync(self, func: t.Callable) -> t.Callable: + """Ensure that the function is synchronous for WSGI workers. + Plain ``def`` functions are returned as-is. ``async def`` + functions are wrapped to run and wait for the response. + + Override this method to change how the app runs async views. + + .. versionadded:: 2.0 + """ + if iscoroutinefunction(func): + return self.async_to_sync(func) + + return func + + def async_to_sync( + self, func: t.Callable[..., t.Coroutine] + ) -> t.Callable[..., t.Any]: + """Return a sync function that will run the coroutine function. + + .. code-block:: python + + result = app.async_to_sync(func)(*args, **kwargs) + + Override this method to change how the app converts async code + to be synchronously callable. + + .. versionadded:: 2.0 + """ + try: + from asgiref.sync import async_to_sync as asgiref_async_to_sync + except ImportError: + raise RuntimeError( + "Install Flask with the 'async' extra in order to use async views." + ) from None + + return asgiref_async_to_sync(func) + + def url_for( + self, + endpoint: str, + *, + _anchor: str | None = None, + _method: str | None = None, + _scheme: str | None = None, + _external: bool | None = None, + **values: t.Any, + ) -> str: + """Generate a URL to the given endpoint with the given values. + + This is called by :func:`flask.url_for`, and can be called + directly as well. 
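# [Editor's note] What ``ensure_sync``/``async_to_sync`` above enable: plain
# ``async def`` views run under WSGI, awaited via asgiref. This assumes the
# optional extra is installed (``pip install "flask[async]"``).
import asyncio

from flask import Flask

app = Flask(__name__)

@app.route("/slow")
async def slow():
    # the coroutine is wrapped by ensure_sync() and awaited per request
    await asyncio.sleep(0.1)
    return "done"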
+ + An *endpoint* is the name of a URL rule, usually added with + :meth:`@app.route() <route>`, and usually the same name as the + view function. A route defined in a :class:`~flask.Blueprint` + will prepend the blueprint's name separated by a ``.`` to the + endpoint. + + In some cases, such as email messages, you want URLs to include + the scheme and domain, like ``https://example.com/hello``. When + not in an active request, URLs will be external by default, but + this requires setting :data:`SERVER_NAME` so Flask knows what + domain to use. :data:`APPLICATION_ROOT` and + :data:`PREFERRED_URL_SCHEME` should also be configured as + needed. This config is only used when not in an active request. + + Functions can be decorated with :meth:`url_defaults` to modify + keyword arguments before the URL is built. + + If building fails for some reason, such as an unknown endpoint + or incorrect values, the app's :meth:`handle_url_build_error` + method is called. If that returns a string, that is returned, + otherwise a :exc:`~werkzeug.routing.BuildError` is raised. + + :param endpoint: The endpoint name associated with the URL to + generate. If this starts with a ``.``, the current blueprint + name (if any) will be used. + :param _anchor: If given, append this as ``#anchor`` to the URL. + :param _method: If given, generate the URL associated with this + method for the endpoint. + :param _scheme: If given, the URL will have this scheme if it + is external. + :param _external: If given, prefer the URL to be internal + (False) or require it to be external (True). External URLs + include the scheme and domain. When not in an active + request, URLs are external by default. + :param values: Values to use for the variable parts of the URL + rule. Unknown keys are appended as query string arguments, + like ``?a=b&c=d``. + + .. versionadded:: 2.2 + Moved from ``flask.url_for``, which calls this method. + """ + req_ctx = _cv_request.get(None) + + if req_ctx is not None: + url_adapter = req_ctx.url_adapter + blueprint_name = req_ctx.request.blueprint + + # If the endpoint starts with "." and the request matches a + # blueprint, the endpoint is relative to the blueprint. + if endpoint[:1] == ".": + if blueprint_name is not None: + endpoint = f"{blueprint_name}{endpoint}" + else: + endpoint = endpoint[1:] + + # When in a request, generate a URL without scheme and + # domain by default, unless a scheme is given. + if _external is None: + _external = _scheme is not None + else: + app_ctx = _cv_app.get(None) + + # If called by helpers.url_for, an app context is active, + # use its url_adapter. Otherwise, app.url_for was called + # directly, build an adapter. + if app_ctx is not None: + url_adapter = app_ctx.url_adapter + else: + url_adapter = self.create_url_adapter(None) + + if url_adapter is None: + raise RuntimeError( + "Unable to build URLs outside an active request" + " without 'SERVER_NAME' configured. Also configure" + " 'APPLICATION_ROOT' and 'PREFERRED_URL_SCHEME' as" + " needed." + ) + + # When outside a request, generate a URL with scheme and + # domain by default. + if _external is None: + _external = True + + # It is an error to set _scheme when _external=False, in order + # to avoid accidental insecure URLs. 
+        if _scheme is not None and not _external:
+            raise ValueError("When specifying '_scheme', '_external' must be True.")
+
+        self.inject_url_defaults(endpoint, values)
+
+        try:
+            rv = url_adapter.build(  # type: ignore[union-attr]
+                endpoint,
+                values,
+                method=_method,
+                url_scheme=_scheme,
+                force_external=_external,
+            )
+        except BuildError as error:
+            values.update(
+                _anchor=_anchor, _method=_method, _scheme=_scheme, _external=_external
+            )
+            return self.handle_url_build_error(error, endpoint, values)
+
+        if _anchor is not None:
+            _anchor = _url_quote(_anchor, safe="%!#$&'()*+,/:;=?@")
+            rv = f"{rv}#{_anchor}"
+
+        return rv
+
+    def redirect(self, location: str, code: int = 302) -> BaseResponse:
+        """Create a redirect response object.
+
+        This is called by :func:`flask.redirect`, and can be called
+        directly as well.
+
+        :param location: The URL to redirect to.
+        :param code: The status code for the redirect.
+
+        .. versionadded:: 2.2
+            Moved from ``flask.redirect``, which calls this method.
+        """
+        return _wz_redirect(location, code=code, Response=self.response_class)
+
+    def make_response(self, rv: ft.ResponseReturnValue) -> Response:
+        """Convert the return value from a view function to an instance of
+        :attr:`response_class`.
+
+        :param rv: the return value from the view function. The view function
+            must return a response. Returning ``None``, or the view ending
+            without returning, is not allowed. The following types are allowed
+            for ``view_rv``:
+
+            ``str``
+                A response object is created with the string encoded to UTF-8
+                as the body.
+
+            ``bytes``
+                A response object is created with the bytes as the body.
+
+            ``dict``
+                A dictionary that will be jsonify'd before being returned.
+
+            ``list``
+                A list that will be jsonify'd before being returned.
+
+            ``generator`` or ``iterator``
+                A generator that returns ``str`` or ``bytes`` to be
+                streamed as the response.
+
+            ``tuple``
+                Either ``(body, status, headers)``, ``(body, status)``, or
+                ``(body, headers)``, where ``body`` is any of the other types
+                allowed here, ``status`` is a string or an integer, and
+                ``headers`` is a dictionary or a list of ``(key, value)``
+                tuples. If ``body`` is a :attr:`response_class` instance,
+                ``status`` overwrites the existing value and ``headers`` are
+                extended.
+
+            :attr:`response_class`
+                The object is returned unchanged.
+
+            other :class:`~werkzeug.wrappers.Response` class
+                The object is coerced to :attr:`response_class`.
+
+            :func:`callable`
+                The function is called as a WSGI application. The result is
+                used to create a response object.
+
+        .. versionchanged:: 2.2
+            A generator will be converted to a streaming response.
+            A list will be converted to a JSON response.
+
+        .. versionchanged:: 1.1
+            A dict will be converted to a JSON response.
+
+        .. versionchanged:: 0.9
+            Previously a tuple was interpreted as the arguments for the
+            response object.
+        """
+
+        status = headers = None
+
+        # unpack tuple returns
+        if isinstance(rv, tuple):
+            len_rv = len(rv)
+
+            # a 3-tuple is unpacked directly
+            if len_rv == 3:
+                rv, status, headers = rv  # type: ignore[misc]
+            # decide if a 2-tuple has status or headers
+            elif len_rv == 2:
+                if isinstance(rv[1], (Headers, dict, tuple, list)):
+                    rv, headers = rv
+                else:
+                    rv, status = rv  # type: ignore[assignment,misc]
+            # other sized tuples are not allowed
+            else:
+                raise TypeError(
+                    "The view function did not return a valid response tuple."
+                    " The tuple must have the form (body, status, headers),"
+                    " (body, status), or (body, headers)."
+ ) + + # the body must not be None + if rv is None: + raise TypeError( + f"The view function for {request.endpoint!r} did not" + " return a valid response. The function either returned" + " None or ended without a return statement." + ) + + # make sure the body is an instance of the response class + if not isinstance(rv, self.response_class): + if isinstance(rv, (str, bytes, bytearray)) or isinstance(rv, _abc_Iterator): + # let the response class set the status and headers instead of + # waiting to do it manually, so that the class can handle any + # special logic + rv = self.response_class( + rv, + status=status, + headers=headers, # type: ignore[arg-type] + ) + status = headers = None + elif isinstance(rv, (dict, list)): + rv = self.json.response(rv) + elif isinstance(rv, BaseResponse) or callable(rv): + # evaluate a WSGI callable, or coerce a different response + # class to the correct type + try: + rv = self.response_class.force_type( + rv, request.environ # type: ignore[arg-type] + ) + except TypeError as e: + raise TypeError( + f"{e}\nThe view function did not return a valid" + " response. The return type must be a string," + " dict, list, tuple with headers or status," + " Response instance, or WSGI callable, but it" + f" was a {type(rv).__name__}." + ).with_traceback(sys.exc_info()[2]) from None + else: + raise TypeError( + "The view function did not return a valid" + " response. The return type must be a string," + " dict, list, tuple with headers or status," + " Response instance, or WSGI callable, but it was a" + f" {type(rv).__name__}." + ) + + rv = t.cast(Response, rv) + # prefer the status if it was provided + if status is not None: + if isinstance(status, (str, bytes, bytearray)): + rv.status = status + else: + rv.status_code = status + + # extend existing headers with provided headers + if headers: + rv.headers.update(headers) # type: ignore[arg-type] + + return rv + + def create_url_adapter(self, request: Request | None) -> MapAdapter | None: + """Creates a URL adapter for the given request. The URL adapter + is created at a point where the request context is not yet set + up so the request is passed explicitly. + + .. versionadded:: 0.6 + + .. versionchanged:: 0.9 + This can now also be called without a request object when the + URL adapter is created for the application context. + + .. versionchanged:: 1.0 + :data:`SERVER_NAME` no longer implicitly enables subdomain + matching. Use :attr:`subdomain_matching` instead. + """ + if request is not None: + # If subdomain matching is disabled (the default), use the + # default subdomain in all cases. This should be the default + # in Werkzeug but it currently does not have that feature. + if not self.subdomain_matching: + subdomain = self.url_map.default_subdomain or None + else: + subdomain = None + + return self.url_map.bind_to_environ( + request.environ, + server_name=self.config["SERVER_NAME"], + subdomain=subdomain, + ) + # We need at the very least the server name to be set for this + # to work. + if self.config["SERVER_NAME"] is not None: + return self.url_map.bind( + self.config["SERVER_NAME"], + script_name=self.config["APPLICATION_ROOT"], + url_scheme=self.config["PREFERRED_URL_SCHEME"], + ) + + return None + + def inject_url_defaults(self, endpoint: str, values: dict) -> None: + """Injects the URL defaults for the given endpoint directly into + the values dictionary passed. This is used internally and + automatically called on URL building. + + .. 
versionadded:: 0.7 + """ + names: t.Iterable[str | None] = (None,) + + # url_for may be called outside a request context, parse the + # passed endpoint instead of using request.blueprints. + if "." in endpoint: + names = chain( + names, reversed(_split_blueprint_path(endpoint.rpartition(".")[0])) + ) + + for name in names: + if name in self.url_default_functions: + for func in self.url_default_functions[name]: + func(endpoint, values) + + def handle_url_build_error( + self, error: BuildError, endpoint: str, values: dict[str, t.Any] + ) -> str: + """Called by :meth:`.url_for` if a + :exc:`~werkzeug.routing.BuildError` was raised. If this returns + a value, it will be returned by ``url_for``, otherwise the error + will be re-raised. + + Each function in :attr:`url_build_error_handlers` is called with + ``error``, ``endpoint`` and ``values``. If a function returns + ``None`` or raises a ``BuildError``, it is skipped. Otherwise, + its return value is returned by ``url_for``. + + :param error: The active ``BuildError`` being handled. + :param endpoint: The endpoint being built. + :param values: The keyword arguments passed to ``url_for``. + """ + for handler in self.url_build_error_handlers: + try: + rv = handler(error, endpoint, values) + except BuildError as e: + # make error available outside except block + error = e + else: + if rv is not None: + return rv + + # Re-raise if called with an active exception, otherwise raise + # the passed in exception. + if error is sys.exc_info()[1]: + raise + + raise error + + def preprocess_request(self) -> ft.ResponseReturnValue | None: + """Called before the request is dispatched. Calls + :attr:`url_value_preprocessors` registered with the app and the + current blueprint (if any). Then calls :attr:`before_request_funcs` + registered with the app and the blueprint. + + If any :meth:`before_request` handler returns a non-None value, the + value is handled as if it was the return value from the view, and + further request handling is stopped. + """ + names = (None, *reversed(request.blueprints)) + + for name in names: + if name in self.url_value_preprocessors: + for url_func in self.url_value_preprocessors[name]: + url_func(request.endpoint, request.view_args) + + for name in names: + if name in self.before_request_funcs: + for before_func in self.before_request_funcs[name]: + rv = self.ensure_sync(before_func)() + + if rv is not None: + return rv + + return None + + def process_response(self, response: Response) -> Response: + """Can be overridden in order to modify the response object + before it's sent to the WSGI server. By default this will + call all the :meth:`after_request` decorated functions. + + .. versionchanged:: 0.5 + As of Flask 0.5 the functions registered for after request + execution are called in reverse order of registration. + + :param response: a :attr:`response_class` object. + :return: a new response object or the same, has to be an + instance of :attr:`response_class`. 
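+
+        A sketch of a function that participates in this step (the
+        header name is illustrative)::
+
+            @app.after_request
+            def add_header(response):
+                response.headers["X-Example"] = "yes"
+                return response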
+ """ + ctx = request_ctx._get_current_object() # type: ignore[attr-defined] + + for func in ctx._after_request_functions: + response = self.ensure_sync(func)(response) + + for name in chain(request.blueprints, (None,)): + if name in self.after_request_funcs: + for func in reversed(self.after_request_funcs[name]): + response = self.ensure_sync(func)(response) + + if not self.session_interface.is_null_session(ctx.session): + self.session_interface.save_session(self, ctx.session, response) + + return response + + def do_teardown_request( + self, exc: BaseException | None = _sentinel # type: ignore + ) -> None: + """Called after the request is dispatched and the response is + returned, right before the request context is popped. + + This calls all functions decorated with + :meth:`teardown_request`, and :meth:`Blueprint.teardown_request` + if a blueprint handled the request. Finally, the + :data:`request_tearing_down` signal is sent. + + This is called by + :meth:`RequestContext.pop() <flask.ctx.RequestContext.pop>`, + which may be delayed during testing to maintain access to + resources. + + :param exc: An unhandled exception raised while dispatching the + request. Detected from the current exception information if + not passed. Passed to each teardown function. + + .. versionchanged:: 0.9 + Added the ``exc`` argument. + """ + if exc is _sentinel: + exc = sys.exc_info()[1] + + for name in chain(request.blueprints, (None,)): + if name in self.teardown_request_funcs: + for func in reversed(self.teardown_request_funcs[name]): + self.ensure_sync(func)(exc) + + request_tearing_down.send(self, _async_wrapper=self.ensure_sync, exc=exc) + + def do_teardown_appcontext( + self, exc: BaseException | None = _sentinel # type: ignore + ) -> None: + """Called right before the application context is popped. + + When handling a request, the application context is popped + after the request context. See :meth:`do_teardown_request`. + + This calls all functions decorated with + :meth:`teardown_appcontext`. Then the + :data:`appcontext_tearing_down` signal is sent. + + This is called by + :meth:`AppContext.pop() <flask.ctx.AppContext.pop>`. + + .. versionadded:: 0.9 + """ + if exc is _sentinel: + exc = sys.exc_info()[1] + + for func in reversed(self.teardown_appcontext_funcs): + self.ensure_sync(func)(exc) + + appcontext_tearing_down.send(self, _async_wrapper=self.ensure_sync, exc=exc) + + def app_context(self) -> AppContext: + """Create an :class:`~flask.ctx.AppContext`. Use as a ``with`` + block to push the context, which will make :data:`current_app` + point at this application. + + An application context is automatically pushed by + :meth:`RequestContext.push() <flask.ctx.RequestContext.push>` + when handling a request, and when running a CLI command. Use + this to manually create a context outside of these situations. + + :: + + with app.app_context(): + init_db() + + See :doc:`/appcontext`. + + .. versionadded:: 0.9 + """ + return AppContext(self) + + def request_context(self, environ: dict) -> RequestContext: + """Create a :class:`~flask.ctx.RequestContext` representing a + WSGI environment. Use a ``with`` block to push the context, + which will make :data:`request` point at this request. + + See :doc:`/reqcontext`. + + Typically you should not call this from your own code. A request + context is automatically pushed by the :meth:`wsgi_app` when + handling a request. Use :meth:`test_request_context` to create + an environment and context instead of this method. 
+ + :param environ: a WSGI environment + """ + return RequestContext(self, environ) + + def test_request_context(self, *args: t.Any, **kwargs: t.Any) -> RequestContext: + """Create a :class:`~flask.ctx.RequestContext` for a WSGI + environment created from the given values. This is mostly useful + during testing, where you may want to run a function that uses + request data without dispatching a full request. + + See :doc:`/reqcontext`. + + Use a ``with`` block to push the context, which will make + :data:`request` point at the request for the created + environment. :: + + with app.test_request_context(...): + generate_report() + + When using the shell, it may be easier to push and pop the + context manually to avoid indentation. :: + + ctx = app.test_request_context(...) + ctx.push() + ... + ctx.pop() + + Takes the same arguments as Werkzeug's + :class:`~werkzeug.test.EnvironBuilder`, with some defaults from + the application. See the linked Werkzeug docs for most of the + available arguments. Flask-specific behavior is listed here. + + :param path: URL path being requested. + :param base_url: Base URL where the app is being served, which + ``path`` is relative to. If not given, built from + :data:`PREFERRED_URL_SCHEME`, ``subdomain``, + :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. + :param subdomain: Subdomain name to append to + :data:`SERVER_NAME`. + :param url_scheme: Scheme to use instead of + :data:`PREFERRED_URL_SCHEME`. + :param data: The request body, either as a string or a dict of + form keys and values. + :param json: If given, this is serialized as JSON and passed as + ``data``. Also defaults ``content_type`` to + ``application/json``. + :param args: other positional arguments passed to + :class:`~werkzeug.test.EnvironBuilder`. + :param kwargs: other keyword arguments passed to + :class:`~werkzeug.test.EnvironBuilder`. + """ + from .testing import EnvironBuilder + + builder = EnvironBuilder(self, *args, **kwargs) + + try: + return self.request_context(builder.get_environ()) + finally: + builder.close() + + def wsgi_app(self, environ: dict, start_response: t.Callable) -> t.Any: + """The actual WSGI application. This is not implemented in + :meth:`__call__` so that middlewares can be applied without + losing a reference to the app object. Instead of doing this:: + + app = MyMiddleware(app) + + It's a better idea to do this instead:: + + app.wsgi_app = MyMiddleware(app.wsgi_app) + + Then you still have the original application object around and + can continue to call methods on it. + + .. versionchanged:: 0.7 + Teardown events for the request and app contexts are called + even if an unhandled error occurs. Other events may not be + called depending on when an error occurs during dispatch. + See :ref:`callbacks-and-errors`. + + :param environ: A WSGI environment. + :param start_response: A callable accepting a status code, + a list of headers, and an optional exception context to + start the response. 
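+
+        A minimal middleware sketch (``MyMiddleware`` above is a
+        placeholder; any callable with this signature works)::
+
+            class MyMiddleware:
+                def __init__(self, wsgi_app):
+                    self.wsgi_app = wsgi_app
+
+                def __call__(self, environ, start_response):
+                    # inspect or rewrite environ here
+                    return self.wsgi_app(environ, start_response)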
+ """ + ctx = self.request_context(environ) + error: BaseException | None = None + try: + try: + ctx.push() + response = self.full_dispatch_request() + except Exception as e: + error = e + response = self.handle_exception(e) + except: # noqa: B001 + error = sys.exc_info()[1] + raise + return response(environ, start_response) + finally: + if "werkzeug.debug.preserve_context" in environ: + environ["werkzeug.debug.preserve_context"](_cv_app.get()) + environ["werkzeug.debug.preserve_context"](_cv_request.get()) + + if error is not None and self.should_ignore_error(error): + error = None + + ctx.pop(error) + + def __call__(self, environ: dict, start_response: t.Callable) -> t.Any: + """The WSGI server calls the Flask application object as the + WSGI application. This calls :meth:`wsgi_app`, which can be + wrapped to apply middleware. + """ + return self.wsgi_app(environ, start_response) diff --git a/backend/test/lib/python3.8/site-packages/flask/blueprints.py b/backend/test/lib/python3.8/site-packages/flask/blueprints.py new file mode 100644 index 0000000000000000000000000000000000000000..0407f86fefdca40ce4487ad006a4e65bee0d1ee6 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/blueprints.py @@ -0,0 +1,626 @@ +from __future__ import annotations + +import os +import typing as t +from collections import defaultdict +from functools import update_wrapper + +from . import typing as ft +from .scaffold import _endpoint_from_view_func +from .scaffold import _sentinel +from .scaffold import Scaffold +from .scaffold import setupmethod + +if t.TYPE_CHECKING: # pragma: no cover + from .app import Flask + +DeferredSetupFunction = t.Callable[["BlueprintSetupState"], t.Callable] +T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable) +T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable) +T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable) +T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) +T_template_context_processor = t.TypeVar( + "T_template_context_processor", bound=ft.TemplateContextProcessorCallable +) +T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) +T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) +T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) +T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable) +T_url_value_preprocessor = t.TypeVar( + "T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable +) + + +class BlueprintSetupState: + """Temporary holder object for registering a blueprint with the + application. An instance of this class is created by the + :meth:`~flask.Blueprint.make_setup_state` method and later passed + to all register callback functions. + """ + + def __init__( + self, + blueprint: Blueprint, + app: Flask, + options: t.Any, + first_registration: bool, + ) -> None: + #: a reference to the current application + self.app = app + + #: a reference to the blueprint that created this setup state. + self.blueprint = blueprint + + #: a dictionary with all options that were passed to the + #: :meth:`~flask.Flask.register_blueprint` method. + self.options = options + + #: as blueprints can be registered multiple times with the + #: application and not everything wants to be registered + #: multiple times on it, this attribute can be used to figure + #: out if the blueprint was registered in the past already. 
+ self.first_registration = first_registration + + subdomain = self.options.get("subdomain") + if subdomain is None: + subdomain = self.blueprint.subdomain + + #: The subdomain that the blueprint should be active for, ``None`` + #: otherwise. + self.subdomain = subdomain + + url_prefix = self.options.get("url_prefix") + if url_prefix is None: + url_prefix = self.blueprint.url_prefix + #: The prefix that should be used for all URLs defined on the + #: blueprint. + self.url_prefix = url_prefix + + self.name = self.options.get("name", blueprint.name) + self.name_prefix = self.options.get("name_prefix", "") + + #: A dictionary with URL defaults that is added to each and every + #: URL that was defined with the blueprint. + self.url_defaults = dict(self.blueprint.url_values_defaults) + self.url_defaults.update(self.options.get("url_defaults", ())) + + def add_url_rule( + self, + rule: str, + endpoint: str | None = None, + view_func: t.Callable | None = None, + **options: t.Any, + ) -> None: + """A helper method to register a rule (and optionally a view function) + to the application. The endpoint is automatically prefixed with the + blueprint's name. + """ + if self.url_prefix is not None: + if rule: + rule = "/".join((self.url_prefix.rstrip("/"), rule.lstrip("/"))) + else: + rule = self.url_prefix + options.setdefault("subdomain", self.subdomain) + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) # type: ignore + defaults = self.url_defaults + if "defaults" in options: + defaults = dict(defaults, **options.pop("defaults")) + + self.app.add_url_rule( + rule, + f"{self.name_prefix}.{self.name}.{endpoint}".lstrip("."), + view_func, + defaults=defaults, + **options, + ) + + +class Blueprint(Scaffold): + """Represents a blueprint, a collection of routes and other + app-related functions that can be registered on a real application + later. + + A blueprint is an object that allows defining application functions + without requiring an application object ahead of time. It uses the + same decorators as :class:`~flask.Flask`, but defers the need for an + application by recording them for later registration. + + Decorating a function with a blueprint creates a deferred function + that is called with :class:`~flask.blueprints.BlueprintSetupState` + when the blueprint is registered on an application. + + See :doc:`/blueprints` for more information. + + :param name: The name of the blueprint. Will be prepended to each + endpoint name. + :param import_name: The name of the blueprint package, usually + ``__name__``. This helps locate the ``root_path`` for the + blueprint. + :param static_folder: A folder with static files that should be + served by the blueprint's static route. The path is relative to + the blueprint's root path. Blueprint static files are disabled + by default. + :param static_url_path: The url to serve static files from. + Defaults to ``static_folder``. If the blueprint does not have + a ``url_prefix``, the app's static route will take precedence, + and the blueprint's static files won't be accessible. + :param template_folder: A folder with templates that should be added + to the app's template search path. The path is relative to the + blueprint's root path. Blueprint templates are disabled by + default. Blueprint templates have a lower precedence than those + in the app's templates folder. + :param url_prefix: A path to prepend to all of the blueprint's URLs, + to make them distinct from the rest of the app's routes. 
+ :param subdomain: A subdomain that blueprint routes will match on by + default. + :param url_defaults: A dict of default values that blueprint routes + will receive by default. + :param root_path: By default, the blueprint will automatically set + this based on ``import_name``. In certain situations this + automatic detection can fail, so the path can be specified + manually instead. + + .. versionchanged:: 1.1.0 + Blueprints have a ``cli`` group to register nested CLI commands. + The ``cli_group`` parameter controls the name of the group under + the ``flask`` command. + + .. versionadded:: 0.7 + """ + + _got_registered_once = False + + def __init__( + self, + name: str, + import_name: str, + static_folder: str | os.PathLike | None = None, + static_url_path: str | None = None, + template_folder: str | os.PathLike | None = None, + url_prefix: str | None = None, + subdomain: str | None = None, + url_defaults: dict | None = None, + root_path: str | None = None, + cli_group: str | None = _sentinel, # type: ignore + ): + super().__init__( + import_name=import_name, + static_folder=static_folder, + static_url_path=static_url_path, + template_folder=template_folder, + root_path=root_path, + ) + + if not name: + raise ValueError("'name' may not be empty.") + + if "." in name: + raise ValueError("'name' may not contain a dot '.' character.") + + self.name = name + self.url_prefix = url_prefix + self.subdomain = subdomain + self.deferred_functions: list[DeferredSetupFunction] = [] + + if url_defaults is None: + url_defaults = {} + + self.url_values_defaults = url_defaults + self.cli_group = cli_group + self._blueprints: list[tuple[Blueprint, dict]] = [] + + def _check_setup_finished(self, f_name: str) -> None: + if self._got_registered_once: + raise AssertionError( + f"The setup method '{f_name}' can no longer be called on the blueprint" + f" '{self.name}'. It has already been registered at least once, any" + " changes will not be applied consistently.\n" + "Make sure all imports, decorators, functions, etc. needed to set up" + " the blueprint are done before registering it." + ) + + @setupmethod + def record(self, func: t.Callable) -> None: + """Registers a function that is called when the blueprint is + registered on the application. This function is called with the + state as argument as returned by the :meth:`make_setup_state` + method. + """ + self.deferred_functions.append(func) + + @setupmethod + def record_once(self, func: t.Callable) -> None: + """Works like :meth:`record` but wraps the function in another + function that will ensure the function is only called once. If the + blueprint is registered a second time on the application, the + function passed is not called. + """ + + def wrapper(state: BlueprintSetupState) -> None: + if state.first_registration: + func(state) + + self.record(update_wrapper(wrapper, func)) + + def make_setup_state( + self, app: Flask, options: dict, first_registration: bool = False + ) -> BlueprintSetupState: + """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` + object that is later passed to the register callback functions. + Subclasses can override this to return a subclass of the setup state. + """ + return BlueprintSetupState(self, app, options, first_registration) + + @setupmethod + def register_blueprint(self, blueprint: Blueprint, **options: t.Any) -> None: + """Register a :class:`~flask.Blueprint` on this blueprint. Keyword + arguments passed to this method will override the defaults set + on the blueprint. + + .. 
versionchanged:: 2.0.1 + The ``name`` option can be used to change the (pre-dotted) + name the blueprint is registered with. This allows the same + blueprint to be registered multiple times with unique names + for ``url_for``. + + .. versionadded:: 2.0 + """ + if blueprint is self: + raise ValueError("Cannot register a blueprint on itself") + self._blueprints.append((blueprint, options)) + + def register(self, app: Flask, options: dict) -> None: + """Called by :meth:`Flask.register_blueprint` to register all + views and callbacks registered on the blueprint with the + application. Creates a :class:`.BlueprintSetupState` and calls + each :meth:`record` callback with it. + + :param app: The application this blueprint is being registered + with. + :param options: Keyword arguments forwarded from + :meth:`~Flask.register_blueprint`. + + .. versionchanged:: 2.3 + Nested blueprints now correctly apply subdomains. + + .. versionchanged:: 2.1 + Registering the same blueprint with the same name multiple + times is an error. + + .. versionchanged:: 2.0.1 + Nested blueprints are registered with their dotted name. + This allows different blueprints with the same name to be + nested at different locations. + + .. versionchanged:: 2.0.1 + The ``name`` option can be used to change the (pre-dotted) + name the blueprint is registered with. This allows the same + blueprint to be registered multiple times with unique names + for ``url_for``. + """ + name_prefix = options.get("name_prefix", "") + self_name = options.get("name", self.name) + name = f"{name_prefix}.{self_name}".lstrip(".") + + if name in app.blueprints: + bp_desc = "this" if app.blueprints[name] is self else "a different" + existing_at = f" '{name}'" if self_name != name else "" + + raise ValueError( + f"The name '{self_name}' is already registered for" + f" {bp_desc} blueprint{existing_at}. Use 'name=' to" + f" provide a unique name." + ) + + first_bp_registration = not any(bp is self for bp in app.blueprints.values()) + first_name_registration = name not in app.blueprints + + app.blueprints[name] = self + self._got_registered_once = True + state = self.make_setup_state(app, options, first_bp_registration) + + if self.has_static_folder: + state.add_url_rule( + f"{self.static_url_path}/<path:filename>", + view_func=self.send_static_file, + endpoint="static", + ) + + # Merge blueprint data into parent. 
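+        # For example (names illustrative): a handler stored under the
+        # ``None`` key on a blueprint registered as "admin" is re-keyed
+        # as "admin" on the app, and a key "sub" becomes "admin.sub".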
+ if first_bp_registration or first_name_registration: + + def extend(bp_dict, parent_dict): + for key, values in bp_dict.items(): + key = name if key is None else f"{name}.{key}" + parent_dict[key].extend(values) + + for key, value in self.error_handler_spec.items(): + key = name if key is None else f"{name}.{key}" + value = defaultdict( + dict, + { + code: { + exc_class: func for exc_class, func in code_values.items() + } + for code, code_values in value.items() + }, + ) + app.error_handler_spec[key] = value + + for endpoint, func in self.view_functions.items(): + app.view_functions[endpoint] = func + + extend(self.before_request_funcs, app.before_request_funcs) + extend(self.after_request_funcs, app.after_request_funcs) + extend( + self.teardown_request_funcs, + app.teardown_request_funcs, + ) + extend(self.url_default_functions, app.url_default_functions) + extend(self.url_value_preprocessors, app.url_value_preprocessors) + extend(self.template_context_processors, app.template_context_processors) + + for deferred in self.deferred_functions: + deferred(state) + + cli_resolved_group = options.get("cli_group", self.cli_group) + + if self.cli.commands: + if cli_resolved_group is None: + app.cli.commands.update(self.cli.commands) + elif cli_resolved_group is _sentinel: + self.cli.name = name + app.cli.add_command(self.cli) + else: + self.cli.name = cli_resolved_group + app.cli.add_command(self.cli) + + for blueprint, bp_options in self._blueprints: + bp_options = bp_options.copy() + bp_url_prefix = bp_options.get("url_prefix") + bp_subdomain = bp_options.get("subdomain") + + if bp_subdomain is None: + bp_subdomain = blueprint.subdomain + + if state.subdomain is not None and bp_subdomain is not None: + bp_options["subdomain"] = bp_subdomain + "." + state.subdomain + elif bp_subdomain is not None: + bp_options["subdomain"] = bp_subdomain + elif state.subdomain is not None: + bp_options["subdomain"] = state.subdomain + + if bp_url_prefix is None: + bp_url_prefix = blueprint.url_prefix + + if state.url_prefix is not None and bp_url_prefix is not None: + bp_options["url_prefix"] = ( + state.url_prefix.rstrip("/") + "/" + bp_url_prefix.lstrip("/") + ) + elif bp_url_prefix is not None: + bp_options["url_prefix"] = bp_url_prefix + elif state.url_prefix is not None: + bp_options["url_prefix"] = state.url_prefix + + bp_options["name_prefix"] = name + blueprint.register(app, bp_options) + + @setupmethod + def add_url_rule( + self, + rule: str, + endpoint: str | None = None, + view_func: ft.RouteCallable | None = None, + provide_automatic_options: bool | None = None, + **options: t.Any, + ) -> None: + """Register a URL rule with the blueprint. See :meth:`.Flask.add_url_rule` for + full documentation. + + The URL rule is prefixed with the blueprint's URL prefix. The endpoint name, + used with :func:`url_for`, is prefixed with the blueprint's name. + """ + if endpoint and "." in endpoint: + raise ValueError("'endpoint' may not contain a dot '.' character.") + + if view_func and hasattr(view_func, "__name__") and "." in view_func.__name__: + raise ValueError("'view_func' name may not contain a dot '.' character.") + + self.record( + lambda s: s.add_url_rule( + rule, + endpoint, + view_func, + provide_automatic_options=provide_automatic_options, + **options, + ) + ) + + @setupmethod + def app_template_filter( + self, name: str | None = None + ) -> t.Callable[[T_template_filter], T_template_filter]: + """Register a template filter, available in any template rendered by the + application. 
Equivalent to :meth:`.Flask.template_filter`. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + + def decorator(f: T_template_filter) -> T_template_filter: + self.add_app_template_filter(f, name=name) + return f + + return decorator + + @setupmethod + def add_app_template_filter( + self, f: ft.TemplateFilterCallable, name: str | None = None + ) -> None: + """Register a template filter, available in any template rendered by the + application. Works like the :meth:`app_template_filter` decorator. Equivalent to + :meth:`.Flask.add_template_filter`. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + + def register_template(state: BlueprintSetupState) -> None: + state.app.jinja_env.filters[name or f.__name__] = f + + self.record_once(register_template) + + @setupmethod + def app_template_test( + self, name: str | None = None + ) -> t.Callable[[T_template_test], T_template_test]: + """Register a template test, available in any template rendered by the + application. Equivalent to :meth:`.Flask.template_test`. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def decorator(f: T_template_test) -> T_template_test: + self.add_app_template_test(f, name=name) + return f + + return decorator + + @setupmethod + def add_app_template_test( + self, f: ft.TemplateTestCallable, name: str | None = None + ) -> None: + """Register a template test, available in any template rendered by the + application. Works like the :meth:`app_template_test` decorator. Equivalent to + :meth:`.Flask.add_template_test`. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + + def register_template(state: BlueprintSetupState) -> None: + state.app.jinja_env.tests[name or f.__name__] = f + + self.record_once(register_template) + + @setupmethod + def app_template_global( + self, name: str | None = None + ) -> t.Callable[[T_template_global], T_template_global]: + """Register a template global, available in any template rendered by the + application. Equivalent to :meth:`.Flask.template_global`. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + + def decorator(f: T_template_global) -> T_template_global: + self.add_app_template_global(f, name=name) + return f + + return decorator + + @setupmethod + def add_app_template_global( + self, f: ft.TemplateGlobalCallable, name: str | None = None + ) -> None: + """Register a template global, available in any template rendered by the + application. Works like the :meth:`app_template_global` decorator. Equivalent to + :meth:`.Flask.add_template_global`. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + + def register_template(state: BlueprintSetupState) -> None: + state.app.jinja_env.globals[name or f.__name__] = f + + self.record_once(register_template) + + @setupmethod + def before_app_request(self, f: T_before_request) -> T_before_request: + """Like :meth:`before_request`, but before every request, not only those handled + by the blueprint. Equivalent to :meth:`.Flask.before_request`. 
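+
+        A sketch (``bp`` is an arbitrary blueprint)::
+
+            @bp.before_app_request
+            def load_current_user():
+                ...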
+ """ + self.record_once( + lambda s: s.app.before_request_funcs.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def after_app_request(self, f: T_after_request) -> T_after_request: + """Like :meth:`after_request`, but after every request, not only those handled + by the blueprint. Equivalent to :meth:`.Flask.after_request`. + """ + self.record_once( + lambda s: s.app.after_request_funcs.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def teardown_app_request(self, f: T_teardown) -> T_teardown: + """Like :meth:`teardown_request`, but after every request, not only those + handled by the blueprint. Equivalent to :meth:`.Flask.teardown_request`. + """ + self.record_once( + lambda s: s.app.teardown_request_funcs.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def app_context_processor( + self, f: T_template_context_processor + ) -> T_template_context_processor: + """Like :meth:`context_processor`, but for templates rendered by every view, not + only by the blueprint. Equivalent to :meth:`.Flask.context_processor`. + """ + self.record_once( + lambda s: s.app.template_context_processors.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def app_errorhandler( + self, code: type[Exception] | int + ) -> t.Callable[[T_error_handler], T_error_handler]: + """Like :meth:`errorhandler`, but for every request, not only those handled by + the blueprint. Equivalent to :meth:`.Flask.errorhandler`. + """ + + def decorator(f: T_error_handler) -> T_error_handler: + self.record_once(lambda s: s.app.errorhandler(code)(f)) + return f + + return decorator + + @setupmethod + def app_url_value_preprocessor( + self, f: T_url_value_preprocessor + ) -> T_url_value_preprocessor: + """Like :meth:`url_value_preprocessor`, but for every request, not only those + handled by the blueprint. Equivalent to :meth:`.Flask.url_value_preprocessor`. + """ + self.record_once( + lambda s: s.app.url_value_preprocessors.setdefault(None, []).append(f) + ) + return f + + @setupmethod + def app_url_defaults(self, f: T_url_defaults) -> T_url_defaults: + """Like :meth:`url_defaults`, but for every request, not only those handled by + the blueprint. Equivalent to :meth:`.Flask.url_defaults`. + """ + self.record_once( + lambda s: s.app.url_default_functions.setdefault(None, []).append(f) + ) + return f diff --git a/backend/test/lib/python3.8/site-packages/flask/cli.py b/backend/test/lib/python3.8/site-packages/flask/cli.py new file mode 100644 index 0000000000000000000000000000000000000000..dda266b30ca810c1db50e429e9357845b9a9962d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/cli.py @@ -0,0 +1,1068 @@ +from __future__ import annotations + +import ast +import importlib.metadata +import inspect +import os +import platform +import re +import sys +import traceback +import typing as t +from functools import update_wrapper +from operator import itemgetter + +import click +from click.core import ParameterSource +from werkzeug import run_simple +from werkzeug.serving import is_running_from_reloader +from werkzeug.utils import import_string + +from .globals import current_app +from .helpers import get_debug_flag +from .helpers import get_load_dotenv + +if t.TYPE_CHECKING: + from .app import Flask + + +class NoAppException(click.UsageError): + """Raised if an application cannot be found or loaded.""" + + +def find_best_app(module): + """Given a module instance this tries to find the best possible + application in the module or raises an exception. + """ + from . 
import Flask + + # Search for the most common names first. + for attr_name in ("app", "application"): + app = getattr(module, attr_name, None) + + if isinstance(app, Flask): + return app + + # Otherwise find the only object that is a Flask instance. + matches = [v for v in module.__dict__.values() if isinstance(v, Flask)] + + if len(matches) == 1: + return matches[0] + elif len(matches) > 1: + raise NoAppException( + "Detected multiple Flask applications in module" + f" '{module.__name__}'. Use '{module.__name__}:name'" + " to specify the correct one." + ) + + # Search for app factory functions. + for attr_name in ("create_app", "make_app"): + app_factory = getattr(module, attr_name, None) + + if inspect.isfunction(app_factory): + try: + app = app_factory() + + if isinstance(app, Flask): + return app + except TypeError as e: + if not _called_with_wrong_args(app_factory): + raise + + raise NoAppException( + f"Detected factory '{attr_name}' in module '{module.__name__}'," + " but could not call it without arguments. Use" + f" '{module.__name__}:{attr_name}(args)'" + " to specify arguments." + ) from e + + raise NoAppException( + "Failed to find Flask application or factory in module" + f" '{module.__name__}'. Use '{module.__name__}:name'" + " to specify one." + ) + + +def _called_with_wrong_args(f): + """Check whether calling a function raised a ``TypeError`` because + the call failed or because something in the factory raised the + error. + + :param f: The function that was called. + :return: ``True`` if the call failed. + """ + tb = sys.exc_info()[2] + + try: + while tb is not None: + if tb.tb_frame.f_code is f.__code__: + # In the function, it was called successfully. + return False + + tb = tb.tb_next + + # Didn't reach the function. + return True + finally: + # Delete tb to break a circular reference. + # https://docs.python.org/2/library/sys.html#sys.exc_info + del tb + + +def find_app_by_string(module, app_name): + """Check if the given string is a variable name or a function. Call + a function to get the app instance, or return the variable directly. + """ + from . import Flask + + # Parse app_name as a single expression to determine if it's a valid + # attribute name or function call. + try: + expr = ast.parse(app_name.strip(), mode="eval").body + except SyntaxError: + raise NoAppException( + f"Failed to parse {app_name!r} as an attribute name or function call." + ) from None + + if isinstance(expr, ast.Name): + name = expr.id + args = [] + kwargs = {} + elif isinstance(expr, ast.Call): + # Ensure the function name is an attribute name only. + if not isinstance(expr.func, ast.Name): + raise NoAppException( + f"Function reference must be a simple name: {app_name!r}." + ) + + name = expr.func.id + + # Parse the positional and keyword arguments as literals. + try: + args = [ast.literal_eval(arg) for arg in expr.args] + kwargs = {kw.arg: ast.literal_eval(kw.value) for kw in expr.keywords} + except ValueError: + # literal_eval gives cryptic error messages, show a generic + # message with the full expression instead. + raise NoAppException( + f"Failed to parse arguments as literal values: {app_name!r}." + ) from None + else: + raise NoAppException( + f"Failed to parse {app_name!r} as an attribute name or function call." + ) + + try: + attr = getattr(module, name) + except AttributeError as e: + raise NoAppException( + f"Failed to find attribute {name!r} in {module.__name__!r}." 
+ ) from e + + # If the attribute is a function, call it with any args and kwargs + # to get the real application. + if inspect.isfunction(attr): + try: + app = attr(*args, **kwargs) + except TypeError as e: + if not _called_with_wrong_args(attr): + raise + + raise NoAppException( + f"The factory {app_name!r} in module" + f" {module.__name__!r} could not be called with the" + " specified arguments." + ) from e + else: + app = attr + + if isinstance(app, Flask): + return app + + raise NoAppException( + "A valid Flask application was not obtained from" + f" '{module.__name__}:{app_name}'." + ) + + +def prepare_import(path): + """Given a filename this will try to calculate the python path, add it + to the search path and return the actual module name that is expected. + """ + path = os.path.realpath(path) + + fname, ext = os.path.splitext(path) + if ext == ".py": + path = fname + + if os.path.basename(path) == "__init__": + path = os.path.dirname(path) + + module_name = [] + + # move up until outside package structure (no __init__.py) + while True: + path, name = os.path.split(path) + module_name.append(name) + + if not os.path.exists(os.path.join(path, "__init__.py")): + break + + if sys.path[0] != path: + sys.path.insert(0, path) + + return ".".join(module_name[::-1]) + + +def locate_app(module_name, app_name, raise_if_not_found=True): + try: + __import__(module_name) + except ImportError: + # Reraise the ImportError if it occurred within the imported module. + # Determine this by checking whether the trace has a depth > 1. + if sys.exc_info()[2].tb_next: + raise NoAppException( + f"While importing {module_name!r}, an ImportError was" + f" raised:\n\n{traceback.format_exc()}" + ) from None + elif raise_if_not_found: + raise NoAppException(f"Could not import {module_name!r}.") from None + else: + return + + module = sys.modules[module_name] + + if app_name is None: + return find_best_app(module) + else: + return find_app_by_string(module, app_name) + + +def get_version(ctx, param, value): + if not value or ctx.resilient_parsing: + return + + flask_version = importlib.metadata.version("flask") + werkzeug_version = importlib.metadata.version("werkzeug") + + click.echo( + f"Python {platform.python_version()}\n" + f"Flask {flask_version}\n" + f"Werkzeug {werkzeug_version}", + color=ctx.color, + ) + ctx.exit() + + +version_option = click.Option( + ["--version"], + help="Show the Flask version.", + expose_value=False, + callback=get_version, + is_flag=True, + is_eager=True, +) + + +class ScriptInfo: + """Helper object to deal with Flask applications. This is usually not + necessary to interface with as it's used internally in the dispatching + to click. In future versions of Flask this object will most likely play + a bigger role. Typically it's created automatically by the + :class:`FlaskGroup` but you can also manually create it and pass it + onwards as click object. + """ + + def __init__( + self, + app_import_path: str | None = None, + create_app: t.Callable[..., Flask] | None = None, + set_debug_flag: bool = True, + ) -> None: + #: Optionally the import path for the Flask application. + self.app_import_path = app_import_path + #: Optionally a function that is passed the script info to create + #: the instance of the application. + self.create_app = create_app + #: A dictionary with arbitrary data that can be associated with + #: this script info. 
+ self.data: dict[t.Any, t.Any] = {} + self.set_debug_flag = set_debug_flag + self._loaded_app: Flask | None = None + + def load_app(self) -> Flask: + """Loads the Flask app (if not yet loaded) and returns it. Calling + this multiple times will just result in the already loaded app to + be returned. + """ + if self._loaded_app is not None: + return self._loaded_app + + if self.create_app is not None: + app = self.create_app() + else: + if self.app_import_path: + path, name = ( + re.split(r":(?![\\/])", self.app_import_path, maxsplit=1) + [None] + )[:2] + import_name = prepare_import(path) + app = locate_app(import_name, name) + else: + for path in ("wsgi.py", "app.py"): + import_name = prepare_import(path) + app = locate_app(import_name, None, raise_if_not_found=False) + + if app: + break + + if not app: + raise NoAppException( + "Could not locate a Flask application. Use the" + " 'flask --app' option, 'FLASK_APP' environment" + " variable, or a 'wsgi.py' or 'app.py' file in the" + " current directory." + ) + + if self.set_debug_flag: + # Update the app's debug flag through the descriptor so that + # other values repopulate as well. + app.debug = get_debug_flag() + + self._loaded_app = app + return app + + +pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True) + + +def with_appcontext(f): + """Wraps a callback so that it's guaranteed to be executed with the + script's application context. + + Custom commands (and their options) registered under ``app.cli`` or + ``blueprint.cli`` will always have an app context available, this + decorator is not required in that case. + + .. versionchanged:: 2.2 + The app context is active for subcommands as well as the + decorated callback. The app context is always available to + ``app.cli`` command and parameter callbacks. + """ + + @click.pass_context + def decorator(__ctx, *args, **kwargs): + if not current_app: + app = __ctx.ensure_object(ScriptInfo).load_app() + __ctx.with_resource(app.app_context()) + + return __ctx.invoke(f, *args, **kwargs) + + return update_wrapper(decorator, f) + + +class AppGroup(click.Group): + """This works similar to a regular click :class:`~click.Group` but it + changes the behavior of the :meth:`command` decorator so that it + automatically wraps the functions in :func:`with_appcontext`. + + Not to be confused with :class:`FlaskGroup`. + """ + + def command(self, *args, **kwargs): + """This works exactly like the method of the same name on a regular + :class:`click.Group` but it wraps callbacks in :func:`with_appcontext` + unless it's disabled by passing ``with_appcontext=False``. + """ + wrap_for_ctx = kwargs.pop("with_appcontext", True) + + def decorator(f): + if wrap_for_ctx: + f = with_appcontext(f) + return click.Group.command(self, *args, **kwargs)(f) + + return decorator + + def group(self, *args, **kwargs): + """This works exactly like the method of the same name on a regular + :class:`click.Group` but it defaults the group class to + :class:`AppGroup`. + """ + kwargs.setdefault("cls", AppGroup) + return click.Group.group(self, *args, **kwargs) + + +def _set_app(ctx: click.Context, param: click.Option, value: str | None) -> str | None: + if value is None: + return None + + info = ctx.ensure_object(ScriptInfo) + info.app_import_path = value + return value + + +# This option is eager so the app will be available if --help is given. +# --help is also eager, so --app must be before it in the param list. 
+# no_args_is_help bypasses eager processing, so this option must be +# processed manually in that case to ensure FLASK_APP gets picked up. +_app_option = click.Option( + ["-A", "--app"], + metavar="IMPORT", + help=( + "The Flask application or factory function to load, in the form 'module:name'." + " Module can be a dotted import or file path. Name is not required if it is" + " 'app', 'application', 'create_app', or 'make_app', and can be 'name(args)' to" + " pass arguments." + ), + is_eager=True, + expose_value=False, + callback=_set_app, +) + + +def _set_debug(ctx: click.Context, param: click.Option, value: bool) -> bool | None: + # If the flag isn't provided, it will default to False. Don't use + # that, let debug be set by env in that case. + source = ctx.get_parameter_source(param.name) # type: ignore[arg-type] + + if source is not None and source in ( + ParameterSource.DEFAULT, + ParameterSource.DEFAULT_MAP, + ): + return None + + # Set with env var instead of ScriptInfo.load so that it can be + # accessed early during a factory function. + os.environ["FLASK_DEBUG"] = "1" if value else "0" + return value + + +_debug_option = click.Option( + ["--debug/--no-debug"], + help="Set debug mode.", + expose_value=False, + callback=_set_debug, +) + + +def _env_file_callback( + ctx: click.Context, param: click.Option, value: str | None +) -> str | None: + if value is None: + return None + + import importlib + + try: + importlib.import_module("dotenv") + except ImportError: + raise click.BadParameter( + "python-dotenv must be installed to load an env file.", + ctx=ctx, + param=param, + ) from None + + # Don't check FLASK_SKIP_DOTENV, that only disables automatically + # loading .env and .flaskenv files. + load_dotenv(value) + return value + + +# This option is eager so env vars are loaded as early as possible to be +# used by other options. +_env_file_option = click.Option( + ["-e", "--env-file"], + type=click.Path(exists=True, dir_okay=False), + help="Load environment variables from this file. python-dotenv must be installed.", + is_eager=True, + expose_value=False, + callback=_env_file_callback, +) + + +class FlaskGroup(AppGroup): + """Special subclass of the :class:`AppGroup` group that supports + loading more commands from the configured Flask app. Normally a + developer does not have to interface with this class but there are + some very advanced use cases for which it makes sense to create an + instance of this. see :ref:`custom-scripts`. + + :param add_default_commands: if this is True then the default run and + shell commands will be added. + :param add_version_option: adds the ``--version`` option. + :param create_app: an optional callback that is passed the script info and + returns the loaded app. + :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` + files to set environment variables. Will also change the working + directory to the directory containing the first file found. + :param set_debug_flag: Set the app's debug flag. + + .. versionchanged:: 2.2 + Added the ``-A/--app``, ``--debug/--no-debug``, ``-e/--env-file`` options. + + .. versionchanged:: 2.2 + An app context is pushed when running ``app.cli`` commands, so + ``@with_appcontext`` is no longer required for those commands. + + .. versionchanged:: 1.0 + If installed, python-dotenv will be used to load environment variables + from :file:`.env` and :file:`.flaskenv` files. 
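+
+    A sketch of a custom management script (``create_app`` is assumed
+    to be an application factory defined elsewhere)::
+
+        import click
+        from flask.cli import FlaskGroup
+
+        @click.group(cls=FlaskGroup, create_app=create_app)
+        def cli():
+            pass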
+ """ + + def __init__( + self, + add_default_commands: bool = True, + create_app: t.Callable[..., Flask] | None = None, + add_version_option: bool = True, + load_dotenv: bool = True, + set_debug_flag: bool = True, + **extra: t.Any, + ) -> None: + params = list(extra.pop("params", None) or ()) + # Processing is done with option callbacks instead of a group + # callback. This allows users to make a custom group callback + # without losing the behavior. --env-file must come first so + # that it is eagerly evaluated before --app. + params.extend((_env_file_option, _app_option, _debug_option)) + + if add_version_option: + params.append(version_option) + + if "context_settings" not in extra: + extra["context_settings"] = {} + + extra["context_settings"].setdefault("auto_envvar_prefix", "FLASK") + + super().__init__(params=params, **extra) + + self.create_app = create_app + self.load_dotenv = load_dotenv + self.set_debug_flag = set_debug_flag + + if add_default_commands: + self.add_command(run_command) + self.add_command(shell_command) + self.add_command(routes_command) + + self._loaded_plugin_commands = False + + def _load_plugin_commands(self): + if self._loaded_plugin_commands: + return + + if sys.version_info >= (3, 10): + from importlib import metadata + else: + # Use a backport on Python < 3.10. We technically have + # importlib.metadata on 3.8+, but the API changed in 3.10, + # so use the backport for consistency. + import importlib_metadata as metadata + + for ep in metadata.entry_points(group="flask.commands"): + self.add_command(ep.load(), ep.name) + + self._loaded_plugin_commands = True + + def get_command(self, ctx, name): + self._load_plugin_commands() + # Look up built-in and plugin commands, which should be + # available even if the app fails to load. + rv = super().get_command(ctx, name) + + if rv is not None: + return rv + + info = ctx.ensure_object(ScriptInfo) + + # Look up commands provided by the app, showing an error and + # continuing if the app couldn't be loaded. + try: + app = info.load_app() + except NoAppException as e: + click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") + return None + + # Push an app context for the loaded app unless it is already + # active somehow. This makes the context available to parameter + # and command callbacks without needing @with_appcontext. + if not current_app or current_app._get_current_object() is not app: + ctx.with_resource(app.app_context()) + + return app.cli.get_command(ctx, name) + + def list_commands(self, ctx): + self._load_plugin_commands() + # Start with the built-in and plugin commands. + rv = set(super().list_commands(ctx)) + info = ctx.ensure_object(ScriptInfo) + + # Add commands provided by the app, showing an error and + # continuing if the app couldn't be loaded. + try: + rv.update(info.load_app().cli.list_commands(ctx)) + except NoAppException as e: + # When an app couldn't be loaded, show the error message + # without the traceback. + click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") + except Exception: + # When any other errors occurred during loading, show the + # full traceback. + click.secho(f"{traceback.format_exc()}\n", err=True, fg="red") + + return sorted(rv) + + def make_context( + self, + info_name: str | None, + args: list[str], + parent: click.Context | None = None, + **extra: t.Any, + ) -> click.Context: + # Set a flag to tell app.run to become a no-op. 
If app.run was + # not in a __name__ == __main__ guard, it would start the server + # when importing, blocking whatever command is being called. + os.environ["FLASK_RUN_FROM_CLI"] = "true" + + # Attempt to load .env and .flask env files. The --env-file + # option can cause another file to be loaded. + if get_load_dotenv(self.load_dotenv): + load_dotenv() + + if "obj" not in extra and "obj" not in self.context_settings: + extra["obj"] = ScriptInfo( + create_app=self.create_app, set_debug_flag=self.set_debug_flag + ) + + return super().make_context(info_name, args, parent=parent, **extra) + + def parse_args(self, ctx: click.Context, args: list[str]) -> list[str]: + if not args and self.no_args_is_help: + # Attempt to load --env-file and --app early in case they + # were given as env vars. Otherwise no_args_is_help will not + # see commands from app.cli. + _env_file_option.handle_parse_result(ctx, {}, []) + _app_option.handle_parse_result(ctx, {}, []) + + return super().parse_args(ctx, args) + + +def _path_is_ancestor(path, other): + """Take ``other`` and remove the length of ``path`` from it. Then join it + to ``path``. If it is the original value, ``path`` is an ancestor of + ``other``.""" + return os.path.join(path, other[len(path) :].lstrip(os.sep)) == other + + +def load_dotenv(path: str | os.PathLike | None = None) -> bool: + """Load "dotenv" files in order of precedence to set environment variables. + + If an env var is already set it is not overwritten, so earlier files in the + list are preferred over later files. + + This is a no-op if `python-dotenv`_ is not installed. + + .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme + + :param path: Load the file at this location instead of searching. + :return: ``True`` if a file was loaded. + + .. versionchanged:: 2.0 + The current directory is not changed to the location of the + loaded file. + + .. versionchanged:: 2.0 + When loading the env files, set the default encoding to UTF-8. + + .. versionchanged:: 1.1.0 + Returns ``False`` when python-dotenv is not installed, or when + the given path isn't a file. + + .. versionadded:: 1.0 + """ + try: + import dotenv + except ImportError: + if path or os.path.isfile(".env") or os.path.isfile(".flaskenv"): + click.secho( + " * Tip: There are .env or .flaskenv files present." + ' Do "pip install python-dotenv" to use them.', + fg="yellow", + err=True, + ) + + return False + + # Always return after attempting to load a given path, don't load + # the default files. + if path is not None: + if os.path.isfile(path): + return dotenv.load_dotenv(path, encoding="utf-8") + + return False + + loaded = False + + for name in (".env", ".flaskenv"): + path = dotenv.find_dotenv(name, usecwd=True) + + if not path: + continue + + dotenv.load_dotenv(path, encoding="utf-8") + loaded = True + + return loaded # True if at least one file was located and loaded. + + +def show_server_banner(debug, app_import_path): + """Show extra startup messages the first time the server is run, + ignoring the reloader. + """ + if is_running_from_reloader(): + return + + if app_import_path is not None: + click.echo(f" * Serving Flask app '{app_import_path}'") + + if debug is not None: + click.echo(f" * Debug mode: {'on' if debug else 'off'}") + + +class CertParamType(click.ParamType): + """Click option type for the ``--cert`` option. Allows either an + existing file, the string ``'adhoc'``, or an import for a + :class:`~ssl.SSLContext` object. 
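+
+    As an illustrative sketch of the import form (the module and
+    variable names are assumed), a ``mymodule.py`` containing::
+
+        import ssl
+
+        # Build a server-side TLS context from an existing cert/key pair.
+        ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+        ctx.load_cert_chain("cert.pem", "key.pem")
+
+    could then be referenced on the command line as ``--cert mymodule:ctx``.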
+ """ + + name = "path" + + def __init__(self): + self.path_type = click.Path(exists=True, dir_okay=False, resolve_path=True) + + def convert(self, value, param, ctx): + try: + import ssl + except ImportError: + raise click.BadParameter( + 'Using "--cert" requires Python to be compiled with SSL support.', + ctx, + param, + ) from None + + try: + return self.path_type(value, param, ctx) + except click.BadParameter: + value = click.STRING(value, param, ctx).lower() + + if value == "adhoc": + try: + import cryptography # noqa: F401 + except ImportError: + raise click.BadParameter( + "Using ad-hoc certificates requires the cryptography library.", + ctx, + param, + ) from None + + return value + + obj = import_string(value, silent=True) + + if isinstance(obj, ssl.SSLContext): + return obj + + raise + + +def _validate_key(ctx, param, value): + """The ``--key`` option must be specified when ``--cert`` is a file. + Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed. + """ + cert = ctx.params.get("cert") + is_adhoc = cert == "adhoc" + + try: + import ssl + except ImportError: + is_context = False + else: + is_context = isinstance(cert, ssl.SSLContext) + + if value is not None: + if is_adhoc: + raise click.BadParameter( + 'When "--cert" is "adhoc", "--key" is not used.', ctx, param + ) + + if is_context: + raise click.BadParameter( + 'When "--cert" is an SSLContext object, "--key is not used.', ctx, param + ) + + if not cert: + raise click.BadParameter('"--cert" must also be specified.', ctx, param) + + ctx.params["cert"] = cert, value + + else: + if cert and not (is_adhoc or is_context): + raise click.BadParameter('Required when using "--cert".', ctx, param) + + return value + + +class SeparatedPathType(click.Path): + """Click option type that accepts a list of values separated by the + OS's path separator (``:``, ``;`` on Windows). Each value is + validated as a :class:`click.Path` type. + """ + + def convert(self, value, param, ctx): + items = self.split_envvar_value(value) + super_convert = super().convert + return [super_convert(item, param, ctx) for item in items] + + +@click.command("run", short_help="Run a development server.") +@click.option("--host", "-h", default="127.0.0.1", help="The interface to bind to.") +@click.option("--port", "-p", default=5000, help="The port to bind to.") +@click.option( + "--cert", + type=CertParamType(), + help="Specify a certificate file to use HTTPS.", + is_eager=True, +) +@click.option( + "--key", + type=click.Path(exists=True, dir_okay=False, resolve_path=True), + callback=_validate_key, + expose_value=False, + help="The key file to use when specifying a certificate.", +) +@click.option( + "--reload/--no-reload", + default=None, + help="Enable or disable the reloader. By default the reloader " + "is active if debug is enabled.", +) +@click.option( + "--debugger/--no-debugger", + default=None, + help="Enable or disable the debugger. By default the debugger " + "is active if debug is enabled.", +) +@click.option( + "--with-threads/--without-threads", + default=True, + help="Enable or disable multithreading.", +) +@click.option( + "--extra-files", + default=None, + type=SeparatedPathType(), + help=( + "Extra files that trigger a reload on change. Multiple paths" + f" are separated by {os.path.pathsep!r}." + ), +) +@click.option( + "--exclude-patterns", + default=None, + type=SeparatedPathType(), + help=( + "Files matching these fnmatch patterns will not trigger a reload" + " on change. 
Multiple patterns are separated by" + f" {os.path.pathsep!r}." + ), +) +@pass_script_info +def run_command( + info, + host, + port, + reload, + debugger, + with_threads, + cert, + extra_files, + exclude_patterns, +): + """Run a local development server. + + This server is for development purposes only. It does not provide + the stability, security, or performance of production WSGI servers. + + The reloader and debugger are enabled by default with the '--debug' + option. + """ + try: + app = info.load_app() + except Exception as e: + if is_running_from_reloader(): + # When reloading, print out the error immediately, but raise + # it later so the debugger or server can handle it. + traceback.print_exc() + err = e + + def app(environ, start_response): + raise err from None + + else: + # When not reloading, raise the error immediately so the + # command fails. + raise e from None + + debug = get_debug_flag() + + if reload is None: + reload = debug + + if debugger is None: + debugger = debug + + show_server_banner(debug, info.app_import_path) + + run_simple( + host, + port, + app, + use_reloader=reload, + use_debugger=debugger, + threaded=with_threads, + ssl_context=cert, + extra_files=extra_files, + exclude_patterns=exclude_patterns, + ) + + +run_command.params.insert(0, _debug_option) + + +@click.command("shell", short_help="Run a shell in the app context.") +@with_appcontext +def shell_command() -> None: + """Run an interactive Python shell in the context of a given + Flask application. The application will populate the default + namespace of this shell according to its configuration. + + This is useful for executing small snippets of management code + without having to manually configure the application. + """ + import code + + banner = ( + f"Python {sys.version} on {sys.platform}\n" + f"App: {current_app.import_name}\n" + f"Instance: {current_app.instance_path}" + ) + ctx: dict = {} + + # Support the regular Python interpreter startup script if someone + # is using it. + startup = os.environ.get("PYTHONSTARTUP") + if startup and os.path.isfile(startup): + with open(startup) as f: + eval(compile(f.read(), startup, "exec"), ctx) + + ctx.update(current_app.make_shell_context()) + + # Site, customize, or startup script can set a hook to call when + # entering interactive mode. The default one sets up readline with + # tab and history completion. + interactive_hook = getattr(sys, "__interactivehook__", None) + + if interactive_hook is not None: + try: + import readline + from rlcompleter import Completer + except ImportError: + pass + else: + # rlcompleter uses __main__.__dict__ by default, which is + # flask.__main__. Use the shell context instead. + readline.set_completer(Completer(ctx).complete) + + interactive_hook() + + code.interact(banner=banner, local=ctx) + + +@click.command("routes", short_help="Show the routes for the app.") +@click.option( + "--sort", + "-s", + type=click.Choice(("endpoint", "methods", "domain", "rule", "match")), + default="endpoint", + help=( + "Method to sort routes by. 'match' is the order that Flask will match routes" + " when dispatching a request." 
+ ), +) +@click.option("--all-methods", is_flag=True, help="Show HEAD and OPTIONS methods.") +@with_appcontext +def routes_command(sort: str, all_methods: bool) -> None: + """Show all registered routes with endpoints and methods.""" + rules = list(current_app.url_map.iter_rules()) + + if not rules: + click.echo("No routes were registered.") + return + + ignored_methods = set() if all_methods else {"HEAD", "OPTIONS"} + host_matching = current_app.url_map.host_matching + has_domain = any(rule.host if host_matching else rule.subdomain for rule in rules) + rows = [] + + for rule in rules: + row = [ + rule.endpoint, + ", ".join(sorted((rule.methods or set()) - ignored_methods)), + ] + + if has_domain: + row.append((rule.host if host_matching else rule.subdomain) or "") + + row.append(rule.rule) + rows.append(row) + + headers = ["Endpoint", "Methods"] + sorts = ["endpoint", "methods"] + + if has_domain: + headers.append("Host" if host_matching else "Subdomain") + sorts.append("domain") + + headers.append("Rule") + sorts.append("rule") + + try: + rows.sort(key=itemgetter(sorts.index(sort))) + except ValueError: + pass + + rows.insert(0, headers) + widths = [max(len(row[i]) for row in rows) for i in range(len(headers))] + rows.insert(1, ["-" * w for w in widths]) + template = " ".join(f"{{{i}:<{w}}}" for i, w in enumerate(widths)) + + for row in rows: + click.echo(template.format(*row)) + + +cli = FlaskGroup( + name="flask", + help="""\ +A general utility script for Flask applications. + +An application to load must be given with the '--app' option, +'FLASK_APP' environment variable, or with a 'wsgi.py' or 'app.py' file +in the current directory. +""", +) + + +def main() -> None: + cli.main() + + +if __name__ == "__main__": + main() diff --git a/backend/test/lib/python3.8/site-packages/flask/config.py b/backend/test/lib/python3.8/site-packages/flask/config.py new file mode 100644 index 0000000000000000000000000000000000000000..5f921b4dff57067cef833689a835c21e3454f845 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/config.py @@ -0,0 +1,347 @@ +from __future__ import annotations + +import errno +import json +import os +import types +import typing as t + +from werkzeug.utils import import_string + + +class ConfigAttribute: + """Makes an attribute forward to the config""" + + def __init__(self, name: str, get_converter: t.Callable | None = None) -> None: + self.__name__ = name + self.get_converter = get_converter + + def __get__(self, obj: t.Any, owner: t.Any = None) -> t.Any: + if obj is None: + return self + rv = obj.config[self.__name__] + if self.get_converter is not None: + rv = self.get_converter(rv) + return rv + + def __set__(self, obj: t.Any, value: t.Any) -> None: + obj.config[self.__name__] = value + + +class Config(dict): + """Works exactly like a dict but provides ways to fill it from files + or special dictionaries. There are two common patterns to populate the + config. + + Either you can fill the config from a config file:: + + app.config.from_pyfile('yourconfig.cfg') + + Or alternatively you can define the configuration options in the + module that calls :meth:`from_object` or provide an import path to + a module that should be loaded. It is also possible to tell it to + use the same module and with that provide the configuration values + just before the call:: + + DEBUG = True + SECRET_KEY = 'development key' + app.config.from_object(__name__) + + In both cases (loading from any Python file or loading from modules), + only uppercase keys are added to the config. 
This makes it possible to use + lowercase values in the config file for temporary values that are not added + to the config or to define the config keys in the same file that implements + the application. + + Probably the most interesting way to load configurations is from an + environment variable pointing to a file:: + + app.config.from_envvar('YOURAPPLICATION_SETTINGS') + + In this case before launching the application you have to set this + environment variable to the file you want to use. On Linux and OS X + use the export statement:: + + export YOURAPPLICATION_SETTINGS='/path/to/config/file' + + On windows use `set` instead. + + :param root_path: path to which files are read relative from. When the + config object is created by the application, this is + the application's :attr:`~flask.Flask.root_path`. + :param defaults: an optional dictionary of default values + """ + + def __init__( + self, root_path: str | os.PathLike, defaults: dict | None = None + ) -> None: + super().__init__(defaults or {}) + self.root_path = root_path + + def from_envvar(self, variable_name: str, silent: bool = False) -> bool: + """Loads a configuration from an environment variable pointing to + a configuration file. This is basically just a shortcut with nicer + error messages for this line of code:: + + app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) + + :param variable_name: name of the environment variable + :param silent: set to ``True`` if you want silent failure for missing + files. + :return: ``True`` if the file was loaded successfully. + """ + rv = os.environ.get(variable_name) + if not rv: + if silent: + return False + raise RuntimeError( + f"The environment variable {variable_name!r} is not set" + " and as such configuration could not be loaded. Set" + " this variable and make it point to a configuration" + " file" + ) + return self.from_pyfile(rv, silent=silent) + + def from_prefixed_env( + self, prefix: str = "FLASK", *, loads: t.Callable[[str], t.Any] = json.loads + ) -> bool: + """Load any environment variables that start with ``FLASK_``, + dropping the prefix from the env key for the config key. Values + are passed through a loading function to attempt to convert them + to more specific types than strings. + + Keys are loaded in :func:`sorted` order. + + The default loading function attempts to parse values as any + valid JSON type, including dicts and lists. + + Specific items in nested dicts can be set by separating the + keys with double underscores (``__``). If an intermediate key + doesn't exist, it will be initialized to an empty dict. + + :param prefix: Load env vars that start with this prefix, + separated with an underscore (``_``). + :param loads: Pass each string value to this function and use + the returned value as the config value. If any error is + raised it is ignored and the value remains a string. The + default is :func:`json.loads`. + + .. versionadded:: 2.1 + """ + prefix = f"{prefix}_" + len_prefix = len(prefix) + + for key in sorted(os.environ): + if not key.startswith(prefix): + continue + + value = os.environ[key] + + try: + value = loads(value) + except Exception: + # Keep the value as a string if loading failed. + pass + + # Change to key.removeprefix(prefix) on Python >= 3.9. + key = key[len_prefix:] + + if "__" not in key: + # A non-nested key, set directly. + self[key] = value + continue + + # Traverse nested dictionaries with keys separated by "__". 
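+            # Illustrative example (values assumed): with the default
+            # "FLASK" prefix, an env var FLASK_MAIL__PORT=25 arrives here
+            # as the key "MAIL__PORT" and ends up as
+            # self["MAIL"]["PORT"] = 25, with the intermediate "MAIL"
+            # dict created on demand.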
+ current = self + *parts, tail = key.split("__") + + for part in parts: + # If an intermediate dict does not exist, create it. + if part not in current: + current[part] = {} + + current = current[part] + + current[tail] = value + + return True + + def from_pyfile(self, filename: str | os.PathLike, silent: bool = False) -> bool: + """Updates the values in the config from a Python file. This function + behaves as if the file was imported as module with the + :meth:`from_object` function. + + :param filename: the filename of the config. This can either be an + absolute filename or a filename relative to the + root path. + :param silent: set to ``True`` if you want silent failure for missing + files. + :return: ``True`` if the file was loaded successfully. + + .. versionadded:: 0.7 + `silent` parameter. + """ + filename = os.path.join(self.root_path, filename) + d = types.ModuleType("config") + d.__file__ = filename + try: + with open(filename, mode="rb") as config_file: + exec(compile(config_file.read(), filename, "exec"), d.__dict__) + except OSError as e: + if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR): + return False + e.strerror = f"Unable to load configuration file ({e.strerror})" + raise + self.from_object(d) + return True + + def from_object(self, obj: object | str) -> None: + """Updates the values from the given object. An object can be of one + of the following two types: + + - a string: in this case the object with that name will be imported + - an actual object reference: that object is used directly + + Objects are usually either modules or classes. :meth:`from_object` + loads only the uppercase attributes of the module/class. A ``dict`` + object will not work with :meth:`from_object` because the keys of a + ``dict`` are not attributes of the ``dict`` class. + + Example of module-based configuration:: + + app.config.from_object('yourapplication.default_config') + from yourapplication import default_config + app.config.from_object(default_config) + + Nothing is done to the object before loading. If the object is a + class and has ``@property`` attributes, it needs to be + instantiated before being passed to this method. + + You should not use this function to load the actual configuration but + rather configuration defaults. The actual config should be loaded + with :meth:`from_pyfile` and ideally from a location not within the + package because the package might be installed system wide. + + See :ref:`config-dev-prod` for an example of class-based configuration + using :meth:`from_object`. + + :param obj: an import name or object + """ + if isinstance(obj, str): + obj = import_string(obj) + for key in dir(obj): + if key.isupper(): + self[key] = getattr(obj, key) + + def from_file( + self, + filename: str | os.PathLike, + load: t.Callable[[t.IO[t.Any]], t.Mapping], + silent: bool = False, + text: bool = True, + ) -> bool: + """Update the values in the config from a file that is loaded + using the ``load`` parameter. The loaded data is passed to the + :meth:`from_mapping` method. + + .. code-block:: python + + import json + app.config.from_file("config.json", load=json.load) + + import tomllib + app.config.from_file("config.toml", load=tomllib.load, text=False) + + :param filename: The path to the data file. This can be an + absolute path or relative to the config root path. + :param load: A callable that takes a file handle and returns a + mapping of loaded data from the file. 
+ :type load: ``Callable[[Reader], Mapping]`` where ``Reader`` + implements a ``read`` method. + :param silent: Ignore the file if it doesn't exist. + :param text: Open the file in text or binary mode. + :return: ``True`` if the file was loaded successfully. + + .. versionchanged:: 2.3 + The ``text`` parameter was added. + + .. versionadded:: 2.0 + """ + filename = os.path.join(self.root_path, filename) + + try: + with open(filename, "r" if text else "rb") as f: + obj = load(f) + except OSError as e: + if silent and e.errno in (errno.ENOENT, errno.EISDIR): + return False + + e.strerror = f"Unable to load configuration file ({e.strerror})" + raise + + return self.from_mapping(obj) + + def from_mapping( + self, mapping: t.Mapping[str, t.Any] | None = None, **kwargs: t.Any + ) -> bool: + """Updates the config like :meth:`update` ignoring items with + non-upper keys. + + :return: Always returns ``True``. + + .. versionadded:: 0.11 + """ + mappings: dict[str, t.Any] = {} + if mapping is not None: + mappings.update(mapping) + mappings.update(kwargs) + for key, value in mappings.items(): + if key.isupper(): + self[key] = value + return True + + def get_namespace( + self, namespace: str, lowercase: bool = True, trim_namespace: bool = True + ) -> dict[str, t.Any]: + """Returns a dictionary containing a subset of configuration options + that match the specified namespace/prefix. Example usage:: + + app.config['IMAGE_STORE_TYPE'] = 'fs' + app.config['IMAGE_STORE_PATH'] = '/var/app/images' + app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com' + image_store_config = app.config.get_namespace('IMAGE_STORE_') + + The resulting dictionary `image_store_config` would look like:: + + { + 'type': 'fs', + 'path': '/var/app/images', + 'base_url': 'http://img.website.com' + } + + This is often useful when configuration options map directly to + keyword arguments in functions or class constructors. + + :param namespace: a configuration namespace + :param lowercase: a flag indicating if the keys of the resulting + dictionary should be lowercase + :param trim_namespace: a flag indicating if the keys of the resulting + dictionary should not include the namespace + + .. versionadded:: 0.11 + """ + rv = {} + for k, v in self.items(): + if not k.startswith(namespace): + continue + if trim_namespace: + key = k[len(namespace) :] + else: + key = k + if lowercase: + key = key.lower() + rv[key] = v + return rv + + def __repr__(self) -> str: + return f"<{type(self).__name__} {dict.__repr__(self)}>" diff --git a/backend/test/lib/python3.8/site-packages/flask/ctx.py b/backend/test/lib/python3.8/site-packages/flask/ctx.py new file mode 100644 index 0000000000000000000000000000000000000000..b37e4e04a6c69254ba0f755813fccbeab9715ba4 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/ctx.py @@ -0,0 +1,440 @@ +from __future__ import annotations + +import contextvars +import sys +import typing as t +from functools import update_wrapper +from types import TracebackType + +from werkzeug.exceptions import HTTPException + +from . import typing as ft +from .globals import _cv_app +from .globals import _cv_request +from .signals import appcontext_popped +from .signals import appcontext_pushed + +if t.TYPE_CHECKING: # pragma: no cover + from .app import Flask + from .sessions import SessionMixin + from .wrappers import Request + + +# a singleton sentinel value for parameter defaults +_sentinel = object() + + +class _AppCtxGlobals: + """A plain object. 
Used as a namespace for storing data during an + application context. + + Creating an app context automatically creates this object, which is + made available as the :data:`g` proxy. + + .. describe:: 'key' in g + + Check whether an attribute is present. + + .. versionadded:: 0.10 + + .. describe:: iter(g) + + Return an iterator over the attribute names. + + .. versionadded:: 0.10 + """ + + # Define attr methods to let mypy know this is a namespace object + # that has arbitrary attributes. + + def __getattr__(self, name: str) -> t.Any: + try: + return self.__dict__[name] + except KeyError: + raise AttributeError(name) from None + + def __setattr__(self, name: str, value: t.Any) -> None: + self.__dict__[name] = value + + def __delattr__(self, name: str) -> None: + try: + del self.__dict__[name] + except KeyError: + raise AttributeError(name) from None + + def get(self, name: str, default: t.Any | None = None) -> t.Any: + """Get an attribute by name, or a default value. Like + :meth:`dict.get`. + + :param name: Name of attribute to get. + :param default: Value to return if the attribute is not present. + + .. versionadded:: 0.10 + """ + return self.__dict__.get(name, default) + + def pop(self, name: str, default: t.Any = _sentinel) -> t.Any: + """Get and remove an attribute by name. Like :meth:`dict.pop`. + + :param name: Name of attribute to pop. + :param default: Value to return if the attribute is not present, + instead of raising a ``KeyError``. + + .. versionadded:: 0.11 + """ + if default is _sentinel: + return self.__dict__.pop(name) + else: + return self.__dict__.pop(name, default) + + def setdefault(self, name: str, default: t.Any = None) -> t.Any: + """Get the value of an attribute if it is present, otherwise + set and return a default value. Like :meth:`dict.setdefault`. + + :param name: Name of attribute to get. + :param default: Value to set and return if the attribute is not + present. + + .. versionadded:: 0.11 + """ + return self.__dict__.setdefault(name, default) + + def __contains__(self, item: str) -> bool: + return item in self.__dict__ + + def __iter__(self) -> t.Iterator[str]: + return iter(self.__dict__) + + def __repr__(self) -> str: + ctx = _cv_app.get(None) + if ctx is not None: + return f"<flask.g of '{ctx.app.name}'>" + return object.__repr__(self) + + +def after_this_request(f: ft.AfterRequestCallable) -> ft.AfterRequestCallable: + """Executes a function after this request. This is useful to modify + response objects. The function is passed the response object and has + to return the same or a new one. + + Example:: + + @app.route('/') + def index(): + @after_this_request + def add_header(response): + response.headers['X-Foo'] = 'Parachute' + return response + return 'Hello World!' + + This is more useful if a function other than the view function wants to + modify a response. For instance think of a decorator that wants to add + some headers without converting the return value into a response object. + + .. versionadded:: 0.9 + """ + ctx = _cv_request.get(None) + + if ctx is None: + raise RuntimeError( + "'after_this_request' can only be used when a request" + " context is active, such as in a view function." + ) + + ctx._after_request_functions.append(f) + return f + + +def copy_current_request_context(f: t.Callable) -> t.Callable: + """A helper function that decorates a function to retain the current + request context. This is useful when working with greenlets. 
The moment + the function is decorated a copy of the request context is created and + then pushed when the function is called. The current session is also + included in the copied request context. + + Example:: + + import gevent + from flask import copy_current_request_context + + @app.route('/') + def index(): + @copy_current_request_context + def do_some_work(): + # do some work here, it can access flask.request or + # flask.session like you would otherwise in the view function. + ... + gevent.spawn(do_some_work) + return 'Regular response' + + .. versionadded:: 0.10 + """ + ctx = _cv_request.get(None) + + if ctx is None: + raise RuntimeError( + "'copy_current_request_context' can only be used when a" + " request context is active, such as in a view function." + ) + + ctx = ctx.copy() + + def wrapper(*args, **kwargs): + with ctx: + return ctx.app.ensure_sync(f)(*args, **kwargs) + + return update_wrapper(wrapper, f) + + +def has_request_context() -> bool: + """If you have code that wants to test if a request context is there or + not this function can be used. For instance, you may want to take advantage + of request information if the request object is available, but fail + silently if it is unavailable. + + :: + + class User(db.Model): + + def __init__(self, username, remote_addr=None): + self.username = username + if remote_addr is None and has_request_context(): + remote_addr = request.remote_addr + self.remote_addr = remote_addr + + Alternatively you can also just test any of the context bound objects + (such as :class:`request` or :class:`g`) for truthness:: + + class User(db.Model): + + def __init__(self, username, remote_addr=None): + self.username = username + if remote_addr is None and request: + remote_addr = request.remote_addr + self.remote_addr = remote_addr + + .. versionadded:: 0.7 + """ + return _cv_request.get(None) is not None + + +def has_app_context() -> bool: + """Works like :func:`has_request_context` but for the application + context. You can also just do a boolean check on the + :data:`current_app` object instead. + + .. versionadded:: 0.9 + """ + return _cv_app.get(None) is not None + + +class AppContext: + """The app context contains application-specific information. An app + context is created and pushed at the beginning of each request if + one is not already active. An app context is also pushed when + running CLI commands. + """ + + def __init__(self, app: Flask) -> None: + self.app = app + self.url_adapter = app.create_url_adapter(None) + self.g: _AppCtxGlobals = app.app_ctx_globals_class() + self._cv_tokens: list[contextvars.Token] = [] + + def push(self) -> None: + """Binds the app context to the current context.""" + self._cv_tokens.append(_cv_app.set(self)) + appcontext_pushed.send(self.app, _async_wrapper=self.app.ensure_sync) + + def pop(self, exc: BaseException | None = _sentinel) -> None: # type: ignore + """Pops the app context.""" + try: + if len(self._cv_tokens) == 1: + if exc is _sentinel: + exc = sys.exc_info()[1] + self.app.do_teardown_appcontext(exc) + finally: + ctx = _cv_app.get() + _cv_app.reset(self._cv_tokens.pop()) + + if ctx is not self: + raise AssertionError( + f"Popped wrong app context. 
({ctx!r} instead of {self!r})" + ) + + appcontext_popped.send(self.app, _async_wrapper=self.app.ensure_sync) + + def __enter__(self) -> AppContext: + self.push() + return self + + def __exit__( + self, + exc_type: type | None, + exc_value: BaseException | None, + tb: TracebackType | None, + ) -> None: + self.pop(exc_value) + + +class RequestContext: + """The request context contains per-request information. The Flask + app creates and pushes it at the beginning of the request, then pops + it at the end of the request. It will create the URL adapter and + request object for the WSGI environment provided. + + Do not attempt to use this class directly, instead use + :meth:`~flask.Flask.test_request_context` and + :meth:`~flask.Flask.request_context` to create this object. + + When the request context is popped, it will evaluate all the + functions registered on the application for teardown execution + (:meth:`~flask.Flask.teardown_request`). + + The request context is automatically popped at the end of the + request. When using the interactive debugger, the context will be + restored so ``request`` is still accessible. Similarly, the test + client can preserve the context after the request ends. However, + teardown functions may already have closed some resources such as + database connections. + """ + + def __init__( + self, + app: Flask, + environ: dict, + request: Request | None = None, + session: SessionMixin | None = None, + ) -> None: + self.app = app + if request is None: + request = app.request_class(environ) + request.json_module = app.json + self.request: Request = request + self.url_adapter = None + try: + self.url_adapter = app.create_url_adapter(self.request) + except HTTPException as e: + self.request.routing_exception = e + self.flashes: list[tuple[str, str]] | None = None + self.session: SessionMixin | None = session + # Functions that should be executed after the request on the response + # object. These will be called before the regular "after_request" + # functions. + self._after_request_functions: list[ft.AfterRequestCallable] = [] + + self._cv_tokens: list[tuple[contextvars.Token, AppContext | None]] = [] + + def copy(self) -> RequestContext: + """Creates a copy of this request context with the same request object. + This can be used to move a request context to a different greenlet. + Because the actual request object is the same this cannot be used to + move a request context to a different thread unless access to the + request object is locked. + + .. versionadded:: 0.10 + + .. versionchanged:: 1.1 + The current session object is used instead of reloading the original + data. This prevents `flask.session` pointing to an out-of-date object. + """ + return self.__class__( + self.app, + environ=self.request.environ, + request=self.request, + session=self.session, + ) + + def match_request(self) -> None: + """Can be overridden by a subclass to hook into the matching + of the request. + """ + try: + result = self.url_adapter.match(return_rule=True) # type: ignore + self.request.url_rule, self.request.view_args = result # type: ignore + except HTTPException as e: + self.request.routing_exception = e + + def push(self) -> None: + # Before we push the request context we have to ensure that there + # is an application context. 
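+        # (Illustrative note: this on-demand push is why entering a bare
+        # request context, e.g. "with app.test_request_context(): ...",
+        # also makes current_app and g usable without an explicit
+        # app.app_context() call.)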
+ app_ctx = _cv_app.get(None) + + if app_ctx is None or app_ctx.app is not self.app: + app_ctx = self.app.app_context() + app_ctx.push() + else: + app_ctx = None + + self._cv_tokens.append((_cv_request.set(self), app_ctx)) + + # Open the session at the moment that the request context is available. + # This allows a custom open_session method to use the request context. + # Only open a new session if this is the first time the request was + # pushed, otherwise stream_with_context loses the session. + if self.session is None: + session_interface = self.app.session_interface + self.session = session_interface.open_session(self.app, self.request) + + if self.session is None: + self.session = session_interface.make_null_session(self.app) + + # Match the request URL after loading the session, so that the + # session is available in custom URL converters. + if self.url_adapter is not None: + self.match_request() + + def pop(self, exc: BaseException | None = _sentinel) -> None: # type: ignore + """Pops the request context and unbinds it by doing that. This will + also trigger the execution of functions registered by the + :meth:`~flask.Flask.teardown_request` decorator. + + .. versionchanged:: 0.9 + Added the `exc` argument. + """ + clear_request = len(self._cv_tokens) == 1 + + try: + if clear_request: + if exc is _sentinel: + exc = sys.exc_info()[1] + self.app.do_teardown_request(exc) + + request_close = getattr(self.request, "close", None) + if request_close is not None: + request_close() + finally: + ctx = _cv_request.get() + token, app_ctx = self._cv_tokens.pop() + _cv_request.reset(token) + + # get rid of circular dependencies at the end of the request + # so that we don't require the GC to be active. + if clear_request: + ctx.request.environ["werkzeug.request"] = None + + if app_ctx is not None: + app_ctx.pop(exc) + + if ctx is not self: + raise AssertionError( + f"Popped wrong request context. ({ctx!r} instead of {self!r})" + ) + + def __enter__(self) -> RequestContext: + self.push() + return self + + def __exit__( + self, + exc_type: type | None, + exc_value: BaseException | None, + tb: TracebackType | None, + ) -> None: + self.pop(exc_value) + + def __repr__(self) -> str: + return ( + f"<{type(self).__name__} {self.request.url!r}" + f" [{self.request.method}] of {self.app.name}>" + ) diff --git a/backend/test/lib/python3.8/site-packages/flask/debughelpers.py b/backend/test/lib/python3.8/site-packages/flask/debughelpers.py new file mode 100644 index 0000000000000000000000000000000000000000..6061441a89fbf984ff6414b0b597192e7bca5bbc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/debughelpers.py @@ -0,0 +1,160 @@ +from __future__ import annotations + +import typing as t + +from .app import Flask +from .blueprints import Blueprint +from .globals import request_ctx + + +class UnexpectedUnicodeError(AssertionError, UnicodeError): + """Raised in places where we want some better error reporting for + unexpected unicode or binary data. + """ + + +class DebugFilesKeyError(KeyError, AssertionError): + """Raised from request.files during debugging. The idea is that it can + provide a better error message than just a generic KeyError/BadRequest. + """ + + def __init__(self, request, key): + form_matches = request.form.getlist(key) + buf = [ + f"You tried to access the file {key!r} in the request.files" + " dictionary but it does not exist. 
The mimetype for the" + f" request is {request.mimetype!r} instead of" + " 'multipart/form-data' which means that no file contents" + " were transmitted. To fix this error you should provide" + ' enctype="multipart/form-data" in your form.' + ] + if form_matches: + names = ", ".join(repr(x) for x in form_matches) + buf.append( + "\n\nThe browser instead transmitted some file names. " + f"This was submitted: {names}" + ) + self.msg = "".join(buf) + + def __str__(self): + return self.msg + + +class FormDataRoutingRedirect(AssertionError): + """This exception is raised in debug mode if a routing redirect + would cause the browser to drop the method or body. This happens + when method is not GET, HEAD or OPTIONS and the status code is not + 307 or 308. + """ + + def __init__(self, request): + exc = request.routing_exception + buf = [ + f"A request was sent to '{request.url}', but routing issued" + f" a redirect to the canonical URL '{exc.new_url}'." + ] + + if f"{request.base_url}/" == exc.new_url.partition("?")[0]: + buf.append( + " The URL was defined with a trailing slash. Flask" + " will redirect to the URL with a trailing slash if it" + " was accessed without one." + ) + + buf.append( + " Send requests to the canonical URL, or use 307 or 308 for" + " routing redirects. Otherwise, browsers will drop form" + " data.\n\n" + "This exception is only raised in debug mode." + ) + super().__init__("".join(buf)) + + +def attach_enctype_error_multidict(request): + """Patch ``request.files.__getitem__`` to raise a descriptive error + about ``enctype=multipart/form-data``. + + :param request: The request to patch. + :meta private: + """ + oldcls = request.files.__class__ + + class newcls(oldcls): + def __getitem__(self, key): + try: + return super().__getitem__(key) + except KeyError as e: + if key not in request.form: + raise + + raise DebugFilesKeyError(request, key).with_traceback( + e.__traceback__ + ) from None + + newcls.__name__ = oldcls.__name__ + newcls.__module__ = oldcls.__module__ + request.files.__class__ = newcls + + +def _dump_loader_info(loader) -> t.Generator: + yield f"class: {type(loader).__module__}.{type(loader).__name__}" + for key, value in sorted(loader.__dict__.items()): + if key.startswith("_"): + continue + if isinstance(value, (tuple, list)): + if not all(isinstance(x, str) for x in value): + continue + yield f"{key}:" + for item in value: + yield f" - {item}" + continue + elif not isinstance(value, (str, int, float, bool)): + continue + yield f"{key}: {value!r}" + + +def explain_template_loading_attempts(app: Flask, template, attempts) -> None: + """This should help developers understand what failed""" + info = [f"Locating template {template!r}:"] + total_found = 0 + blueprint = None + if request_ctx and request_ctx.request.blueprint is not None: + blueprint = request_ctx.request.blueprint + + for idx, (loader, srcobj, triple) in enumerate(attempts): + if isinstance(srcobj, Flask): + src_info = f"application {srcobj.import_name!r}" + elif isinstance(srcobj, Blueprint): + src_info = f"blueprint {srcobj.name!r} ({srcobj.import_name})" + else: + src_info = repr(srcobj) + + info.append(f"{idx + 1:5}: trying loader of {src_info}") + + for line in _dump_loader_info(loader): + info.append(f" {line}") + + if triple is None: + detail = "no match" + else: + detail = f"found ({triple[1] or '<string>'!r})" + total_found += 1 + info.append(f" -> {detail}") + + seems_fishy = False + if total_found == 0: + info.append("Error: the template could not be found.") + seems_fishy = True + 
elif total_found > 1: + info.append("Warning: multiple loaders returned a match for the template.") + seems_fishy = True + + if blueprint is not None and seems_fishy: + info.append( + " The template was looked up from an endpoint that belongs" + f" to the blueprint {blueprint!r}." + ) + info.append(" Maybe you did not place a template in the right folder?") + info.append(" See https://flask.palletsprojects.com/blueprints/#templates") + + app.logger.info("\n".join(info)) diff --git a/backend/test/lib/python3.8/site-packages/flask/globals.py b/backend/test/lib/python3.8/site-packages/flask/globals.py new file mode 100644 index 0000000000000000000000000000000000000000..e9cd4acfcda8a51dda20c604678fd706709070b2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/globals.py @@ -0,0 +1,96 @@ +from __future__ import annotations + +import typing as t +from contextvars import ContextVar + +from werkzeug.local import LocalProxy + +if t.TYPE_CHECKING: # pragma: no cover + from .app import Flask + from .ctx import _AppCtxGlobals + from .ctx import AppContext + from .ctx import RequestContext + from .sessions import SessionMixin + from .wrappers import Request + + +class _FakeStack: + def __init__(self, name: str, cv: ContextVar[t.Any]) -> None: + self.name = name + self.cv = cv + + @property + def top(self) -> t.Any | None: + import warnings + + warnings.warn( + f"'_{self.name}_ctx_stack' is deprecated and will be removed in Flask 2.4." + f" Use 'g' to store data, or '{self.name}_ctx' to access the current" + " context.", + DeprecationWarning, + stacklevel=2, + ) + return self.cv.get(None) + + +_no_app_msg = """\ +Working outside of application context. + +This typically means that you attempted to use functionality that needed +the current application. To solve this, set up an application context +with app.app_context(). See the documentation for more information.\ +""" +_cv_app: ContextVar[AppContext] = ContextVar("flask.app_ctx") +__app_ctx_stack = _FakeStack("app", _cv_app) +app_ctx: AppContext = LocalProxy( # type: ignore[assignment] + _cv_app, unbound_message=_no_app_msg +) +current_app: Flask = LocalProxy( # type: ignore[assignment] + _cv_app, "app", unbound_message=_no_app_msg +) +g: _AppCtxGlobals = LocalProxy( # type: ignore[assignment] + _cv_app, "g", unbound_message=_no_app_msg +) + +_no_req_msg = """\ +Working outside of request context. + +This typically means that you attempted to use functionality that needed +an active HTTP request. 
Consult the documentation on testing for +information about how to avoid this problem.\ +""" +_cv_request: ContextVar[RequestContext] = ContextVar("flask.request_ctx") +__request_ctx_stack = _FakeStack("request", _cv_request) +request_ctx: RequestContext = LocalProxy( # type: ignore[assignment] + _cv_request, unbound_message=_no_req_msg +) +request: Request = LocalProxy( # type: ignore[assignment] + _cv_request, "request", unbound_message=_no_req_msg +) +session: SessionMixin = LocalProxy( # type: ignore[assignment] + _cv_request, "session", unbound_message=_no_req_msg +) + + +def __getattr__(name: str) -> t.Any: + if name == "_app_ctx_stack": + import warnings + + warnings.warn( + "'_app_ctx_stack' is deprecated and will be removed in Flask 2.4.", + DeprecationWarning, + stacklevel=2, + ) + return __app_ctx_stack + + if name == "_request_ctx_stack": + import warnings + + warnings.warn( + "'_request_ctx_stack' is deprecated and will be removed in Flask 2.4.", + DeprecationWarning, + stacklevel=2, + ) + return __request_ctx_stack + + raise AttributeError(name) diff --git a/backend/test/lib/python3.8/site-packages/flask/helpers.py b/backend/test/lib/python3.8/site-packages/flask/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..284c36965b527553d55dcf058c51110483061134 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/helpers.py @@ -0,0 +1,701 @@ +from __future__ import annotations + +import importlib.util +import os +import socket +import sys +import typing as t +import warnings +from datetime import datetime +from functools import lru_cache +from functools import update_wrapper +from threading import RLock + +import werkzeug.utils +from werkzeug.exceptions import abort as _wz_abort +from werkzeug.utils import redirect as _wz_redirect + +from .globals import _cv_request +from .globals import current_app +from .globals import request +from .globals import request_ctx +from .globals import session +from .signals import message_flashed + +if t.TYPE_CHECKING: # pragma: no cover + from werkzeug.wrappers import Response as BaseResponse + from .wrappers import Response + + +def get_debug_flag() -> bool: + """Get whether debug mode should be enabled for the app, indicated by the + :envvar:`FLASK_DEBUG` environment variable. The default is ``False``. + """ + val = os.environ.get("FLASK_DEBUG") + return bool(val and val.lower() not in {"0", "false", "no"}) + + +def get_load_dotenv(default: bool = True) -> bool: + """Get whether the user has disabled loading default dotenv files by + setting :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load + the files. + + :param default: What to return if the env var isn't set. + """ + val = os.environ.get("FLASK_SKIP_DOTENV") + + if not val: + return default + + return val.lower() in ("0", "false", "no") + + +def stream_with_context( + generator_or_function: ( + t.Iterator[t.AnyStr] | t.Callable[..., t.Iterator[t.AnyStr]] + ) +) -> t.Iterator[t.AnyStr]: + """Request contexts disappear when the response is started on the server. + This is done for efficiency reasons and to make it less likely to encounter + memory leaks with badly written WSGI middlewares. The downside is that if + you are using streamed responses, the generator cannot access request bound + information any more. 
+ + This function however can help you keep the context around for longer:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + @stream_with_context + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(generate()) + + Alternatively it can also be used around a specific generator:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(stream_with_context(generate())) + + .. versionadded:: 0.9 + """ + try: + gen = iter(generator_or_function) # type: ignore + except TypeError: + + def decorator(*args: t.Any, **kwargs: t.Any) -> t.Any: + gen = generator_or_function(*args, **kwargs) # type: ignore + return stream_with_context(gen) + + return update_wrapper(decorator, generator_or_function) # type: ignore + + def generator() -> t.Generator: + ctx = _cv_request.get(None) + if ctx is None: + raise RuntimeError( + "'stream_with_context' can only be used when a request" + " context is active, such as in a view function." + ) + with ctx: + # Dummy sentinel. Has to be inside the context block or we're + # not actually keeping the context around. + yield None + + # The try/finally is here so that if someone passes a WSGI level + # iterator in we're still running the cleanup logic. Generators + # don't need that because they are closed on their destruction + # automatically. + try: + yield from gen + finally: + if hasattr(gen, "close"): + gen.close() + + # The trick is to start the generator. Then the code execution runs until + # the first dummy None is yielded at which point the context was already + # pushed. This item is discarded. Then when the iteration continues the + # real generator is executed. + wrapped_g = generator() + next(wrapped_g) + return wrapped_g + + +def make_response(*args: t.Any) -> Response: + """Sometimes it is necessary to set additional headers in a view. Because + views do not have to return response objects but can return a value that + is converted into a response object by Flask itself, it becomes tricky to + add headers to it. This function can be called instead of using a return + and you will get a response object which you can use to attach headers. + + If view looked like this and you want to add a new header:: + + def index(): + return render_template('index.html', foo=42) + + You can now do something like this:: + + def index(): + response = make_response(render_template('index.html', foo=42)) + response.headers['X-Parachutes'] = 'parachutes are cool' + return response + + This function accepts the very same arguments you can return from a + view function. This for example creates a response with a 404 error + code:: + + response = make_response(render_template('not_found.html'), 404) + + The other use case of this function is to force the return value of a + view function into a response which is helpful with view + decorators:: + + response = make_response(view_function()) + response.headers['X-Parachutes'] = 'parachutes are cool' + + Internally this function does the following things: + + - if no arguments are passed, it creates a new response argument + - if one argument is passed, :meth:`flask.Flask.make_response` + is invoked with it. + - if more than one argument is passed, the arguments are passed + to the :meth:`flask.Flask.make_response` function as tuple. + + .. 
versionadded:: 0.6 + """ + if not args: + return current_app.response_class() + if len(args) == 1: + args = args[0] + return current_app.make_response(args) # type: ignore + + +def url_for( + endpoint: str, + *, + _anchor: str | None = None, + _method: str | None = None, + _scheme: str | None = None, + _external: bool | None = None, + **values: t.Any, +) -> str: + """Generate a URL to the given endpoint with the given values. + + This requires an active request or application context, and calls + :meth:`current_app.url_for() <flask.Flask.url_for>`. See that method + for full documentation. + + :param endpoint: The endpoint name associated with the URL to + generate. If this starts with a ``.``, the current blueprint + name (if any) will be used. + :param _anchor: If given, append this as ``#anchor`` to the URL. + :param _method: If given, generate the URL associated with this + method for the endpoint. + :param _scheme: If given, the URL will have this scheme if it is + external. + :param _external: If given, prefer the URL to be internal (False) or + require it to be external (True). External URLs include the + scheme and domain. When not in an active request, URLs are + external by default. + :param values: Values to use for the variable parts of the URL rule. + Unknown keys are appended as query string arguments, like + ``?a=b&c=d``. + + .. versionchanged:: 2.2 + Calls ``current_app.url_for``, allowing an app to override the + behavior. + + .. versionchanged:: 0.10 + The ``_scheme`` parameter was added. + + .. versionchanged:: 0.9 + The ``_anchor`` and ``_method`` parameters were added. + + .. versionchanged:: 0.9 + Calls ``app.handle_url_build_error`` on build errors. + """ + return current_app.url_for( + endpoint, + _anchor=_anchor, + _method=_method, + _scheme=_scheme, + _external=_external, + **values, + ) + + +def redirect( + location: str, code: int = 302, Response: type[BaseResponse] | None = None +) -> BaseResponse: + """Create a redirect response object. + + If :data:`~flask.current_app` is available, it will use its + :meth:`~flask.Flask.redirect` method, otherwise it will use + :func:`werkzeug.utils.redirect`. + + :param location: The URL to redirect to. + :param code: The status code for the redirect. + :param Response: The response class to use. Not used when + ``current_app`` is active, which uses ``app.response_class``. + + .. versionadded:: 2.2 + Calls ``current_app.redirect`` if available instead of always + using Werkzeug's default ``redirect``. + """ + if current_app: + return current_app.redirect(location, code=code) + + return _wz_redirect(location, code=code, Response=Response) + + +def abort(code: int | BaseResponse, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: + """Raise an :exc:`~werkzeug.exceptions.HTTPException` for the given + status code. + + If :data:`~flask.current_app` is available, it will call its + :attr:`~flask.Flask.aborter` object, otherwise it will use + :func:`werkzeug.exceptions.abort`. + + :param code: The status code for the exception, which must be + registered in ``app.aborter``. + :param args: Passed to the exception. + :param kwargs: Passed to the exception. + + .. versionadded:: 2.2 + Calls ``current_app.aborter`` if available instead of always + using Werkzeug's default ``abort``. + """ + if current_app: + current_app.aborter(code, *args, **kwargs) + + _wz_abort(code, *args, **kwargs) + + +def get_template_attribute(template_name: str, attribute: str) -> t.Any: + """Loads a macro (or variable) a template exports. 
This can be used to + invoke a macro from within Python code. If you for example have a + template named :file:`_cider.html` with the following contents: + + .. sourcecode:: html+jinja + + {% macro hello(name) %}Hello {{ name }}!{% endmacro %} + + You can access this from Python code like this:: + + hello = get_template_attribute('_cider.html', 'hello') + return hello('World') + + .. versionadded:: 0.2 + + :param template_name: the name of the template + :param attribute: the name of the variable of macro to access + """ + return getattr(current_app.jinja_env.get_template(template_name).module, attribute) + + +def flash(message: str, category: str = "message") -> None: + """Flashes a message to the next request. In order to remove the + flashed message from the session and to display it to the user, + the template has to call :func:`get_flashed_messages`. + + .. versionchanged:: 0.3 + `category` parameter added. + + :param message: the message to be flashed. + :param category: the category for the message. The following values + are recommended: ``'message'`` for any kind of message, + ``'error'`` for errors, ``'info'`` for information + messages and ``'warning'`` for warnings. However any + kind of string can be used as category. + """ + # Original implementation: + # + # session.setdefault('_flashes', []).append((category, message)) + # + # This assumed that changes made to mutable structures in the session are + # always in sync with the session object, which is not true for session + # implementations that use external storage for keeping their keys/values. + flashes = session.get("_flashes", []) + flashes.append((category, message)) + session["_flashes"] = flashes + app = current_app._get_current_object() # type: ignore + message_flashed.send( + app, + _async_wrapper=app.ensure_sync, + message=message, + category=category, + ) + + +def get_flashed_messages( + with_categories: bool = False, category_filter: t.Iterable[str] = () +) -> list[str] | list[tuple[str, str]]: + """Pulls all flashed messages from the session and returns them. + Further calls in the same request to the function will return + the same messages. By default just the messages are returned, + but when `with_categories` is set to ``True``, the return value will + be a list of tuples in the form ``(category, message)`` instead. + + Filter the flashed messages to one or more categories by providing those + categories in `category_filter`. This allows rendering categories in + separate html blocks. The `with_categories` and `category_filter` + arguments are distinct: + + * `with_categories` controls whether categories are returned with message + text (``True`` gives a tuple, where ``False`` gives just the message text). + * `category_filter` filters the messages down to only those matching the + provided categories. + + See :doc:`/patterns/flashing` for examples. + + .. versionchanged:: 0.3 + `with_categories` parameter added. + + .. versionchanged:: 0.9 + `category_filter` parameter added. + + :param with_categories: set to ``True`` to also receive categories. + :param category_filter: filter of categories to limit return values. Only + categories in the list will be returned. 
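+
+    A short usage sketch (the ``"error"`` category name is assumed for
+    illustration)::
+
+        pairs = get_flashed_messages(
+            with_categories=True, category_filter=["error"]
+        )
+        for category, message in pairs:
+            ...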
+ """ + flashes = request_ctx.flashes + if flashes is None: + flashes = session.pop("_flashes") if "_flashes" in session else [] + request_ctx.flashes = flashes + if category_filter: + flashes = list(filter(lambda f: f[0] in category_filter, flashes)) + if not with_categories: + return [x[1] for x in flashes] + return flashes + + +def _prepare_send_file_kwargs(**kwargs: t.Any) -> dict[str, t.Any]: + if kwargs.get("max_age") is None: + kwargs["max_age"] = current_app.get_send_file_max_age + + kwargs.update( + environ=request.environ, + use_x_sendfile=current_app.config["USE_X_SENDFILE"], + response_class=current_app.response_class, + _root_path=current_app.root_path, # type: ignore + ) + return kwargs + + +def send_file( + path_or_file: os.PathLike | str | t.BinaryIO, + mimetype: str | None = None, + as_attachment: bool = False, + download_name: str | None = None, + conditional: bool = True, + etag: bool | str = True, + last_modified: datetime | int | float | None = None, + max_age: None | (int | t.Callable[[str | None], int | None]) = None, +) -> Response: + """Send the contents of a file to the client. + + The first argument can be a file path or a file-like object. Paths + are preferred in most cases because Werkzeug can manage the file and + get extra information from the path. Passing a file-like object + requires that the file is opened in binary mode, and is mostly + useful when building a file in memory with :class:`io.BytesIO`. + + Never pass file paths provided by a user. The path is assumed to be + trusted, so a user could craft a path to access a file you didn't + intend. Use :func:`send_from_directory` to safely serve + user-requested paths from within a directory. + + If the WSGI server sets a ``file_wrapper`` in ``environ``, it is + used, otherwise Werkzeug's built-in wrapper is used. Alternatively, + if the HTTP server supports ``X-Sendfile``, configuring Flask with + ``USE_X_SENDFILE = True`` will tell the server to send the given + path, which is much more efficient than reading it in Python. + + :param path_or_file: The path to the file to send, relative to the + current working directory if a relative path is given. + Alternatively, a file-like object opened in binary mode. Make + sure the file pointer is seeked to the start of the data. + :param mimetype: The MIME type to send for the file. If not + provided, it will try to detect it from the file name. + :param as_attachment: Indicate to a browser that it should offer to + save the file instead of displaying it. + :param download_name: The default name browsers will use when saving + the file. Defaults to the passed file name. + :param conditional: Enable conditional and range responses based on + request headers. Requires passing a file path and ``environ``. + :param etag: Calculate an ETag for the file, which requires passing + a file path. Can also be a string to use instead. + :param last_modified: The last modified time to send for the file, + in seconds. If not provided, it will try to detect it from the + file path. + :param max_age: How long the client should cache the file, in + seconds. If set, ``Cache-Control`` will be ``public``, otherwise + it will be ``no-cache`` to prefer conditional caching. + + .. versionchanged:: 2.0 + ``download_name`` replaces the ``attachment_filename`` + parameter. If ``as_attachment=False``, it is passed with + ``Content-Disposition: inline`` instead. + + .. versionchanged:: 2.0 + ``max_age`` replaces the ``cache_timeout`` parameter. 
+ ``conditional`` is enabled and ``max_age`` is not set by + default. + + .. versionchanged:: 2.0 + ``etag`` replaces the ``add_etags`` parameter. It can be a + string to use instead of generating one. + + .. versionchanged:: 2.0 + Passing a file-like object that inherits from + :class:`~io.TextIOBase` will raise a :exc:`ValueError` rather + than sending an empty file. + + .. versionadded:: 2.0 + Moved the implementation to Werkzeug. This is now a wrapper to + pass some Flask-specific arguments. + + .. versionchanged:: 1.1 + ``filename`` may be a :class:`~os.PathLike` object. + + .. versionchanged:: 1.1 + Passing a :class:`~io.BytesIO` object supports range requests. + + .. versionchanged:: 1.0.3 + Filenames are encoded with ASCII instead of Latin-1 for broader + compatibility with WSGI servers. + + .. versionchanged:: 1.0 + UTF-8 filenames as specified in :rfc:`2231` are supported. + + .. versionchanged:: 0.12 + The filename is no longer automatically inferred from file + objects. If you want to use automatic MIME and etag support, + pass a filename via ``filename_or_fp`` or + ``attachment_filename``. + + .. versionchanged:: 0.12 + ``attachment_filename`` is preferred over ``filename`` for MIME + detection. + + .. versionchanged:: 0.9 + ``cache_timeout`` defaults to + :meth:`Flask.get_send_file_max_age`. + + .. versionchanged:: 0.7 + MIME guessing and etag support for file-like objects was + deprecated because it was unreliable. Pass a filename if you are + able to, otherwise attach an etag yourself. + + .. versionchanged:: 0.5 + The ``add_etags``, ``cache_timeout`` and ``conditional`` + parameters were added. The default behavior is to add etags. + + .. versionadded:: 0.2 + """ + return werkzeug.utils.send_file( # type: ignore[return-value] + **_prepare_send_file_kwargs( + path_or_file=path_or_file, + environ=request.environ, + mimetype=mimetype, + as_attachment=as_attachment, + download_name=download_name, + conditional=conditional, + etag=etag, + last_modified=last_modified, + max_age=max_age, + ) + ) + + +def send_from_directory( + directory: os.PathLike | str, + path: os.PathLike | str, + **kwargs: t.Any, +) -> Response: + """Send a file from within a directory using :func:`send_file`. + + .. code-block:: python + + @app.route("/uploads/<path:name>") + def download_file(name): + return send_from_directory( + app.config['UPLOAD_FOLDER'], name, as_attachment=True + ) + + This is a secure way to serve files from a folder, such as static + files or uploads. Uses :func:`~werkzeug.security.safe_join` to + ensure the path coming from the client is not maliciously crafted to + point outside the specified directory. + + If the final path does not point to an existing regular file, + raises a 404 :exc:`~werkzeug.exceptions.NotFound` error. + + :param directory: The directory that ``path`` must be located under, + relative to the current application's root path. + :param path: The path to the file to send, relative to + ``directory``. + :param kwargs: Arguments to pass to :func:`send_file`. + + .. versionchanged:: 2.0 + ``path`` replaces the ``filename`` parameter. + + .. versionadded:: 2.0 + Moved the implementation to Werkzeug. This is now a wrapper to + pass some Flask-specific arguments. + + .. versionadded:: 0.5 + """ + return werkzeug.utils.send_from_directory( # type: ignore[return-value] + directory, path, **_prepare_send_file_kwargs(**kwargs) + ) + + +def get_root_path(import_name: str) -> str: + """Find the root path of a package, or the path that contains a + module. 
If it cannot be found, returns the current working + directory. + + Not to be confused with the value returned by :func:`find_package`. + + :meta private: + """ + # Module already imported and has a file attribute. Use that first. + mod = sys.modules.get(import_name) + + if mod is not None and hasattr(mod, "__file__") and mod.__file__ is not None: + return os.path.dirname(os.path.abspath(mod.__file__)) + + # Next attempt: check the loader. + try: + spec = importlib.util.find_spec(import_name) + + if spec is None: + raise ValueError + except (ImportError, ValueError): + loader = None + else: + loader = spec.loader + + # Loader does not exist or we're referring to an unloaded main + # module or a main module without path (interactive sessions), go + # with the current working directory. + if loader is None: + return os.getcwd() + + if hasattr(loader, "get_filename"): + filepath = loader.get_filename(import_name) + else: + # Fall back to imports. + __import__(import_name) + mod = sys.modules[import_name] + filepath = getattr(mod, "__file__", None) + + # If we don't have a file path it might be because it is a + # namespace package. In this case pick the root path from the + # first module that is contained in the package. + if filepath is None: + raise RuntimeError( + "No root path can be found for the provided module" + f" {import_name!r}. This can happen because the module" + " came from an import hook that does not provide file" + " name information or because it's a namespace package." + " In this case the root path needs to be explicitly" + " provided." + ) + + # filepath is import_name.py for a module, or __init__.py for a package. + return os.path.dirname(os.path.abspath(filepath)) + + +class locked_cached_property(werkzeug.utils.cached_property): + """A :func:`property` that is only evaluated once. Like + :class:`werkzeug.utils.cached_property` except access uses a lock + for thread safety. + + .. deprecated:: 2.3 + Will be removed in Flask 2.4. Use a lock inside the decorated function if + locking is needed. + + .. versionchanged:: 2.0 + Inherits from Werkzeug's ``cached_property`` (and ``property``). + """ + + def __init__( + self, + fget: t.Callable[[t.Any], t.Any], + name: str | None = None, + doc: str | None = None, + ) -> None: + import warnings + + warnings.warn( + "'locked_cached_property' is deprecated and will be removed in Flask 2.4." + " Use a lock inside the decorated function if locking is needed.", + DeprecationWarning, + stacklevel=2, + ) + super().__init__(fget, name=name, doc=doc) + self.lock = RLock() + + def __get__(self, obj: object, type: type = None) -> t.Any: # type: ignore + if obj is None: + return self + + with self.lock: + return super().__get__(obj, type=type) + + def __set__(self, obj: object, value: t.Any) -> None: + with self.lock: + super().__set__(obj, value) + + def __delete__(self, obj: object) -> None: + with self.lock: + super().__delete__(obj) + + +def is_ip(value: str) -> bool: + """Determine if the given string is an IP address. + + :param value: value to check + :type value: str + + :return: True if string is an IP address + :rtype: bool + + .. deprecated:: 2.3 + Will be removed in Flask 2.4. 
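+
+    For illustration, both address families are checked (a sketch; the
+    literal values are arbitrary)::
+
+        is_ip("127.0.0.1")  # True, a valid IPv4 address
+        is_ip("::1")        # True, a valid IPv6 address
+        is_ip("localhost")  # False, a name rather than an address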
+ """ + warnings.warn( + "The 'is_ip' function is deprecated and will be removed in Flask 2.4.", + DeprecationWarning, + stacklevel=2, + ) + + for family in (socket.AF_INET, socket.AF_INET6): + try: + socket.inet_pton(family, value) + except OSError: + pass + else: + return True + + return False + + +@lru_cache(maxsize=None) +def _split_blueprint_path(name: str) -> list[str]: + out: list[str] = [name] + + if "." in name: + out.extend(_split_blueprint_path(name.rpartition(".")[0])) + + return out diff --git a/backend/test/lib/python3.8/site-packages/flask/json/__init__.py b/backend/test/lib/python3.8/site-packages/flask/json/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f15296fed0fbf3aa4157429aed9266b803370e3d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/json/__init__.py @@ -0,0 +1,170 @@ +from __future__ import annotations + +import json as _json +import typing as t + +from ..globals import current_app +from .provider import _default + +if t.TYPE_CHECKING: # pragma: no cover + from ..wrappers import Response + + +def dumps(obj: t.Any, **kwargs: t.Any) -> str: + """Serialize data as JSON. + + If :data:`~flask.current_app` is available, it will use its + :meth:`app.json.dumps() <flask.json.provider.JSONProvider.dumps>` + method, otherwise it will use :func:`json.dumps`. + + :param obj: The data to serialize. + :param kwargs: Arguments passed to the ``dumps`` implementation. + + .. versionchanged:: 2.3 + The ``app`` parameter was removed. + + .. versionchanged:: 2.2 + Calls ``current_app.json.dumps``, allowing an app to override + the behavior. + + .. versionchanged:: 2.0.2 + :class:`decimal.Decimal` is supported by converting to a string. + + .. versionchanged:: 2.0 + ``encoding`` will be removed in Flask 2.1. + + .. versionchanged:: 1.0.3 + ``app`` can be passed directly, rather than requiring an app + context for configuration. + """ + if current_app: + return current_app.json.dumps(obj, **kwargs) + + kwargs.setdefault("default", _default) + return _json.dumps(obj, **kwargs) + + +def dump(obj: t.Any, fp: t.IO[str], **kwargs: t.Any) -> None: + """Serialize data as JSON and write to a file. + + If :data:`~flask.current_app` is available, it will use its + :meth:`app.json.dump() <flask.json.provider.JSONProvider.dump>` + method, otherwise it will use :func:`json.dump`. + + :param obj: The data to serialize. + :param fp: A file opened for writing text. Should use the UTF-8 + encoding to be valid JSON. + :param kwargs: Arguments passed to the ``dump`` implementation. + + .. versionchanged:: 2.3 + The ``app`` parameter was removed. + + .. versionchanged:: 2.2 + Calls ``current_app.json.dump``, allowing an app to override + the behavior. + + .. versionchanged:: 2.0 + Writing to a binary file, and the ``encoding`` argument, will be + removed in Flask 2.1. + """ + if current_app: + current_app.json.dump(obj, fp, **kwargs) + else: + kwargs.setdefault("default", _default) + _json.dump(obj, fp, **kwargs) + + +def loads(s: str | bytes, **kwargs: t.Any) -> t.Any: + """Deserialize data as JSON. + + If :data:`~flask.current_app` is available, it will use its + :meth:`app.json.loads() <flask.json.provider.JSONProvider.loads>` + method, otherwise it will use :func:`json.loads`. + + :param s: Text or UTF-8 bytes. + :param kwargs: Arguments passed to the ``loads`` implementation. + + .. versionchanged:: 2.3 + The ``app`` parameter was removed. + + .. versionchanged:: 2.2 + Calls ``current_app.json.loads``, allowing an app to override + the behavior. 
+ + .. versionchanged:: 2.0 + ``encoding`` will be removed in Flask 2.1. The data must be a + string or UTF-8 bytes. + + .. versionchanged:: 1.0.3 + ``app`` can be passed directly, rather than requiring an app + context for configuration. + """ + if current_app: + return current_app.json.loads(s, **kwargs) + + return _json.loads(s, **kwargs) + + +def load(fp: t.IO[t.AnyStr], **kwargs: t.Any) -> t.Any: + """Deserialize data as JSON read from a file. + + If :data:`~flask.current_app` is available, it will use its + :meth:`app.json.load() <flask.json.provider.JSONProvider.load>` + method, otherwise it will use :func:`json.load`. + + :param fp: A file opened for reading text or UTF-8 bytes. + :param kwargs: Arguments passed to the ``load`` implementation. + + .. versionchanged:: 2.3 + The ``app`` parameter was removed. + + .. versionchanged:: 2.2 + Calls ``current_app.json.load``, allowing an app to override + the behavior. + + .. versionchanged:: 2.2 + The ``app`` parameter will be removed in Flask 2.3. + + .. versionchanged:: 2.0 + ``encoding`` will be removed in Flask 2.1. The file must be text + mode, or binary mode with UTF-8 bytes. + """ + if current_app: + return current_app.json.load(fp, **kwargs) + + return _json.load(fp, **kwargs) + + +def jsonify(*args: t.Any, **kwargs: t.Any) -> Response: + """Serialize the given arguments as JSON, and return a + :class:`~flask.Response` object with the ``application/json`` + mimetype. A dict or list returned from a view will be converted to a + JSON response automatically without needing to call this. + + This requires an active request or application context, and calls + :meth:`app.json.response() <flask.json.provider.JSONProvider.response>`. + + In debug mode, the output is formatted with indentation to make it + easier to read. This may also be controlled by the provider. + + Either positional or keyword arguments can be given, not both. + If no arguments are given, ``None`` is serialized. + + :param args: A single value to serialize, or multiple values to + treat as a list to serialize. + :param kwargs: Treat as a dict to serialize. + + .. versionchanged:: 2.2 + Calls ``current_app.json.response``, allowing an app to override + the behavior. + + .. versionchanged:: 2.0.2 + :class:`decimal.Decimal` is supported by converting to a string. + + .. versionchanged:: 0.11 + Added support for serializing top-level arrays. This was a + security risk in ancient browsers. See :ref:`security-json`. + + .. 
versionadded:: 0.2 + """ + return current_app.json.response(*args, **kwargs) diff --git a/backend/test/lib/python3.8/site-packages/flask/json/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/json/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..418761999fca0205a988f92296bf05bdb67e1763 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/json/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/json/__pycache__/provider.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/json/__pycache__/provider.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0bfed3052f7b4f44f5622bb2d9e9386644228019 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/json/__pycache__/provider.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/json/__pycache__/tag.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/flask/json/__pycache__/tag.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..957b8f93cd71520b577d972456ecf475ec043f1c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/flask/json/__pycache__/tag.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/flask/json/provider.py b/backend/test/lib/python3.8/site-packages/flask/json/provider.py new file mode 100644 index 0000000000000000000000000000000000000000..0edd3d58d7d9ca85e5fc216b24da0eacc16d47ec --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/json/provider.py @@ -0,0 +1,216 @@ +from __future__ import annotations + +import dataclasses +import decimal +import json +import typing as t +import uuid +import weakref +from datetime import date + +from werkzeug.http import http_date + +if t.TYPE_CHECKING: # pragma: no cover + from ..app import Flask + from ..wrappers import Response + + +class JSONProvider: + """A standard set of JSON operations for an application. Subclasses + of this can be used to customize JSON behavior or use different + JSON libraries. + + To implement a provider for a specific library, subclass this base + class and implement at least :meth:`dumps` and :meth:`loads`. All + other methods have default implementations. + + To use a different provider, either subclass ``Flask`` and set + :attr:`~flask.Flask.json_provider_class` to a provider class, or set + :attr:`app.json <flask.Flask.json>` to an instance of the class. + + :param app: An application instance. This will be stored as a + :class:`weakref.proxy` on the :attr:`_app` attribute. + + .. versionadded:: 2.2 + """ + + def __init__(self, app: Flask) -> None: + self._app = weakref.proxy(app) + + def dumps(self, obj: t.Any, **kwargs: t.Any) -> str: + """Serialize data as JSON. + + :param obj: The data to serialize. + :param kwargs: May be passed to the underlying JSON library. + """ + raise NotImplementedError + + def dump(self, obj: t.Any, fp: t.IO[str], **kwargs: t.Any) -> None: + """Serialize data as JSON and write to a file. + + :param obj: The data to serialize. + :param fp: A file opened for writing text. Should use the UTF-8 + encoding to be valid JSON. + :param kwargs: May be passed to the underlying JSON library. + """ + fp.write(self.dumps(obj, **kwargs)) + + def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any: + """Deserialize data as JSON. + + :param s: Text or UTF-8 bytes. 
+ :param kwargs: May be passed to the underlying JSON library. + """ + raise NotImplementedError + + def load(self, fp: t.IO[t.AnyStr], **kwargs: t.Any) -> t.Any: + """Deserialize data as JSON read from a file. + + :param fp: A file opened for reading text or UTF-8 bytes. + :param kwargs: May be passed to the underlying JSON library. + """ + return self.loads(fp.read(), **kwargs) + + def _prepare_response_obj( + self, args: tuple[t.Any, ...], kwargs: dict[str, t.Any] + ) -> t.Any: + if args and kwargs: + raise TypeError("app.json.response() takes either args or kwargs, not both") + + if not args and not kwargs: + return None + + if len(args) == 1: + return args[0] + + return args or kwargs + + def response(self, *args: t.Any, **kwargs: t.Any) -> Response: + """Serialize the given arguments as JSON, and return a + :class:`~flask.Response` object with the ``application/json`` + mimetype. + + The :func:`~flask.json.jsonify` function calls this method for + the current application. + + Either positional or keyword arguments can be given, not both. + If no arguments are given, ``None`` is serialized. + + :param args: A single value to serialize, or multiple values to + treat as a list to serialize. + :param kwargs: Treat as a dict to serialize. + """ + obj = self._prepare_response_obj(args, kwargs) + return self._app.response_class(self.dumps(obj), mimetype="application/json") + + +def _default(o: t.Any) -> t.Any: + if isinstance(o, date): + return http_date(o) + + if isinstance(o, (decimal.Decimal, uuid.UUID)): + return str(o) + + if dataclasses and dataclasses.is_dataclass(o): + return dataclasses.asdict(o) + + if hasattr(o, "__html__"): + return str(o.__html__()) + + raise TypeError(f"Object of type {type(o).__name__} is not JSON serializable") + + +class DefaultJSONProvider(JSONProvider): + """Provide JSON operations using Python's built-in :mod:`json` + library. Serializes the following additional data types: + + - :class:`datetime.datetime` and :class:`datetime.date` are + serialized to :rfc:`822` strings. This is the same as the HTTP + date format. + - :class:`uuid.UUID` is serialized to a string. + - :class:`dataclasses.dataclass` is passed to + :func:`dataclasses.asdict`. + - :class:`~markupsafe.Markup` (or any object with a ``__html__`` + method) will call the ``__html__`` method to get a string. + """ + + default: t.Callable[[t.Any], t.Any] = staticmethod( + _default + ) # type: ignore[assignment] + """Apply this function to any object that :meth:`json.dumps` does + not know how to serialize. It should return a valid JSON type or + raise a ``TypeError``. + """ + + ensure_ascii = True + """Replace non-ASCII characters with escape sequences. This may be + more compatible with some clients, but can be disabled for better + performance and size. + """ + + sort_keys = True + """Sort the keys in any serialized dicts. This may be useful for + some caching situations, but can be disabled for better performance. + When enabled, keys must all be strings, they are not converted + before sorting. + """ + + compact: bool | None = None + """If ``True``, or ``None`` out of debug mode, the :meth:`response` + output will not add indentation, newlines, or spaces. If ``False``, + or ``None`` in debug mode, it will use a non-compact representation. + """ + + mimetype = "application/json" + """The mimetype set in :meth:`response`.""" + + def dumps(self, obj: t.Any, **kwargs: t.Any) -> str: + """Serialize data as JSON to a string. + + Keyword arguments are passed to :func:`json.dumps`. 
Sets some + parameter defaults from the :attr:`default`, + :attr:`ensure_ascii`, and :attr:`sort_keys` attributes. + + :param obj: The data to serialize. + :param kwargs: Passed to :func:`json.dumps`. + """ + kwargs.setdefault("default", self.default) + kwargs.setdefault("ensure_ascii", self.ensure_ascii) + kwargs.setdefault("sort_keys", self.sort_keys) + return json.dumps(obj, **kwargs) + + def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any: + """Deserialize data as JSON from a string or bytes. + + :param s: Text or UTF-8 bytes. + :param kwargs: Passed to :func:`json.loads`. + """ + return json.loads(s, **kwargs) + + def response(self, *args: t.Any, **kwargs: t.Any) -> Response: + """Serialize the given arguments as JSON, and return a + :class:`~flask.Response` object with it. The response mimetype + will be "application/json" and can be changed with + :attr:`mimetype`. + + If :attr:`compact` is ``False`` or debug mode is enabled, the + output will be formatted to be easier to read. + + Either positional or keyword arguments can be given, not both. + If no arguments are given, ``None`` is serialized. + + :param args: A single value to serialize, or multiple values to + treat as a list to serialize. + :param kwargs: Treat as a dict to serialize. + """ + obj = self._prepare_response_obj(args, kwargs) + dump_args: dict[str, t.Any] = {} + + if (self.compact is None and self._app.debug) or self.compact is False: + dump_args.setdefault("indent", 2) + else: + dump_args.setdefault("separators", (",", ":")) + + return self._app.response_class( + f"{self.dumps(obj, **dump_args)}\n", mimetype=self.mimetype + ) diff --git a/backend/test/lib/python3.8/site-packages/flask/json/tag.py b/backend/test/lib/python3.8/site-packages/flask/json/tag.py new file mode 100644 index 0000000000000000000000000000000000000000..91cc4412d69b22bedc260d3cf598d135d706ed8f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/json/tag.py @@ -0,0 +1,314 @@ +""" +Tagged JSON +~~~~~~~~~~~ + +A compact representation for lossless serialization of non-standard JSON +types. :class:`~flask.sessions.SecureCookieSessionInterface` uses this +to serialize the session data, but it may be useful in other places. It +can be extended to support other types. + +.. autoclass:: TaggedJSONSerializer + :members: + +.. autoclass:: JSONTag + :members: + +Let's see an example that adds support for +:class:`~collections.OrderedDict`. Dicts don't have an order in JSON, so +to handle this we will dump the items as a list of ``[key, value]`` +pairs. Subclass :class:`JSONTag` and give it the new key ``' od'`` to +identify the type. The session serializer processes dicts first, so +insert the new tag at the front of the order since ``OrderedDict`` must +be processed before ``dict``. + +.. 
code-block:: python
+
+    from collections import OrderedDict
+    from flask.json.tag import JSONTag
+
+    class TagOrderedDict(JSONTag):
+        __slots__ = ('serializer',)
+        key = ' od'
+
+        def check(self, value):
+            return isinstance(value, OrderedDict)
+
+        def to_json(self, value):
+            return [[k, self.serializer.tag(v)] for k, v in value.items()]
+
+        def to_python(self, value):
+            return OrderedDict(value)
+
+    app.session_interface.serializer.register(TagOrderedDict, index=0)
+"""
+from __future__ import annotations
+
+import typing as t
+from base64 import b64decode
+from base64 import b64encode
+from datetime import datetime
+from uuid import UUID
+
+from markupsafe import Markup
+from werkzeug.http import http_date
+from werkzeug.http import parse_date
+
+from ..json import dumps
+from ..json import loads
+
+
+class JSONTag:
+    """Base class for defining type tags for :class:`TaggedJSONSerializer`."""
+
+    __slots__ = ("serializer",)
+
+    #: The tag to mark the serialized object with. If ``None``, this tag is
+    #: only used as an intermediate step during tagging.
+    key: str | None = None
+
+    def __init__(self, serializer: TaggedJSONSerializer) -> None:
+        """Create a tagger for the given serializer."""
+        self.serializer = serializer
+
+    def check(self, value: t.Any) -> bool:
+        """Check if the given value should be tagged by this tag."""
+        raise NotImplementedError
+
+    def to_json(self, value: t.Any) -> t.Any:
+        """Convert the Python object to an object that is a valid JSON type.
+        The tag will be added later."""
+        raise NotImplementedError
+
+    def to_python(self, value: t.Any) -> t.Any:
+        """Convert the JSON representation back to the correct type. The tag
+        will already be removed."""
+        raise NotImplementedError
+
+    def tag(self, value: t.Any) -> t.Any:
+        """Convert the value to a valid JSON type and add the tag structure
+        around it."""
+        return {self.key: self.to_json(value)}
+
+
+class TagDict(JSONTag):
+    """Tag for 1-item dicts whose only key matches a registered tag.
+
+    Internally, the dict key is suffixed with `__`, and the suffix is removed
+    when deserializing.
+    """
+
+    __slots__ = ()
+    key = " di"
+
+    def check(self, value: t.Any) -> bool:
+        return (
+            isinstance(value, dict)
+            and len(value) == 1
+            and next(iter(value)) in self.serializer.tags
+        )
+
+    def to_json(self, value: t.Any) -> t.Any:
+        key = next(iter(value))
+        return {f"{key}__": self.serializer.tag(value[key])}
+
+    def to_python(self, value: t.Any) -> t.Any:
+        key = next(iter(value))
+        return {key[:-2]: value[key]}
+
+
+class PassDict(JSONTag):
+    __slots__ = ()
+
+    def check(self, value: t.Any) -> bool:
+        return isinstance(value, dict)
+
+    def to_json(self, value: t.Any) -> t.Any:
+        # JSON objects may only have string keys, so don't bother tagging the
+        # key here.
+        return {k: self.serializer.tag(v) for k, v in value.items()}
+
+    tag = to_json
+
+
+class TagTuple(JSONTag):
+    __slots__ = ()
+    key = " t"
+
+    def check(self, value: t.Any) -> bool:
+        return isinstance(value, tuple)
+
+    def to_json(self, value: t.Any) -> t.Any:
+        return [self.serializer.tag(item) for item in value]
+
+    def to_python(self, value: t.Any) -> t.Any:
+        return tuple(value)
+
+
+class PassList(JSONTag):
+    __slots__ = ()
+
+    def check(self, value: t.Any) -> bool:
+        return isinstance(value, list)
+
+    def to_json(self, value: t.Any) -> t.Any:
+        return [self.serializer.tag(item) for item in value]
+
+    tag = to_json
+
+
+class TagBytes(JSONTag):
+    __slots__ = ()
+    key = " b"
+
+    def check(self, value: t.Any) -> bool:
+        return isinstance(value, bytes)
+
+    def to_json(self, value: t.Any) -> t.Any:
+        return b64encode(value).decode("ascii")
+
+    def to_python(self, value: t.Any) -> t.Any:
+        return b64decode(value)
+
+
+class TagMarkup(JSONTag):
+    """Serialize anything matching the :class:`~markupsafe.Markup` API by
+    calling its ``__html__`` method and serializing the result. Always
+    deserializes to an instance of :class:`~markupsafe.Markup`."""
+
+    __slots__ = ()
+    key = " m"
+
+    def check(self, value: t.Any) -> bool:
+        return callable(getattr(value, "__html__", None))
+
+    def to_json(self, value: t.Any) -> t.Any:
+        return str(value.__html__())
+
+    def to_python(self, value: t.Any) -> t.Any:
+        return Markup(value)
+
+
+class TagUUID(JSONTag):
+    __slots__ = ()
+    key = " u"
+
+    def check(self, value: t.Any) -> bool:
+        return isinstance(value, UUID)
+
+    def to_json(self, value: t.Any) -> t.Any:
+        return value.hex
+
+    def to_python(self, value: t.Any) -> t.Any:
+        return UUID(value)
+
+
+class TagDateTime(JSONTag):
+    __slots__ = ()
+    key = " d"
+
+    def check(self, value: t.Any) -> bool:
+        return isinstance(value, datetime)
+
+    def to_json(self, value: t.Any) -> t.Any:
+        return http_date(value)
+
+    def to_python(self, value: t.Any) -> t.Any:
+        return parse_date(value)
+
+
+class TaggedJSONSerializer:
+    """Serializer that uses a tag system to compactly represent objects that
+    are not JSON types. Passed as the intermediate serializer to
+    :class:`itsdangerous.Serializer`.
+
+    The following extra types are supported:
+
+    * :class:`dict`
+    * :class:`tuple`
+    * :class:`bytes`
+    * :class:`~markupsafe.Markup`
+    * :class:`~uuid.UUID`
+    * :class:`~datetime.datetime`
+    """
+
+    __slots__ = ("tags", "order")
+
+    #: Tag classes to bind when creating the serializer. Other tags can be
+    #: added later using :meth:`~register`.
+    default_tags = [
+        TagDict,
+        PassDict,
+        TagTuple,
+        PassList,
+        TagBytes,
+        TagMarkup,
+        TagUUID,
+        TagDateTime,
+    ]
+
+    def __init__(self) -> None:
+        self.tags: dict[str, JSONTag] = {}
+        self.order: list[JSONTag] = []
+
+        for cls in self.default_tags:
+            self.register(cls)
+
+    def register(
+        self,
+        tag_class: type[JSONTag],
+        force: bool = False,
+        index: int | None = None,
+    ) -> None:
+        """Register a new tag with this serializer.
+
+        :param tag_class: tag class to register. Will be instantiated with this
+            serializer instance.
+        :param force: overwrite an existing tag. If false (default), a
+            :exc:`KeyError` is raised.
+        :param index: index to insert the new tag in the tag order. Useful when
+            the new tag is a special case of an existing tag. If ``None``
+            (default), the tag is appended to the end of the order.
+
+        :raise KeyError: if the tag key is already registered and ``force`` is
+            not true.
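+
+        A minimal sketch of the common cases (``MyTag`` stands in for an
+        illustrative :class:`JSONTag` subclass, not part of Flask)::
+
+            serializer = TaggedJSONSerializer()
+            serializer.register(MyTag)              # appended to the order
+            serializer.register(MyTag, index=0)     # checked before other tags
+            serializer.register(MyTag, force=True)  # replace the existing key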
+ """ + tag = tag_class(self) + key = tag.key + + if key is not None: + if not force and key in self.tags: + raise KeyError(f"Tag '{key}' is already registered.") + + self.tags[key] = tag + + if index is None: + self.order.append(tag) + else: + self.order.insert(index, tag) + + def tag(self, value: t.Any) -> dict[str, t.Any]: + """Convert a value to a tagged representation if necessary.""" + for tag in self.order: + if tag.check(value): + return tag.tag(value) + + return value + + def untag(self, value: dict[str, t.Any]) -> t.Any: + """Convert a tagged representation back to the original type.""" + if len(value) != 1: + return value + + key = next(iter(value)) + + if key not in self.tags: + return value + + return self.tags[key].to_python(value[key]) + + def dumps(self, value: t.Any) -> str: + """Tag the value and dump it to a compact JSON string.""" + return dumps(self.tag(value), separators=(",", ":")) + + def loads(self, value: str) -> t.Any: + """Load data from a JSON string and deserialized any tagged objects.""" + return loads(value, object_hook=self.untag) diff --git a/backend/test/lib/python3.8/site-packages/flask/logging.py b/backend/test/lib/python3.8/site-packages/flask/logging.py new file mode 100644 index 0000000000000000000000000000000000000000..99f6be85be8cf576729086d47e79d44aec53d6f5 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/logging.py @@ -0,0 +1,76 @@ +from __future__ import annotations + +import logging +import sys +import typing as t + +from werkzeug.local import LocalProxy + +from .globals import request + +if t.TYPE_CHECKING: # pragma: no cover + from .app import Flask + + +@LocalProxy +def wsgi_errors_stream() -> t.TextIO: + """Find the most appropriate error stream for the application. If a request + is active, log to ``wsgi.errors``, otherwise use ``sys.stderr``. + + If you configure your own :class:`logging.StreamHandler`, you may want to + use this for the stream. If you are using file or dict configuration and + can't import this directly, you can refer to it as + ``ext://flask.logging.wsgi_errors_stream``. + """ + return request.environ["wsgi.errors"] if request else sys.stderr + + +def has_level_handler(logger: logging.Logger) -> bool: + """Check if there is a handler in the logging chain that will handle the + given logger's :meth:`effective level <~logging.Logger.getEffectiveLevel>`. + """ + level = logger.getEffectiveLevel() + current = logger + + while current: + if any(handler.level <= level for handler in current.handlers): + return True + + if not current.propagate: + break + + current = current.parent # type: ignore + + return False + + +#: Log messages to :func:`~flask.logging.wsgi_errors_stream` with the format +#: ``[%(asctime)s] %(levelname)s in %(module)s: %(message)s``. +default_handler = logging.StreamHandler(wsgi_errors_stream) # type: ignore +default_handler.setFormatter( + logging.Formatter("[%(asctime)s] %(levelname)s in %(module)s: %(message)s") +) + + +def create_logger(app: Flask) -> logging.Logger: + """Get the Flask app's logger and configure it if needed. + + The logger name will be the same as + :attr:`app.import_name <flask.Flask.name>`. + + When :attr:`~flask.Flask.debug` is enabled, set the logger level to + :data:`logging.DEBUG` if it is not set. + + If there is no handler for the logger's effective level, add a + :class:`~logging.StreamHandler` for + :func:`~flask.logging.wsgi_errors_stream` with a basic format. 
+ """ + logger = logging.getLogger(app.name) + + if app.debug and not logger.level: + logger.setLevel(logging.DEBUG) + + if not has_level_handler(logger): + logger.addHandler(default_handler) + + return logger diff --git a/backend/test/lib/python3.8/site-packages/flask/py.typed b/backend/test/lib/python3.8/site-packages/flask/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/flask/scaffold.py b/backend/test/lib/python3.8/site-packages/flask/scaffold.py new file mode 100644 index 0000000000000000000000000000000000000000..d15b87338d7da90229dc6b9bdc5b684d71b5fb1b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/scaffold.py @@ -0,0 +1,873 @@ +from __future__ import annotations + +import importlib.util +import os +import pathlib +import sys +import typing as t +from collections import defaultdict +from datetime import timedelta +from functools import update_wrapper + +from jinja2 import FileSystemLoader +from werkzeug.exceptions import default_exceptions +from werkzeug.exceptions import HTTPException +from werkzeug.utils import cached_property + +from . import typing as ft +from .cli import AppGroup +from .globals import current_app +from .helpers import get_root_path +from .helpers import send_from_directory +from .templating import _default_template_ctx_processor + +if t.TYPE_CHECKING: # pragma: no cover + from .wrappers import Response + +# a singleton sentinel value for parameter defaults +_sentinel = object() + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) +T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable) +T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable) +T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable) +T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) +T_template_context_processor = t.TypeVar( + "T_template_context_processor", bound=ft.TemplateContextProcessorCallable +) +T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable) +T_url_value_preprocessor = t.TypeVar( + "T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable +) +T_route = t.TypeVar("T_route", bound=ft.RouteCallable) + + +def setupmethod(f: F) -> F: + f_name = f.__name__ + + def wrapper_func(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + self._check_setup_finished(f_name) + return f(self, *args, **kwargs) + + return t.cast(F, update_wrapper(wrapper_func, f)) + + +class Scaffold: + """Common behavior shared between :class:`~flask.Flask` and + :class:`~flask.blueprints.Blueprint`. + + :param import_name: The import name of the module where this object + is defined. Usually :attr:`__name__` should be used. + :param static_folder: Path to a folder of static files to serve. + If this is set, a static route will be added. + :param static_url_path: URL prefix for the static route. + :param template_folder: Path to a folder containing template files. + for rendering. If this is set, a Jinja loader will be added. + :param root_path: The path that static, template, and resource files + are relative to. Typically not set, it is discovered based on + the ``import_name``. + + .. 
versionadded:: 2.0 + """ + + name: str + _static_folder: str | None = None + _static_url_path: str | None = None + + def __init__( + self, + import_name: str, + static_folder: str | os.PathLike | None = None, + static_url_path: str | None = None, + template_folder: str | os.PathLike | None = None, + root_path: str | None = None, + ): + #: The name of the package or module that this object belongs + #: to. Do not change this once it is set by the constructor. + self.import_name = import_name + + self.static_folder = static_folder # type: ignore + self.static_url_path = static_url_path + + #: The path to the templates folder, relative to + #: :attr:`root_path`, to add to the template loader. ``None`` if + #: templates should not be added. + self.template_folder = template_folder + + if root_path is None: + root_path = get_root_path(self.import_name) + + #: Absolute path to the package on the filesystem. Used to look + #: up resources contained in the package. + self.root_path = root_path + + #: The Click command group for registering CLI commands for this + #: object. The commands are available from the ``flask`` command + #: once the application has been discovered and blueprints have + #: been registered. + self.cli = AppGroup() + + #: A dictionary mapping endpoint names to view functions. + #: + #: To register a view function, use the :meth:`route` decorator. + #: + #: This data structure is internal. It should not be modified + #: directly and its format may change at any time. + self.view_functions: dict[str, t.Callable] = {} + + #: A data structure of registered error handlers, in the format + #: ``{scope: {code: {class: handler}}}``. The ``scope`` key is + #: the name of a blueprint the handlers are active for, or + #: ``None`` for all requests. The ``code`` key is the HTTP + #: status code for ``HTTPException``, or ``None`` for + #: other exceptions. The innermost dictionary maps exception + #: classes to handler functions. + #: + #: To register an error handler, use the :meth:`errorhandler` + #: decorator. + #: + #: This data structure is internal. It should not be modified + #: directly and its format may change at any time. + self.error_handler_spec: dict[ + ft.AppOrBlueprintKey, + dict[int | None, dict[type[Exception], ft.ErrorHandlerCallable]], + ] = defaultdict(lambda: defaultdict(dict)) + + #: A data structure of functions to call at the beginning of + #: each request, in the format ``{scope: [functions]}``. The + #: ``scope`` key is the name of a blueprint the functions are + #: active for, or ``None`` for all requests. + #: + #: To register a function, use the :meth:`before_request` + #: decorator. + #: + #: This data structure is internal. It should not be modified + #: directly and its format may change at any time. + self.before_request_funcs: dict[ + ft.AppOrBlueprintKey, list[ft.BeforeRequestCallable] + ] = defaultdict(list) + + #: A data structure of functions to call at the end of each + #: request, in the format ``{scope: [functions]}``. The + #: ``scope`` key is the name of a blueprint the functions are + #: active for, or ``None`` for all requests. + #: + #: To register a function, use the :meth:`after_request` + #: decorator. + #: + #: This data structure is internal. It should not be modified + #: directly and its format may change at any time. 
+ self.after_request_funcs: dict[ + ft.AppOrBlueprintKey, list[ft.AfterRequestCallable] + ] = defaultdict(list) + + #: A data structure of functions to call at the end of each + #: request even if an exception is raised, in the format + #: ``{scope: [functions]}``. The ``scope`` key is the name of a + #: blueprint the functions are active for, or ``None`` for all + #: requests. + #: + #: To register a function, use the :meth:`teardown_request` + #: decorator. + #: + #: This data structure is internal. It should not be modified + #: directly and its format may change at any time. + self.teardown_request_funcs: dict[ + ft.AppOrBlueprintKey, list[ft.TeardownCallable] + ] = defaultdict(list) + + #: A data structure of functions to call to pass extra context + #: values when rendering templates, in the format + #: ``{scope: [functions]}``. The ``scope`` key is the name of a + #: blueprint the functions are active for, or ``None`` for all + #: requests. + #: + #: To register a function, use the :meth:`context_processor` + #: decorator. + #: + #: This data structure is internal. It should not be modified + #: directly and its format may change at any time. + self.template_context_processors: dict[ + ft.AppOrBlueprintKey, list[ft.TemplateContextProcessorCallable] + ] = defaultdict(list, {None: [_default_template_ctx_processor]}) + + #: A data structure of functions to call to modify the keyword + #: arguments passed to the view function, in the format + #: ``{scope: [functions]}``. The ``scope`` key is the name of a + #: blueprint the functions are active for, or ``None`` for all + #: requests. + #: + #: To register a function, use the + #: :meth:`url_value_preprocessor` decorator. + #: + #: This data structure is internal. It should not be modified + #: directly and its format may change at any time. + self.url_value_preprocessors: dict[ + ft.AppOrBlueprintKey, + list[ft.URLValuePreprocessorCallable], + ] = defaultdict(list) + + #: A data structure of functions to call to modify the keyword + #: arguments when generating URLs, in the format + #: ``{scope: [functions]}``. The ``scope`` key is the name of a + #: blueprint the functions are active for, or ``None`` for all + #: requests. + #: + #: To register a function, use the :meth:`url_defaults` + #: decorator. + #: + #: This data structure is internal. It should not be modified + #: directly and its format may change at any time. + self.url_default_functions: dict[ + ft.AppOrBlueprintKey, list[ft.URLDefaultCallable] + ] = defaultdict(list) + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.name!r}>" + + def _check_setup_finished(self, f_name: str) -> None: + raise NotImplementedError + + @property + def static_folder(self) -> str | None: + """The absolute path to the configured static folder. ``None`` + if no static folder is set. + """ + if self._static_folder is not None: + return os.path.join(self.root_path, self._static_folder) + else: + return None + + @static_folder.setter + def static_folder(self, value: str | os.PathLike | None) -> None: + if value is not None: + value = os.fspath(value).rstrip(r"\/") + + self._static_folder = value + + @property + def has_static_folder(self) -> bool: + """``True`` if :attr:`static_folder` is set. + + .. versionadded:: 0.5 + """ + return self.static_folder is not None + + @property + def static_url_path(self) -> str | None: + """The URL prefix that the static route will be accessible from. + + If it was not configured during init, it is derived from + :attr:`static_folder`. 
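+
+        For example (a sketch; the folder name is illustrative)::
+
+            app = Flask(__name__, static_folder="assets")
+            app.static_url_path  # "/assets", derived from the folder name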
+ """ + if self._static_url_path is not None: + return self._static_url_path + + if self.static_folder is not None: + basename = os.path.basename(self.static_folder) + return f"/{basename}".rstrip("/") + + return None + + @static_url_path.setter + def static_url_path(self, value: str | None) -> None: + if value is not None: + value = value.rstrip("/") + + self._static_url_path = value + + def get_send_file_max_age(self, filename: str | None) -> int | None: + """Used by :func:`send_file` to determine the ``max_age`` cache + value for a given file path if it wasn't passed. + + By default, this returns :data:`SEND_FILE_MAX_AGE_DEFAULT` from + the configuration of :data:`~flask.current_app`. This defaults + to ``None``, which tells the browser to use conditional requests + instead of a timed cache, which is usually preferable. + + .. versionchanged:: 2.0 + The default configuration is ``None`` instead of 12 hours. + + .. versionadded:: 0.9 + """ + value = current_app.config["SEND_FILE_MAX_AGE_DEFAULT"] + + if value is None: + return None + + if isinstance(value, timedelta): + return int(value.total_seconds()) + + return value + + def send_static_file(self, filename: str) -> Response: + """The view function used to serve files from + :attr:`static_folder`. A route is automatically registered for + this view at :attr:`static_url_path` if :attr:`static_folder` is + set. + + .. versionadded:: 0.5 + """ + if not self.has_static_folder: + raise RuntimeError("'static_folder' must be set to serve static_files.") + + # send_file only knows to call get_send_file_max_age on the app, + # call it here so it works for blueprints too. + max_age = self.get_send_file_max_age(filename) + return send_from_directory( + t.cast(str, self.static_folder), filename, max_age=max_age + ) + + @cached_property + def jinja_loader(self) -> FileSystemLoader | None: + """The Jinja loader for this object's templates. By default this + is a class :class:`jinja2.loaders.FileSystemLoader` to + :attr:`template_folder` if it is set. + + .. versionadded:: 0.5 + """ + if self.template_folder is not None: + return FileSystemLoader(os.path.join(self.root_path, self.template_folder)) + else: + return None + + def open_resource(self, resource: str, mode: str = "rb") -> t.IO[t.AnyStr]: + """Open a resource file relative to :attr:`root_path` for + reading. + + For example, if the file ``schema.sql`` is next to the file + ``app.py`` where the ``Flask`` app is defined, it can be opened + with: + + .. code-block:: python + + with app.open_resource("schema.sql") as f: + conn.executescript(f.read()) + + :param resource: Path to the resource relative to + :attr:`root_path`. + :param mode: Open the file in this mode. Only reading is + supported, valid values are "r" (or "rt") and "rb". + """ + if mode not in {"r", "rt", "rb"}: + raise ValueError("Resources can only be opened for reading.") + + return open(os.path.join(self.root_path, resource), mode) + + def _method_route( + self, + method: str, + rule: str, + options: dict, + ) -> t.Callable[[T_route], T_route]: + if "methods" in options: + raise TypeError("Use the 'route' decorator to use the 'methods' argument.") + + return self.route(rule, methods=[method], **options) + + @setupmethod + def get(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: + """Shortcut for :meth:`route` with ``methods=["GET"]``. + + .. 
versionadded:: 2.0
+        """
+        return self._method_route("GET", rule, options)
+
+    @setupmethod
+    def post(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]:
+        """Shortcut for :meth:`route` with ``methods=["POST"]``.
+
+        .. versionadded:: 2.0
+        """
+        return self._method_route("POST", rule, options)
+
+    @setupmethod
+    def put(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]:
+        """Shortcut for :meth:`route` with ``methods=["PUT"]``.
+
+        .. versionadded:: 2.0
+        """
+        return self._method_route("PUT", rule, options)
+
+    @setupmethod
+    def delete(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]:
+        """Shortcut for :meth:`route` with ``methods=["DELETE"]``.
+
+        .. versionadded:: 2.0
+        """
+        return self._method_route("DELETE", rule, options)
+
+    @setupmethod
+    def patch(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]:
+        """Shortcut for :meth:`route` with ``methods=["PATCH"]``.
+
+        .. versionadded:: 2.0
+        """
+        return self._method_route("PATCH", rule, options)
+
+    @setupmethod
+    def route(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]:
+        """Decorate a view function to register it with the given URL
+        rule and options. Calls :meth:`add_url_rule`, which has more
+        details about the implementation.
+
+        .. code-block:: python
+
+            @app.route("/")
+            def index():
+                return "Hello, World!"
+
+        See :ref:`url-route-registrations`.
+
+        The endpoint name for the route defaults to the name of the view
+        function if the ``endpoint`` parameter isn't passed.
+
+        The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` and
+        ``OPTIONS`` are added automatically.
+
+        :param rule: The URL rule string.
+        :param options: Extra options passed to the
+            :class:`~werkzeug.routing.Rule` object.
+        """
+
+        def decorator(f: T_route) -> T_route:
+            endpoint = options.pop("endpoint", None)
+            self.add_url_rule(rule, endpoint, f, **options)
+            return f
+
+        return decorator
+
+    @setupmethod
+    def add_url_rule(
+        self,
+        rule: str,
+        endpoint: str | None = None,
+        view_func: ft.RouteCallable | None = None,
+        provide_automatic_options: bool | None = None,
+        **options: t.Any,
+    ) -> None:
+        """Register a rule for routing incoming requests and building
+        URLs. The :meth:`route` decorator is a shortcut to call this
+        with the ``view_func`` argument. These are equivalent:
+
+        .. code-block:: python
+
+            @app.route("/")
+            def index():
+                ...
+
+        .. code-block:: python
+
+            def index():
+                ...
+
+            app.add_url_rule("/", view_func=index)
+
+        See :ref:`url-route-registrations`.
+
+        The endpoint name for the route defaults to the name of the view
+        function if the ``endpoint`` parameter isn't passed. An error
+        will be raised if a function has already been registered for the
+        endpoint.
+
+        The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` is
+        always added automatically, and ``OPTIONS`` is added
+        automatically by default.
+
+        ``view_func`` does not necessarily need to be passed, but if the
+        rule should participate in routing an endpoint name must be
+        associated with a view function at some point with the
+        :meth:`endpoint` decorator.
+
+        .. code-block:: python
+
+            app.add_url_rule("/", endpoint="index")
+
+            @app.endpoint("index")
+            def index():
+                ...
+
+        If ``view_func`` has a ``required_methods`` attribute, those
+        methods are added to the passed and automatic methods. If it
+        has a ``provide_automatic_options`` attribute, it is used as the
+        default if the parameter is not passed.
+
+        :param rule: The URL rule string.
+ :param endpoint: The endpoint name to associate with the rule + and view function. Used when routing and building URLs. + Defaults to ``view_func.__name__``. + :param view_func: The view function to associate with the + endpoint name. + :param provide_automatic_options: Add the ``OPTIONS`` method and + respond to ``OPTIONS`` requests automatically. + :param options: Extra options passed to the + :class:`~werkzeug.routing.Rule` object. + """ + raise NotImplementedError + + @setupmethod + def endpoint(self, endpoint: str) -> t.Callable[[F], F]: + """Decorate a view function to register it for the given + endpoint. Used if a rule is added without a ``view_func`` with + :meth:`add_url_rule`. + + .. code-block:: python + + app.add_url_rule("/ex", endpoint="example") + + @app.endpoint("example") + def example(): + ... + + :param endpoint: The endpoint name to associate with the view + function. + """ + + def decorator(f: F) -> F: + self.view_functions[endpoint] = f + return f + + return decorator + + @setupmethod + def before_request(self, f: T_before_request) -> T_before_request: + """Register a function to run before each request. + + For example, this can be used to open a database connection, or + to load the logged in user from the session. + + .. code-block:: python + + @app.before_request + def load_user(): + if "user_id" in session: + g.user = db.session.get(session["user_id"]) + + The function will be called without any arguments. If it returns + a non-``None`` value, the value is handled as if it was the + return value from the view, and further request handling is + stopped. + + This is available on both app and blueprint objects. When used on an app, this + executes before every request. When used on a blueprint, this executes before + every request that the blueprint handles. To register with a blueprint and + execute before every request, use :meth:`.Blueprint.before_app_request`. + """ + self.before_request_funcs.setdefault(None, []).append(f) + return f + + @setupmethod + def after_request(self, f: T_after_request) -> T_after_request: + """Register a function to run after each request to this object. + + The function is called with the response object, and must return + a response object. This allows the functions to modify or + replace the response before it is sent. + + If a function raises an exception, any remaining + ``after_request`` functions will not be called. Therefore, this + should not be used for actions that must execute, such as to + close resources. Use :meth:`teardown_request` for that. + + This is available on both app and blueprint objects. When used on an app, this + executes after every request. When used on a blueprint, this executes after + every request that the blueprint handles. To register with a blueprint and + execute after every request, use :meth:`.Blueprint.after_app_request`. + """ + self.after_request_funcs.setdefault(None, []).append(f) + return f + + @setupmethod + def teardown_request(self, f: T_teardown) -> T_teardown: + """Register a function to be called when the request context is + popped. Typically this happens at the end of each request, but + contexts may be pushed manually as well during testing. + + .. code-block:: python + + with app.test_request_context(): + ... + + When the ``with`` block exits (or ``ctx.pop()`` is called), the + teardown functions are called just before the request context is + made inactive. + + When a teardown function was called because of an unhandled + exception it will be passed an error object. 
If an + :meth:`errorhandler` is registered, it will handle the exception + and the teardown will not receive it. + + Teardown functions must avoid raising exceptions. If they + execute code that might fail they must surround that code with a + ``try``/``except`` block and log any errors. + + The return values of teardown functions are ignored. + + This is available on both app and blueprint objects. When used on an app, this + executes after every request. When used on a blueprint, this executes after + every request that the blueprint handles. To register with a blueprint and + execute after every request, use :meth:`.Blueprint.teardown_app_request`. + """ + self.teardown_request_funcs.setdefault(None, []).append(f) + return f + + @setupmethod + def context_processor( + self, + f: T_template_context_processor, + ) -> T_template_context_processor: + """Registers a template context processor function. These functions run before + rendering a template. The keys of the returned dict are added as variables + available in the template. + + This is available on both app and blueprint objects. When used on an app, this + is called for every rendered template. When used on a blueprint, this is called + for templates rendered from the blueprint's views. To register with a blueprint + and affect every template, use :meth:`.Blueprint.app_context_processor`. + """ + self.template_context_processors[None].append(f) + return f + + @setupmethod + def url_value_preprocessor( + self, + f: T_url_value_preprocessor, + ) -> T_url_value_preprocessor: + """Register a URL value preprocessor function for all view + functions in the application. These functions will be called before the + :meth:`before_request` functions. + + The function can modify the values captured from the matched url before + they are passed to the view. For example, this can be used to pop a + common language code value and place it in ``g`` rather than pass it to + every view. + + The function is passed the endpoint name and values dict. The return + value is ignored. + + This is available on both app and blueprint objects. When used on an app, this + is called for every request. When used on a blueprint, this is called for + requests that the blueprint handles. To register with a blueprint and affect + every request, use :meth:`.Blueprint.app_url_value_preprocessor`. + """ + self.url_value_preprocessors[None].append(f) + return f + + @setupmethod + def url_defaults(self, f: T_url_defaults) -> T_url_defaults: + """Callback function for URL defaults for all view functions of the + application. It's called with the endpoint and values and should + update the values passed in place. + + This is available on both app and blueprint objects. When used on an app, this + is called for every request. When used on a blueprint, this is called for + requests that the blueprint handles. To register with a blueprint and affect + every request, use :meth:`.Blueprint.app_url_defaults`. + """ + self.url_default_functions[None].append(f) + return f + + @setupmethod + def errorhandler( + self, code_or_exception: type[Exception] | int + ) -> t.Callable[[T_error_handler], T_error_handler]: + """Register a function to handle errors by code or exception class. + + A decorator that is used to register a function given an + error code. 
Example:: + + @app.errorhandler(404) + def page_not_found(error): + return 'This page does not exist', 404 + + You can also register handlers for arbitrary exceptions:: + + @app.errorhandler(DatabaseError) + def special_exception_handler(error): + return 'Database connection failed', 500 + + This is available on both app and blueprint objects. When used on an app, this + can handle errors from every request. When used on a blueprint, this can handle + errors from requests that the blueprint handles. To register with a blueprint + and affect every request, use :meth:`.Blueprint.app_errorhandler`. + + .. versionadded:: 0.7 + Use :meth:`register_error_handler` instead of modifying + :attr:`error_handler_spec` directly, for application wide error + handlers. + + .. versionadded:: 0.7 + One can now additionally also register custom exception types + that do not necessarily have to be a subclass of the + :class:`~werkzeug.exceptions.HTTPException` class. + + :param code_or_exception: the code as integer for the handler, or + an arbitrary exception + """ + + def decorator(f: T_error_handler) -> T_error_handler: + self.register_error_handler(code_or_exception, f) + return f + + return decorator + + @setupmethod + def register_error_handler( + self, + code_or_exception: type[Exception] | int, + f: ft.ErrorHandlerCallable, + ) -> None: + """Alternative error attach function to the :meth:`errorhandler` + decorator that is more straightforward to use for non decorator + usage. + + .. versionadded:: 0.7 + """ + exc_class, code = self._get_exc_class_and_code(code_or_exception) + self.error_handler_spec[None][code][exc_class] = f + + @staticmethod + def _get_exc_class_and_code( + exc_class_or_code: type[Exception] | int, + ) -> tuple[type[Exception], int | None]: + """Get the exception class being handled. For HTTP status codes + or ``HTTPException`` subclasses, return both the exception and + status code. + + :param exc_class_or_code: Any exception class, or an HTTP status + code as an integer. + """ + exc_class: type[Exception] + + if isinstance(exc_class_or_code, int): + try: + exc_class = default_exceptions[exc_class_or_code] + except KeyError: + raise ValueError( + f"'{exc_class_or_code}' is not a recognized HTTP" + " error code. Use a subclass of HTTPException with" + " that code instead." + ) from None + else: + exc_class = exc_class_or_code + + if isinstance(exc_class, Exception): + raise TypeError( + f"{exc_class!r} is an instance, not a class. Handlers" + " can only be registered for Exception classes or HTTP" + " error codes." + ) + + if not issubclass(exc_class, Exception): + raise ValueError( + f"'{exc_class.__name__}' is not a subclass of Exception." + " Handlers can only be registered for Exception classes" + " or HTTP error codes." + ) + + if issubclass(exc_class, HTTPException): + return exc_class, exc_class.code + else: + return exc_class, None + + +def _endpoint_from_view_func(view_func: t.Callable) -> str: + """Internal helper that returns the default endpoint for a given + function. This always is the function name. + """ + assert view_func is not None, "expected view func if endpoint is not provided." 
+ return view_func.__name__ + + +def _path_is_relative_to(path: pathlib.PurePath, base: str) -> bool: + # Path.is_relative_to doesn't exist until Python 3.9 + try: + path.relative_to(base) + return True + except ValueError: + return False + + +def _find_package_path(import_name): + """Find the path that contains the package or module.""" + root_mod_name, _, _ = import_name.partition(".") + + try: + root_spec = importlib.util.find_spec(root_mod_name) + + if root_spec is None: + raise ValueError("not found") + except (ImportError, ValueError): + # ImportError: the machinery told us it does not exist + # ValueError: + # - the module name was invalid + # - the module name is __main__ + # - we raised `ValueError` due to `root_spec` being `None` + return os.getcwd() + + if root_spec.origin in {"namespace", None}: + # namespace package + package_spec = importlib.util.find_spec(import_name) + + if package_spec is not None and package_spec.submodule_search_locations: + # Pick the path in the namespace that contains the submodule. + package_path = pathlib.Path( + os.path.commonpath(package_spec.submodule_search_locations) + ) + search_location = next( + location + for location in root_spec.submodule_search_locations + if _path_is_relative_to(package_path, location) + ) + else: + # Pick the first path. + search_location = root_spec.submodule_search_locations[0] + + return os.path.dirname(search_location) + elif root_spec.submodule_search_locations: + # package with __init__.py + return os.path.dirname(os.path.dirname(root_spec.origin)) + else: + # module + return os.path.dirname(root_spec.origin) + + +def find_package(import_name: str): + """Find the prefix that a package is installed under, and the path + that it would be imported from. + + The prefix is the directory containing the standard directory + hierarchy (lib, bin, etc.). If the package is not installed to the + system (:attr:`sys.prefix`) or a virtualenv (``site-packages``), + ``None`` is returned. + + The path is the entry in :attr:`sys.path` that contains the package + for import. If the package is not installed, it's assumed that the + package was imported from the current working directory. 
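+    For instance, a rough sketch of typical return values (exact paths
+    depend on the environment)::
+
+        prefix, path = find_package("flask")
+        # e.g. ("/usr", "/usr/lib/python3.8/site-packages") when installed,
+        # or (None, "/home/user/project") for a local, uninstalled package.
+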
+ """ + package_path = _find_package_path(import_name) + py_prefix = os.path.abspath(sys.prefix) + + # installed to the system + if _path_is_relative_to(pathlib.PurePath(package_path), py_prefix): + return py_prefix, package_path + + site_parent, site_folder = os.path.split(package_path) + + # installed to a virtualenv + if site_folder.lower() == "site-packages": + parent, folder = os.path.split(site_parent) + + # Windows (prefix/lib/site-packages) + if folder.lower() == "lib": + return parent, package_path + + # Unix (prefix/lib/pythonX.Y/site-packages) + if os.path.basename(parent).lower() == "lib": + return os.path.dirname(parent), package_path + + # something else (prefix/site-packages) + return site_parent, package_path + + # not installed + return None, package_path diff --git a/backend/test/lib/python3.8/site-packages/flask/sessions.py b/backend/test/lib/python3.8/site-packages/flask/sessions.py new file mode 100644 index 0000000000000000000000000000000000000000..e5650d6862ba235cda3f935e21679cc9d866958c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/sessions.py @@ -0,0 +1,367 @@ +from __future__ import annotations + +import hashlib +import typing as t +from collections.abc import MutableMapping +from datetime import datetime +from datetime import timezone + +from itsdangerous import BadSignature +from itsdangerous import URLSafeTimedSerializer +from werkzeug.datastructures import CallbackDict + +from .json.tag import TaggedJSONSerializer + +if t.TYPE_CHECKING: # pragma: no cover + from .app import Flask + from .wrappers import Request, Response + + +class SessionMixin(MutableMapping): + """Expands a basic dictionary with session attributes.""" + + @property + def permanent(self) -> bool: + """This reflects the ``'_permanent'`` key in the dict.""" + return self.get("_permanent", False) + + @permanent.setter + def permanent(self, value: bool) -> None: + self["_permanent"] = bool(value) + + #: Some implementations can detect whether a session is newly + #: created, but that is not guaranteed. Use with caution. The mixin + # default is hard-coded ``False``. + new = False + + #: Some implementations can detect changes to the session and set + #: this when that happens. The mixin default is hard coded to + #: ``True``. + modified = True + + #: Some implementations can detect when session data is read or + #: written and set this when that happens. The mixin default is hard + #: coded to ``True``. + accessed = True + + +class SecureCookieSession(CallbackDict, SessionMixin): + """Base class for sessions based on signed cookies. + + This session backend will set the :attr:`modified` and + :attr:`accessed` attributes. It cannot reliably track whether a + session is new (vs. empty), so :attr:`new` remains hard coded to + ``False``. + """ + + #: When data is changed, this is set to ``True``. Only the session + #: dictionary itself is tracked; if the session contains mutable + #: data (for example a nested dict) then this must be set to + #: ``True`` manually when modifying that data. The session cookie + #: will only be written to the response if this is ``True``. + modified = False + + #: When data is read or written, this is set to ``True``. Used by + # :class:`.SecureCookieSessionInterface` to add a ``Vary: Cookie`` + #: header, which allows caching proxies to cache different pages for + #: different users. 
+ accessed = False + + def __init__(self, initial: t.Any = None) -> None: + def on_update(self) -> None: + self.modified = True + self.accessed = True + + super().__init__(initial, on_update) + + def __getitem__(self, key: str) -> t.Any: + self.accessed = True + return super().__getitem__(key) + + def get(self, key: str, default: t.Any = None) -> t.Any: + self.accessed = True + return super().get(key, default) + + def setdefault(self, key: str, default: t.Any = None) -> t.Any: + self.accessed = True + return super().setdefault(key, default) + + +class NullSession(SecureCookieSession): + """Class used to generate nicer error messages if sessions are not + available. Will still allow read-only access to the empty session + but fail on setting. + """ + + def _fail(self, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: + raise RuntimeError( + "The session is unavailable because no secret " + "key was set. Set the secret_key on the " + "application to something unique and secret." + ) + + __setitem__ = __delitem__ = clear = pop = popitem = update = setdefault = _fail # type: ignore # noqa: B950 + del _fail + + +class SessionInterface: + """The basic interface you have to implement in order to replace the + default session interface which uses werkzeug's securecookie + implementation. The only methods you have to implement are + :meth:`open_session` and :meth:`save_session`, the others have + useful defaults which you don't need to change. + + The session object returned by the :meth:`open_session` method has to + provide a dictionary like interface plus the properties and methods + from the :class:`SessionMixin`. We recommend just subclassing a dict + and adding that mixin:: + + class Session(dict, SessionMixin): + pass + + If :meth:`open_session` returns ``None`` Flask will call into + :meth:`make_null_session` to create a session that acts as replacement + if the session support cannot work because some requirement is not + fulfilled. The default :class:`NullSession` class that is created + will complain that the secret key was not set. + + To replace the session interface on an application all you have to do + is to assign :attr:`flask.Flask.session_interface`:: + + app = Flask(__name__) + app.session_interface = MySessionInterface() + + Multiple requests with the same session may be sent and handled + concurrently. When implementing a new session interface, consider + whether reads or writes to the backing store must be synchronized. + There is no guarantee on the order in which the session for each + request is opened or saved, it will occur in the order that requests + begin and end processing. + + .. versionadded:: 0.8 + """ + + #: :meth:`make_null_session` will look here for the class that should + #: be created when a null session is requested. Likewise the + #: :meth:`is_null_session` method will perform a typecheck against + #: this type. + null_session_class = NullSession + + #: A flag that indicates if the session interface is pickle based. + #: This can be used by Flask extensions to make a decision in regards + #: to how to deal with the session object. + #: + #: .. versionadded:: 0.10 + pickle_based = False + + def make_null_session(self, app: Flask) -> NullSession: + """Creates a null session which acts as a replacement object if the + real session support could not be loaded due to a configuration + error. 
This mainly aids the user experience because the job of the + null session is to still support lookup without complaining but + modifications are answered with a helpful error message of what + failed. + + This creates an instance of :attr:`null_session_class` by default. + """ + return self.null_session_class() + + def is_null_session(self, obj: object) -> bool: + """Checks if a given object is a null session. Null sessions are + not asked to be saved. + + This checks if the object is an instance of :attr:`null_session_class` + by default. + """ + return isinstance(obj, self.null_session_class) + + def get_cookie_name(self, app: Flask) -> str: + """The name of the session cookie. Uses``app.config["SESSION_COOKIE_NAME"]``.""" + return app.config["SESSION_COOKIE_NAME"] + + def get_cookie_domain(self, app: Flask) -> str | None: + """The value of the ``Domain`` parameter on the session cookie. If not set, + browsers will only send the cookie to the exact domain it was set from. + Otherwise, they will send it to any subdomain of the given value as well. + + Uses the :data:`SESSION_COOKIE_DOMAIN` config. + + .. versionchanged:: 2.3 + Not set by default, does not fall back to ``SERVER_NAME``. + """ + rv = app.config["SESSION_COOKIE_DOMAIN"] + return rv if rv else None + + def get_cookie_path(self, app: Flask) -> str: + """Returns the path for which the cookie should be valid. The + default implementation uses the value from the ``SESSION_COOKIE_PATH`` + config var if it's set, and falls back to ``APPLICATION_ROOT`` or + uses ``/`` if it's ``None``. + """ + return app.config["SESSION_COOKIE_PATH"] or app.config["APPLICATION_ROOT"] + + def get_cookie_httponly(self, app: Flask) -> bool: + """Returns True if the session cookie should be httponly. This + currently just returns the value of the ``SESSION_COOKIE_HTTPONLY`` + config var. + """ + return app.config["SESSION_COOKIE_HTTPONLY"] + + def get_cookie_secure(self, app: Flask) -> bool: + """Returns True if the cookie should be secure. This currently + just returns the value of the ``SESSION_COOKIE_SECURE`` setting. + """ + return app.config["SESSION_COOKIE_SECURE"] + + def get_cookie_samesite(self, app: Flask) -> str: + """Return ``'Strict'`` or ``'Lax'`` if the cookie should use the + ``SameSite`` attribute. This currently just returns the value of + the :data:`SESSION_COOKIE_SAMESITE` setting. + """ + return app.config["SESSION_COOKIE_SAMESITE"] + + def get_expiration_time(self, app: Flask, session: SessionMixin) -> datetime | None: + """A helper method that returns an expiration date for the session + or ``None`` if the session is linked to the browser session. The + default implementation returns now + the permanent session + lifetime configured on the application. + """ + if session.permanent: + return datetime.now(timezone.utc) + app.permanent_session_lifetime + return None + + def should_set_cookie(self, app: Flask, session: SessionMixin) -> bool: + """Used by session backends to determine if a ``Set-Cookie`` header + should be set for this session cookie for this response. If the session + has been modified, the cookie is set. If the session is permanent and + the ``SESSION_REFRESH_EACH_REQUEST`` config is true, the cookie is + always set. + + This check is usually skipped if the session was deleted. + + .. 
versionadded:: 0.11 + """ + + return session.modified or ( + session.permanent and app.config["SESSION_REFRESH_EACH_REQUEST"] + ) + + def open_session(self, app: Flask, request: Request) -> SessionMixin | None: + """This is called at the beginning of each request, after + pushing the request context, before matching the URL. + + This must return an object which implements a dictionary-like + interface as well as the :class:`SessionMixin` interface. + + This will return ``None`` to indicate that loading failed in + some way that is not immediately an error. The request + context will fall back to using :meth:`make_null_session` + in this case. + """ + raise NotImplementedError() + + def save_session( + self, app: Flask, session: SessionMixin, response: Response + ) -> None: + """This is called at the end of each request, after generating + a response, before removing the request context. It is skipped + if :meth:`is_null_session` returns ``True``. + """ + raise NotImplementedError() + + +session_json_serializer = TaggedJSONSerializer() + + +class SecureCookieSessionInterface(SessionInterface): + """The default session interface that stores sessions in signed cookies + through the :mod:`itsdangerous` module. + """ + + #: the salt that should be applied on top of the secret key for the + #: signing of cookie based sessions. + salt = "cookie-session" + #: the hash function to use for the signature. The default is sha1 + digest_method = staticmethod(hashlib.sha1) + #: the name of the itsdangerous supported key derivation. The default + #: is hmac. + key_derivation = "hmac" + #: A python serializer for the payload. The default is a compact + #: JSON derived serializer with support for some extra Python types + #: such as datetime objects or tuples. + serializer = session_json_serializer + session_class = SecureCookieSession + + def get_signing_serializer(self, app: Flask) -> URLSafeTimedSerializer | None: + if not app.secret_key: + return None + signer_kwargs = dict( + key_derivation=self.key_derivation, digest_method=self.digest_method + ) + return URLSafeTimedSerializer( + app.secret_key, + salt=self.salt, + serializer=self.serializer, + signer_kwargs=signer_kwargs, + ) + + def open_session(self, app: Flask, request: Request) -> SecureCookieSession | None: + s = self.get_signing_serializer(app) + if s is None: + return None + val = request.cookies.get(self.get_cookie_name(app)) + if not val: + return self.session_class() + max_age = int(app.permanent_session_lifetime.total_seconds()) + try: + data = s.loads(val, max_age=max_age) + return self.session_class(data) + except BadSignature: + return self.session_class() + + def save_session( + self, app: Flask, session: SessionMixin, response: Response + ) -> None: + name = self.get_cookie_name(app) + domain = self.get_cookie_domain(app) + path = self.get_cookie_path(app) + secure = self.get_cookie_secure(app) + samesite = self.get_cookie_samesite(app) + httponly = self.get_cookie_httponly(app) + + # Add a "Vary: Cookie" header if the session was accessed at all. + if session.accessed: + response.vary.add("Cookie") + + # If the session is modified to be empty, remove the cookie. + # If the session is empty, return without setting the cookie. 
+ if not session: + if session.modified: + response.delete_cookie( + name, + domain=domain, + path=path, + secure=secure, + samesite=samesite, + httponly=httponly, + ) + response.vary.add("Cookie") + + return + + if not self.should_set_cookie(app, session): + return + + expires = self.get_expiration_time(app, session) + val = self.get_signing_serializer(app).dumps(dict(session)) # type: ignore + response.set_cookie( + name, + val, # type: ignore + expires=expires, + httponly=httponly, + domain=domain, + path=path, + secure=secure, + samesite=samesite, + ) + response.vary.add("Cookie") diff --git a/backend/test/lib/python3.8/site-packages/flask/signals.py b/backend/test/lib/python3.8/site-packages/flask/signals.py new file mode 100644 index 0000000000000000000000000000000000000000..d79f21f9396026c18428b149edd67d1debbeb07b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/signals.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +import typing as t +import warnings + +from blinker import Namespace + +# This namespace is only for signals provided by Flask itself. +_signals = Namespace() + +template_rendered = _signals.signal("template-rendered") +before_render_template = _signals.signal("before-render-template") +request_started = _signals.signal("request-started") +request_finished = _signals.signal("request-finished") +request_tearing_down = _signals.signal("request-tearing-down") +got_request_exception = _signals.signal("got-request-exception") +appcontext_tearing_down = _signals.signal("appcontext-tearing-down") +appcontext_pushed = _signals.signal("appcontext-pushed") +appcontext_popped = _signals.signal("appcontext-popped") +message_flashed = _signals.signal("message-flashed") + + +def __getattr__(name: str) -> t.Any: + if name == "signals_available": + warnings.warn( + "The 'signals_available' attribute is deprecated and will be removed in" + " Flask 2.4. Signals are always available.", + DeprecationWarning, + stacklevel=2, + ) + return True + + raise AttributeError(name) diff --git a/backend/test/lib/python3.8/site-packages/flask/templating.py b/backend/test/lib/python3.8/site-packages/flask/templating.py new file mode 100644 index 0000000000000000000000000000000000000000..769108f755229f6e5785718e2e48836f9b58223c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/templating.py @@ -0,0 +1,220 @@ +from __future__ import annotations + +import typing as t + +from jinja2 import BaseLoader +from jinja2 import Environment as BaseEnvironment +from jinja2 import Template +from jinja2 import TemplateNotFound + +from .globals import _cv_app +from .globals import _cv_request +from .globals import current_app +from .globals import request +from .helpers import stream_with_context +from .signals import before_render_template +from .signals import template_rendered + +if t.TYPE_CHECKING: # pragma: no cover + from .app import Flask + from .scaffold import Scaffold + + +def _default_template_ctx_processor() -> dict[str, t.Any]: + """Default template context processor. Injects `request`, + `session` and `g`. 
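+    A minimal sketch of the effect (assuming an app with an active
+    request context; ``/where`` is a hypothetical route)::
+
+        from flask import render_template_string
+
+        @app.route("/where")
+        def where():
+            # "request" is available in the template without being passed.
+            return render_template_string("You asked for {{ request.path }}")
+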
+ """ + appctx = _cv_app.get(None) + reqctx = _cv_request.get(None) + rv: dict[str, t.Any] = {} + if appctx is not None: + rv["g"] = appctx.g + if reqctx is not None: + rv["request"] = reqctx.request + rv["session"] = reqctx.session + return rv + + +class Environment(BaseEnvironment): + """Works like a regular Jinja2 environment but has some additional + knowledge of how Flask's blueprint works so that it can prepend the + name of the blueprint to referenced templates if necessary. + """ + + def __init__(self, app: Flask, **options: t.Any) -> None: + if "loader" not in options: + options["loader"] = app.create_global_jinja_loader() + BaseEnvironment.__init__(self, **options) + self.app = app + + +class DispatchingJinjaLoader(BaseLoader): + """A loader that looks for templates in the application and all + the blueprint folders. + """ + + def __init__(self, app: Flask) -> None: + self.app = app + + def get_source( # type: ignore + self, environment: Environment, template: str + ) -> tuple[str, str | None, t.Callable | None]: + if self.app.config["EXPLAIN_TEMPLATE_LOADING"]: + return self._get_source_explained(environment, template) + return self._get_source_fast(environment, template) + + def _get_source_explained( + self, environment: Environment, template: str + ) -> tuple[str, str | None, t.Callable | None]: + attempts = [] + rv: tuple[str, str | None, t.Callable[[], bool] | None] | None + trv: None | (tuple[str, str | None, t.Callable[[], bool] | None]) = None + + for srcobj, loader in self._iter_loaders(template): + try: + rv = loader.get_source(environment, template) + if trv is None: + trv = rv + except TemplateNotFound: + rv = None + attempts.append((loader, srcobj, rv)) + + from .debughelpers import explain_template_loading_attempts + + explain_template_loading_attempts(self.app, template, attempts) + + if trv is not None: + return trv + raise TemplateNotFound(template) + + def _get_source_fast( + self, environment: Environment, template: str + ) -> tuple[str, str | None, t.Callable | None]: + for _srcobj, loader in self._iter_loaders(template): + try: + return loader.get_source(environment, template) + except TemplateNotFound: + continue + raise TemplateNotFound(template) + + def _iter_loaders( + self, template: str + ) -> t.Generator[tuple[Scaffold, BaseLoader], None, None]: + loader = self.app.jinja_loader + if loader is not None: + yield self.app, loader + + for blueprint in self.app.iter_blueprints(): + loader = blueprint.jinja_loader + if loader is not None: + yield blueprint, loader + + def list_templates(self) -> list[str]: + result = set() + loader = self.app.jinja_loader + if loader is not None: + result.update(loader.list_templates()) + + for blueprint in self.app.iter_blueprints(): + loader = blueprint.jinja_loader + if loader is not None: + for template in loader.list_templates(): + result.add(template) + + return list(result) + + +def _render(app: Flask, template: Template, context: dict[str, t.Any]) -> str: + app.update_template_context(context) + before_render_template.send( + app, _async_wrapper=app.ensure_sync, template=template, context=context + ) + rv = template.render(context) + template_rendered.send( + app, _async_wrapper=app.ensure_sync, template=template, context=context + ) + return rv + + +def render_template( + template_name_or_list: str | Template | list[str | Template], + **context: t.Any, +) -> str: + """Render a template by name with the given context. + + :param template_name_or_list: The name of the template to render. 
If + a list is given, the first name to exist will be rendered. + :param context: The variables to make available in the template. + """ + app = current_app._get_current_object() # type: ignore[attr-defined] + template = app.jinja_env.get_or_select_template(template_name_or_list) + return _render(app, template, context) + + +def render_template_string(source: str, **context: t.Any) -> str: + """Render a template from the given source string with the given + context. + + :param source: The source code of the template to render. + :param context: The variables to make available in the template. + """ + app = current_app._get_current_object() # type: ignore[attr-defined] + template = app.jinja_env.from_string(source) + return _render(app, template, context) + + +def _stream( + app: Flask, template: Template, context: dict[str, t.Any] +) -> t.Iterator[str]: + app.update_template_context(context) + before_render_template.send( + app, _async_wrapper=app.ensure_sync, template=template, context=context + ) + + def generate() -> t.Iterator[str]: + yield from template.generate(context) + template_rendered.send( + app, _async_wrapper=app.ensure_sync, template=template, context=context + ) + + rv = generate() + + # If a request context is active, keep it while generating. + if request: + rv = stream_with_context(rv) + + return rv + + +def stream_template( + template_name_or_list: str | Template | list[str | Template], + **context: t.Any, +) -> t.Iterator[str]: + """Render a template by name with the given context as a stream. + This returns an iterator of strings, which can be used as a + streaming response from a view. + + :param template_name_or_list: The name of the template to render. If + a list is given, the first name to exist will be rendered. + :param context: The variables to make available in the template. + + .. versionadded:: 2.2 + """ + app = current_app._get_current_object() # type: ignore[attr-defined] + template = app.jinja_env.get_or_select_template(template_name_or_list) + return _stream(app, template, context) + + +def stream_template_string(source: str, **context: t.Any) -> t.Iterator[str]: + """Render a template from the given source string with the given + context as a stream. This returns an iterator of strings, which can + be used as a streaming response from a view. + + :param source: The source code of the template to render. + :param context: The variables to make available in the template. + + .. 
versionadded:: 2.2 + """ + app = current_app._get_current_object() # type: ignore[attr-defined] + template = app.jinja_env.from_string(source) + return _stream(app, template, context) diff --git a/backend/test/lib/python3.8/site-packages/flask/testing.py b/backend/test/lib/python3.8/site-packages/flask/testing.py new file mode 100644 index 0000000000000000000000000000000000000000..69aa7851889fe02cbb5ee3649c32ed8a950a5280 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/testing.py @@ -0,0 +1,295 @@ +from __future__ import annotations + +import importlib.metadata +import typing as t +from contextlib import contextmanager +from contextlib import ExitStack +from copy import copy +from types import TracebackType +from urllib.parse import urlsplit + +import werkzeug.test +from click.testing import CliRunner +from werkzeug.test import Client +from werkzeug.wrappers import Request as BaseRequest + +from .cli import ScriptInfo +from .sessions import SessionMixin + +if t.TYPE_CHECKING: # pragma: no cover + from werkzeug.test import TestResponse + + from .app import Flask + + +class EnvironBuilder(werkzeug.test.EnvironBuilder): + """An :class:`~werkzeug.test.EnvironBuilder`, that takes defaults from the + application. + + :param app: The Flask application to configure the environment from. + :param path: URL path being requested. + :param base_url: Base URL where the app is being served, which + ``path`` is relative to. If not given, built from + :data:`PREFERRED_URL_SCHEME`, ``subdomain``, + :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. + :param subdomain: Subdomain name to append to :data:`SERVER_NAME`. + :param url_scheme: Scheme to use instead of + :data:`PREFERRED_URL_SCHEME`. + :param json: If given, this is serialized as JSON and passed as + ``data``. Also defaults ``content_type`` to + ``application/json``. + :param args: other positional arguments passed to + :class:`~werkzeug.test.EnvironBuilder`. + :param kwargs: other keyword arguments passed to + :class:`~werkzeug.test.EnvironBuilder`. + """ + + def __init__( + self, + app: Flask, + path: str = "/", + base_url: str | None = None, + subdomain: str | None = None, + url_scheme: str | None = None, + *args: t.Any, + **kwargs: t.Any, + ) -> None: + assert not (base_url or subdomain or url_scheme) or ( + base_url is not None + ) != bool( + subdomain or url_scheme + ), 'Cannot pass "subdomain" or "url_scheme" with "base_url".' + + if base_url is None: + http_host = app.config.get("SERVER_NAME") or "localhost" + app_root = app.config["APPLICATION_ROOT"] + + if subdomain: + http_host = f"{subdomain}.{http_host}" + + if url_scheme is None: + url_scheme = app.config["PREFERRED_URL_SCHEME"] + + url = urlsplit(path) + base_url = ( + f"{url.scheme or url_scheme}://{url.netloc or http_host}" + f"/{app_root.lstrip('/')}" + ) + path = url.path + + if url.query: + sep = b"?" if isinstance(url.query, bytes) else "?" + path += sep + url.query + + self.app = app + super().__init__(path, base_url, *args, **kwargs) + + def json_dumps(self, obj: t.Any, **kwargs: t.Any) -> str: # type: ignore + """Serialize ``obj`` to a JSON-formatted string. + + The serialization will be configured according to the config associated + with this EnvironBuilder's ``app``. 
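+        For example, the test client routes a ``json=`` argument through
+        this method (a sketch; the ``/api/echo`` route is hypothetical):
+
+        .. code-block:: python
+
+            client = app.test_client()
+            # Serialized with the app's JSON provider; the content type
+            # defaults to application/json.
+            response = client.post("/api/echo", json={"answer": 42})
+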
+ """ + return self.app.json.dumps(obj, **kwargs) + + +_werkzeug_version = "" + + +def _get_werkzeug_version() -> str: + global _werkzeug_version + + if not _werkzeug_version: + _werkzeug_version = importlib.metadata.version("werkzeug") + + return _werkzeug_version + + +class FlaskClient(Client): + """Works like a regular Werkzeug test client but has knowledge about + Flask's contexts to defer the cleanup of the request context until + the end of a ``with`` block. For general information about how to + use this class refer to :class:`werkzeug.test.Client`. + + .. versionchanged:: 0.12 + `app.test_client()` includes preset default environment, which can be + set after instantiation of the `app.test_client()` object in + `client.environ_base`. + + Basic usage is outlined in the :doc:`/testing` chapter. + """ + + application: Flask + + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + super().__init__(*args, **kwargs) + self.preserve_context = False + self._new_contexts: list[t.ContextManager[t.Any]] = [] + self._context_stack = ExitStack() + self.environ_base = { + "REMOTE_ADDR": "127.0.0.1", + "HTTP_USER_AGENT": f"Werkzeug/{_get_werkzeug_version()}", + } + + @contextmanager + def session_transaction( + self, *args: t.Any, **kwargs: t.Any + ) -> t.Generator[SessionMixin, None, None]: + """When used in combination with a ``with`` statement this opens a + session transaction. This can be used to modify the session that + the test client uses. Once the ``with`` block is left the session is + stored back. + + :: + + with client.session_transaction() as session: + session['value'] = 42 + + Internally this is implemented by going through a temporary test + request context and since session handling could depend on + request variables this function accepts the same arguments as + :meth:`~flask.Flask.test_request_context` which are directly + passed through. + """ + if self._cookies is None: + raise TypeError( + "Cookies are disabled. Create a client with 'use_cookies=True'." 
+ ) + + app = self.application + ctx = app.test_request_context(*args, **kwargs) + self._add_cookies_to_wsgi(ctx.request.environ) + + with ctx: + sess = app.session_interface.open_session(app, ctx.request) + + if sess is None: + raise RuntimeError("Session backend did not open a session.") + + yield sess + resp = app.response_class() + + if app.session_interface.is_null_session(sess): + return + + with ctx: + app.session_interface.save_session(app, sess, resp) + + self._update_cookies_from_response( + ctx.request.host.partition(":")[0], + ctx.request.path, + resp.headers.getlist("Set-Cookie"), + ) + + def _copy_environ(self, other): + out = {**self.environ_base, **other} + + if self.preserve_context: + out["werkzeug.debug.preserve_context"] = self._new_contexts.append + + return out + + def _request_from_builder_args(self, args, kwargs): + kwargs["environ_base"] = self._copy_environ(kwargs.get("environ_base", {})) + builder = EnvironBuilder(self.application, *args, **kwargs) + + try: + return builder.get_request() + finally: + builder.close() + + def open( + self, + *args: t.Any, + buffered: bool = False, + follow_redirects: bool = False, + **kwargs: t.Any, + ) -> TestResponse: + if args and isinstance( + args[0], (werkzeug.test.EnvironBuilder, dict, BaseRequest) + ): + if isinstance(args[0], werkzeug.test.EnvironBuilder): + builder = copy(args[0]) + builder.environ_base = self._copy_environ(builder.environ_base or {}) + request = builder.get_request() + elif isinstance(args[0], dict): + request = EnvironBuilder.from_environ( + args[0], app=self.application, environ_base=self._copy_environ({}) + ).get_request() + else: + # isinstance(args[0], BaseRequest) + request = copy(args[0]) + request.environ = self._copy_environ(request.environ) + else: + # request is None + request = self._request_from_builder_args(args, kwargs) + + # Pop any previously preserved contexts. This prevents contexts + # from being preserved across redirects or multiple requests + # within a single block. + self._context_stack.close() + + response = super().open( + request, + buffered=buffered, + follow_redirects=follow_redirects, + ) + response.json_module = self.application.json # type: ignore[assignment] + + # Re-push contexts that were preserved during the request. + while self._new_contexts: + cm = self._new_contexts.pop() + self._context_stack.enter_context(cm) + + return response + + def __enter__(self) -> FlaskClient: + if self.preserve_context: + raise RuntimeError("Cannot nest client invocations") + self.preserve_context = True + return self + + def __exit__( + self, + exc_type: type | None, + exc_value: BaseException | None, + tb: TracebackType | None, + ) -> None: + self.preserve_context = False + self._context_stack.close() + + +class FlaskCliRunner(CliRunner): + """A :class:`~click.testing.CliRunner` for testing a Flask app's + CLI commands. Typically created using + :meth:`~flask.Flask.test_cli_runner`. See :ref:`testing-cli`. + """ + + def __init__(self, app: Flask, **kwargs: t.Any) -> None: + self.app = app + super().__init__(**kwargs) + + def invoke( # type: ignore + self, cli: t.Any = None, args: t.Any = None, **kwargs: t.Any + ) -> t.Any: + """Invokes a CLI command in an isolated environment. See + :meth:`CliRunner.invoke <click.testing.CliRunner.invoke>` for + full method documentation. See :ref:`testing-cli` for examples. + + If the ``obj`` argument is not given, passes an instance of + :class:`~flask.cli.ScriptInfo` that knows how to load the Flask + app being tested. 
+ + :param cli: Command object to invoke. Default is the app's + :attr:`~flask.app.Flask.cli` group. + :param args: List of strings to invoke the command with. + + :return: a :class:`~click.testing.Result` object. + """ + if cli is None: + cli = self.app.cli # type: ignore + + if "obj" not in kwargs: + kwargs["obj"] = ScriptInfo(create_app=lambda: self.app) + + return super().invoke(cli, args, **kwargs) diff --git a/backend/test/lib/python3.8/site-packages/flask/typing.py b/backend/test/lib/python3.8/site-packages/flask/typing.py new file mode 100644 index 0000000000000000000000000000000000000000..50aef7f463361f2f2d10096e5472ef9768cab743 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/typing.py @@ -0,0 +1,82 @@ +from __future__ import annotations + +import typing as t + +if t.TYPE_CHECKING: # pragma: no cover + from _typeshed.wsgi import WSGIApplication # noqa: F401 + from werkzeug.datastructures import Headers # noqa: F401 + from werkzeug.wrappers import Response # noqa: F401 + +# The possible types that are directly convertible or are a Response object. +ResponseValue = t.Union[ + "Response", + str, + bytes, + t.List[t.Any], + # Only dict is actually accepted, but Mapping allows for TypedDict. + t.Mapping[str, t.Any], + t.Iterator[str], + t.Iterator[bytes], +] + +# the possible types for an individual HTTP header +# This should be a Union, but mypy doesn't pass unless it's a TypeVar. +HeaderValue = t.Union[str, t.List[str], t.Tuple[str, ...]] + +# the possible types for HTTP headers +HeadersValue = t.Union[ + "Headers", + t.Mapping[str, HeaderValue], + t.Sequence[t.Tuple[str, HeaderValue]], +] + +# The possible types returned by a route function. +ResponseReturnValue = t.Union[ + ResponseValue, + t.Tuple[ResponseValue, HeadersValue], + t.Tuple[ResponseValue, int], + t.Tuple[ResponseValue, int, HeadersValue], + "WSGIApplication", +] + +# Allow any subclass of werkzeug.Response, such as the one from Flask, +# as a callback argument. Using werkzeug.Response directly makes a +# callback annotated with flask.Response fail type checking. +ResponseClass = t.TypeVar("ResponseClass", bound="Response") + +AppOrBlueprintKey = t.Optional[str] # The App key is None, whereas blueprints are named +AfterRequestCallable = t.Union[ + t.Callable[[ResponseClass], ResponseClass], + t.Callable[[ResponseClass], t.Awaitable[ResponseClass]], +] +BeforeFirstRequestCallable = t.Union[ + t.Callable[[], None], t.Callable[[], t.Awaitable[None]] +] +BeforeRequestCallable = t.Union[ + t.Callable[[], t.Optional[ResponseReturnValue]], + t.Callable[[], t.Awaitable[t.Optional[ResponseReturnValue]]], +] +ShellContextProcessorCallable = t.Callable[[], t.Dict[str, t.Any]] +TeardownCallable = t.Union[ + t.Callable[[t.Optional[BaseException]], None], + t.Callable[[t.Optional[BaseException]], t.Awaitable[None]], +] +TemplateContextProcessorCallable = t.Callable[[], t.Dict[str, t.Any]] +TemplateFilterCallable = t.Callable[..., t.Any] +TemplateGlobalCallable = t.Callable[..., t.Any] +TemplateTestCallable = t.Callable[..., bool] +URLDefaultCallable = t.Callable[[str, dict], None] +URLValuePreprocessorCallable = t.Callable[[t.Optional[str], t.Optional[dict]], None] + +# This should take Exception, but that either breaks typing the argument +# with a specific exception, or decorating multiple times with different +# exceptions (and using a union type on the argument). 
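+# As a sketch, handlers for an HTTP code and for an arbitrary exception
+# must both satisfy this one callable type, which is why the argument
+# is ``t.Any``:
+#
+#     @app.errorhandler(404)
+#     def handle_404(e):  # e is an HTTPException subclass instance
+#         ...
+#
+#     @app.errorhandler(KeyError)
+#     def handle_key_error(e):  # e is a KeyError
+#         ...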
+# https://github.com/pallets/flask/issues/4095 +# https://github.com/pallets/flask/issues/4295 +# https://github.com/pallets/flask/issues/4297 +ErrorHandlerCallable = t.Callable[[t.Any], ResponseReturnValue] + +RouteCallable = t.Union[ + t.Callable[..., ResponseReturnValue], + t.Callable[..., t.Awaitable[ResponseReturnValue]], +] diff --git a/backend/test/lib/python3.8/site-packages/flask/views.py b/backend/test/lib/python3.8/site-packages/flask/views.py new file mode 100644 index 0000000000000000000000000000000000000000..c7a2b621c89604cdd2f1278404c2d01ff1932083 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/views.py @@ -0,0 +1,190 @@ +from __future__ import annotations + +import typing as t + +from . import typing as ft +from .globals import current_app +from .globals import request + + +http_method_funcs = frozenset( + ["get", "post", "head", "options", "delete", "put", "trace", "patch"] +) + + +class View: + """Subclass this class and override :meth:`dispatch_request` to + create a generic class-based view. Call :meth:`as_view` to create a + view function that creates an instance of the class with the given + arguments and calls its ``dispatch_request`` method with any URL + variables. + + See :doc:`views` for a detailed guide. + + .. code-block:: python + + class Hello(View): + init_every_request = False + + def dispatch_request(self, name): + return f"Hello, {name}!" + + app.add_url_rule( + "/hello/<name>", view_func=Hello.as_view("hello") + ) + + Set :attr:`methods` on the class to change what methods the view + accepts. + + Set :attr:`decorators` on the class to apply a list of decorators to + the generated view function. Decorators applied to the class itself + will not be applied to the generated view function! + + Set :attr:`init_every_request` to ``False`` for efficiency, unless + you need to store request-global data on ``self``. + """ + + #: The methods this view is registered for. Uses the same default + #: (``["GET", "HEAD", "OPTIONS"]``) as ``route`` and + #: ``add_url_rule`` by default. + methods: t.ClassVar[t.Collection[str] | None] = None + + #: Control whether the ``OPTIONS`` method is handled automatically. + #: Uses the same default (``True``) as ``route`` and + #: ``add_url_rule`` by default. + provide_automatic_options: t.ClassVar[bool | None] = None + + #: A list of decorators to apply, in order, to the generated view + #: function. Remember that ``@decorator`` syntax is applied bottom + #: to top, so the first decorator in the list would be the bottom + #: decorator. + #: + #: .. versionadded:: 0.8 + decorators: t.ClassVar[list[t.Callable]] = [] + + #: Create a new instance of this view class for every request by + #: default. If a view subclass sets this to ``False``, the same + #: instance is used for every request. + #: + #: A single instance is more efficient, especially if complex setup + #: is done during init. However, storing data on ``self`` is no + #: longer safe across requests, and :data:`~flask.g` should be used + #: instead. + #: + #: .. versionadded:: 2.2 + init_every_request: t.ClassVar[bool] = True + + def dispatch_request(self) -> ft.ResponseReturnValue: + """The actual view function behavior. Subclasses must override + this and return a valid response. Any variables from the URL + rule are passed as keyword arguments. 
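+        As a sketch, a view limited to ``POST`` with a decorator applied
+        (``login_required`` stands in for any view decorator):
+
+        .. code-block:: python
+
+            class CreateItem(View):
+                methods = ["POST"]
+                decorators = [login_required]
+
+                def dispatch_request(self):
+                    return "created", 201
+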
+ """ + raise NotImplementedError() + + @classmethod + def as_view( + cls, name: str, *class_args: t.Any, **class_kwargs: t.Any + ) -> ft.RouteCallable: + """Convert the class into a view function that can be registered + for a route. + + By default, the generated view will create a new instance of the + view class for every request and call its + :meth:`dispatch_request` method. If the view class sets + :attr:`init_every_request` to ``False``, the same instance will + be used for every request. + + Except for ``name``, all other arguments passed to this method + are forwarded to the view class ``__init__`` method. + + .. versionchanged:: 2.2 + Added the ``init_every_request`` class attribute. + """ + if cls.init_every_request: + + def view(**kwargs: t.Any) -> ft.ResponseReturnValue: + self = view.view_class( # type: ignore[attr-defined] + *class_args, **class_kwargs + ) + return current_app.ensure_sync(self.dispatch_request)(**kwargs) + + else: + self = cls(*class_args, **class_kwargs) + + def view(**kwargs: t.Any) -> ft.ResponseReturnValue: + return current_app.ensure_sync(self.dispatch_request)(**kwargs) + + if cls.decorators: + view.__name__ = name + view.__module__ = cls.__module__ + for decorator in cls.decorators: + view = decorator(view) + + # We attach the view class to the view function for two reasons: + # first of all it allows us to easily figure out what class-based + # view this thing came from, secondly it's also used for instantiating + # the view class so you can actually replace it with something else + # for testing purposes and debugging. + view.view_class = cls # type: ignore + view.__name__ = name + view.__doc__ = cls.__doc__ + view.__module__ = cls.__module__ + view.methods = cls.methods # type: ignore + view.provide_automatic_options = cls.provide_automatic_options # type: ignore + return view + + +class MethodView(View): + """Dispatches request methods to the corresponding instance methods. + For example, if you implement a ``get`` method, it will be used to + handle ``GET`` requests. + + This can be useful for defining a REST API. + + :attr:`methods` is automatically set based on the methods defined on + the class. + + See :doc:`views` for a detailed guide. + + .. code-block:: python + + class CounterAPI(MethodView): + def get(self): + return str(session.get("counter", 0)) + + def post(self): + session["counter"] = session.get("counter", 0) + 1 + return redirect(url_for("counter")) + + app.add_url_rule( + "/counter", view_func=CounterAPI.as_view("counter") + ) + """ + + def __init_subclass__(cls, **kwargs: t.Any) -> None: + super().__init_subclass__(**kwargs) + + if "methods" not in cls.__dict__: + methods = set() + + for base in cls.__bases__: + if getattr(base, "methods", None): + methods.update(base.methods) # type: ignore[attr-defined] + + for key in http_method_funcs: + if hasattr(cls, key): + methods.add(key.upper()) + + if methods: + cls.methods = methods + + def dispatch_request(self, **kwargs: t.Any) -> ft.ResponseReturnValue: + meth = getattr(self, request.method.lower(), None) + + # If the request method is HEAD and we don't have a handler for it + # retry with GET. 
+ if meth is None and request.method == "HEAD": + meth = getattr(self, "get", None) + + assert meth is not None, f"Unimplemented method {request.method!r}" + return current_app.ensure_sync(meth)(**kwargs) diff --git a/backend/test/lib/python3.8/site-packages/flask/wrappers.py b/backend/test/lib/python3.8/site-packages/flask/wrappers.py new file mode 100644 index 0000000000000000000000000000000000000000..ef7aa38c0ebe8617d8015ec6b9ee7f4b30596424 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/flask/wrappers.py @@ -0,0 +1,173 @@ +from __future__ import annotations + +import typing as t + +from werkzeug.exceptions import BadRequest +from werkzeug.wrappers import Request as RequestBase +from werkzeug.wrappers import Response as ResponseBase + +from . import json +from .globals import current_app +from .helpers import _split_blueprint_path + +if t.TYPE_CHECKING: # pragma: no cover + from werkzeug.routing import Rule + + +class Request(RequestBase): + """The request object used by default in Flask. Remembers the + matched endpoint and view arguments. + + It is what ends up as :class:`~flask.request`. If you want to replace + the request object used you can subclass this and set + :attr:`~flask.Flask.request_class` to your subclass. + + The request object is a :class:`~werkzeug.wrappers.Request` subclass and + provides all of the attributes Werkzeug defines plus a few Flask + specific ones. + """ + + json_module: t.Any = json + + #: The internal URL rule that matched the request. This can be + #: useful to inspect which methods are allowed for the URL from + #: a before/after handler (``request.url_rule.methods``) etc. + #: Though if the request's method was invalid for the URL rule, + #: the valid list is available in ``routing_exception.valid_methods`` + #: instead (an attribute of the Werkzeug exception + #: :exc:`~werkzeug.exceptions.MethodNotAllowed`) + #: because the request was never internally bound. + #: + #: .. versionadded:: 0.6 + url_rule: Rule | None = None + + #: A dict of view arguments that matched the request. If an exception + #: happened when matching, this will be ``None``. + view_args: dict[str, t.Any] | None = None + + #: If matching the URL failed, this is the exception that will be + #: raised / was raised as part of the request handling. This is + #: usually a :exc:`~werkzeug.exceptions.NotFound` exception or + #: something similar. + routing_exception: Exception | None = None + + @property + def max_content_length(self) -> int | None: # type: ignore + """Read-only view of the ``MAX_CONTENT_LENGTH`` config key.""" + if current_app: + return current_app.config["MAX_CONTENT_LENGTH"] + else: + return None + + @property + def endpoint(self) -> str | None: + """The endpoint that matched the request URL. + + This will be ``None`` if matching failed or has not been + performed yet. + + This in combination with :attr:`view_args` can be used to + reconstruct the same URL or a modified URL. + """ + if self.url_rule is not None: + return self.url_rule.endpoint + + return None + + @property + def blueprint(self) -> str | None: + """The registered name of the current blueprint. + + This will be ``None`` if the endpoint is not part of a + blueprint, or if URL matching failed or has not been performed + yet. + + This does not necessarily match the name the blueprint was + created with. It may have been nested, or registered with a + different name. + """ + endpoint = self.endpoint + + if endpoint is not None and "." 
in endpoint: + return endpoint.rpartition(".")[0] + + return None + + @property + def blueprints(self) -> list[str]: + """The registered names of the current blueprint upwards through + parent blueprints. + + This will be an empty list if there is no current blueprint, or + if URL matching failed. + + .. versionadded:: 2.0.1 + """ + name = self.blueprint + + if name is None: + return [] + + return _split_blueprint_path(name) + + def _load_form_data(self) -> None: + super()._load_form_data() + + # In debug mode we're replacing the files multidict with an ad-hoc + # subclass that raises a different error for key errors. + if ( + current_app + and current_app.debug + and self.mimetype != "multipart/form-data" + and not self.files + ): + from .debughelpers import attach_enctype_error_multidict + + attach_enctype_error_multidict(self) + + def on_json_loading_failed(self, e: ValueError | None) -> t.Any: + try: + return super().on_json_loading_failed(e) + except BadRequest as e: + if current_app and current_app.debug: + raise + + raise BadRequest() from e + + +class Response(ResponseBase): + """The response object that is used by default in Flask. Works like the + response object from Werkzeug but is set to have an HTML mimetype by + default. Quite often you don't have to create this object yourself because + :meth:`~flask.Flask.make_response` will take care of that for you. + + If you want to replace the response object used you can subclass this and + set :attr:`~flask.Flask.response_class` to your subclass. + + .. versionchanged:: 1.0 + JSON support is added to the response, like the request. This is useful + when testing to get the test client response data as JSON. + + .. versionchanged:: 1.0 + + Added :attr:`max_cookie_size`. + """ + + default_mimetype: str | None = "text/html" + + json_module = json + + autocorrect_location_header = False + + @property + def max_cookie_size(self) -> int: # type: ignore + """Read-only view of the :data:`MAX_COOKIE_SIZE` config key. + + See :attr:`~werkzeug.wrappers.Response.max_cookie_size` in + Werkzeug's docs. + """ + if current_app: + return current_app.config["MAX_COOKIE_SIZE"] + + # return Werkzeug's default when not in an app context + return super().max_cookie_size diff --git a/backend/test/lib/python3.8/site-packages/gridfs/__init__.py b/backend/test/lib/python3.8/site-packages/gridfs/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9a4cda552721edfb74509cf9d7f9f9f25c72ade0 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/gridfs/__init__.py @@ -0,0 +1,1020 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""GridFS is a specification for storing large objects in Mongo. + +The :mod:`gridfs` package is an implementation of GridFS on top of +:mod:`pymongo`, exposing a file-like interface. + +.. seealso:: The MongoDB documentation on `gridfs <https://dochub.mongodb.org/core/gridfs>`_. 
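+A minimal usage sketch (assuming a reachable MongoDB instance)::
+
+    from pymongo import MongoClient
+    import gridfs
+
+    db = MongoClient().gridfs_example
+    fs = gridfs.GridFS(db)
+
+    file_id = fs.put(b"hello world", filename="hello.txt")
+    assert fs.get(file_id).read() == b"hello world"
+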
+""" + +from collections import abc +from typing import Any, List, Mapping, Optional, cast + +from bson.objectid import ObjectId +from gridfs.errors import NoFile +from gridfs.grid_file import ( + DEFAULT_CHUNK_SIZE, + GridIn, + GridOut, + GridOutCursor, + _clear_entity_type_registry, + _disallow_transactions, +) +from pymongo import ASCENDING, DESCENDING, _csot +from pymongo.client_session import ClientSession +from pymongo.collection import Collection +from pymongo.common import validate_string +from pymongo.database import Database +from pymongo.errors import ConfigurationError +from pymongo.read_preferences import _ServerMode +from pymongo.write_concern import WriteConcern + +__all__ = [ + "GridFS", + "GridFSBucket", + "NoFile", + "DEFAULT_CHUNK_SIZE", + "GridIn", + "GridOut", + "GridOutCursor", +] + + +class GridFS: + """An instance of GridFS on top of a single Database.""" + + def __init__(self, database: Database, collection: str = "fs"): + """Create a new instance of :class:`GridFS`. + + Raises :class:`TypeError` if `database` is not an instance of + :class:`~pymongo.database.Database`. + + :Parameters: + - `database`: database to use + - `collection` (optional): root collection to use + + .. versionchanged:: 4.0 + Removed the `disable_md5` parameter. See + :ref:`removed-gridfs-checksum` for details. + + .. versionchanged:: 3.11 + Running a GridFS operation in a transaction now always raises an + error. GridFS does not support multi-document transactions. + + .. versionchanged:: 3.7 + Added the `disable_md5` parameter. + + .. versionchanged:: 3.1 + Indexes are only ensured on the first write to the DB. + + .. versionchanged:: 3.0 + `database` must use an acknowledged + :attr:`~pymongo.database.Database.write_concern` + + .. seealso:: The MongoDB documentation on `gridfs <https://dochub.mongodb.org/core/gridfs>`_. + """ + if not isinstance(database, Database): + raise TypeError("database must be an instance of Database") + + database = _clear_entity_type_registry(database) + + if not database.write_concern.acknowledged: + raise ConfigurationError("database must use acknowledged write_concern") + + self.__collection = database[collection] + self.__files = self.__collection.files + self.__chunks = self.__collection.chunks + + def new_file(self, **kwargs: Any) -> GridIn: + """Create a new file in GridFS. + + Returns a new :class:`~gridfs.grid_file.GridIn` instance to + which data can be written. Any keyword arguments will be + passed through to :meth:`~gridfs.grid_file.GridIn`. + + If the ``"_id"`` of the file is manually specified, it must + not already exist in GridFS. Otherwise + :class:`~gridfs.errors.FileExists` is raised. + + :Parameters: + - `**kwargs` (optional): keyword arguments for file creation + """ + return GridIn(self.__collection, **kwargs) + + def put(self, data: Any, **kwargs: Any) -> Any: + """Put data in GridFS as a new file. + + Equivalent to doing:: + + with fs.new_file(**kwargs) as f: + f.write(data) + + `data` can be either an instance of :class:`bytes` or a file-like + object providing a :meth:`read` method. If an `encoding` keyword + argument is passed, `data` can also be a :class:`str` instance, which + will be encoded as `encoding` before being written. Any keyword + arguments will be passed through to the created file - see + :meth:`~gridfs.grid_file.GridIn` for possible arguments. Returns the + ``"_id"`` of the created file. + + If the ``"_id"`` of the file is manually specified, it must + not already exist in GridFS. 
Otherwise + :class:`~gridfs.errors.FileExists` is raised. + + :Parameters: + - `data`: data to be written as a file. + - `**kwargs` (optional): keyword arguments for file creation + + .. versionchanged:: 3.0 + w=0 writes to GridFS are now prohibited. + """ + with GridIn(self.__collection, **kwargs) as grid_file: + grid_file.write(data) + return grid_file._id + + def get(self, file_id: Any, session: Optional[ClientSession] = None) -> GridOut: + """Get a file from GridFS by ``"_id"``. + + Returns an instance of :class:`~gridfs.grid_file.GridOut`, + which provides a file-like interface for reading. + + :Parameters: + - `file_id`: ``"_id"`` of the file to get + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + gout = GridOut(self.__collection, file_id, session=session) + + # Raise NoFile now, instead of on first attribute access. + gout._ensure_file() + return gout + + def get_version( + self, + filename: Optional[str] = None, + version: Optional[int] = -1, + session: Optional[ClientSession] = None, + **kwargs: Any + ) -> GridOut: + """Get a file from GridFS by ``"filename"`` or metadata fields. + + Returns a version of the file in GridFS whose filename matches + `filename` and whose metadata fields match the supplied keyword + arguments, as an instance of :class:`~gridfs.grid_file.GridOut`. + + Version numbering is a convenience atop the GridFS API provided + by MongoDB. If more than one file matches the query (either by + `filename` alone, by metadata fields, or by a combination of + both), then version ``-1`` will be the most recently uploaded + matching file, ``-2`` the second most recently + uploaded, etc. Version ``0`` will be the first version + uploaded, ``1`` the second version, etc. So if three versions + have been uploaded, then version ``0`` is the same as version + ``-3``, version ``1`` is the same as version ``-2``, and + version ``2`` is the same as version ``-1``. + + Raises :class:`~gridfs.errors.NoFile` if no such version of + that file exists. + + :Parameters: + - `filename`: ``"filename"`` of the file to get, or `None` + - `version` (optional): version of the file to get (defaults + to -1, the most recent version uploaded) + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + - `**kwargs` (optional): find files by custom metadata. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. versionchanged:: 3.1 + ``get_version`` no longer ensures indexes. + """ + query = kwargs + if filename is not None: + query["filename"] = filename + + _disallow_transactions(session) + cursor = self.__files.find(query, session=session) + if version is None: + version = -1 + if version < 0: + skip = abs(version) - 1 + cursor.limit(-1).skip(skip).sort("uploadDate", DESCENDING) + else: + cursor.limit(-1).skip(version).sort("uploadDate", ASCENDING) + try: + doc = next(cursor) + return GridOut(self.__collection, file_document=doc, session=session) + except StopIteration: + raise NoFile("no version %d for filename %r" % (version, filename)) + + def get_last_version( + self, filename: Optional[str] = None, session: Optional[ClientSession] = None, **kwargs: Any + ) -> GridOut: + """Get the most recent version of a file in GridFS by ``"filename"`` + or metadata fields. + + Equivalent to calling :meth:`get_version` with the default + `version` (``-1``). 
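The version numbering described above can be seen in a small sketch; the connection string and filename are placeholders, and the short sleep only keeps the millisecond-resolution ``uploadDate`` values distinct::

    import time

    from pymongo import MongoClient
    from gridfs import GridFS

    fs = GridFS(MongoClient("mongodb://localhost:27017").my_database)  # placeholder

    # Upload three versions of the same filename.
    for payload in (b"v0", b"v1", b"v2"):
        fs.put(payload, filename="notes.txt")
        time.sleep(0.01)  # keep uploadDate values distinct

    # Negative versions count back from the most recent upload...
    assert fs.get_version("notes.txt", -1).read() == b"v2"
    # ...while non-negative versions count forward from the first.
    assert fs.get_version("notes.txt", 0).read() == b"v0"
    # get_last_version is shorthand for version=-1.
    assert fs.get_last_version("notes.txt").read() == b"v2"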
+ + :Parameters: + - `filename`: ``"filename"`` of the file to get, or `None` + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + - `**kwargs` (optional): find files by custom metadata. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + return self.get_version(filename=filename, session=session, **kwargs) + + # TODO add optional safe mode for chunk removal? + def delete(self, file_id: Any, session: Optional[ClientSession] = None) -> None: + """Delete a file from GridFS by ``"_id"``. + + Deletes all data belonging to the file with ``"_id"``: + `file_id`. + + .. warning:: Any processes/threads reading from the file while + this method is executing will likely see an invalid/corrupt + file. Care should be taken to avoid concurrent reads to a file + while it is being deleted. + + .. note:: Deletes of non-existent files are considered successful + since the end result is the same: no file with that _id remains. + + :Parameters: + - `file_id`: ``"_id"`` of the file to delete + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. versionchanged:: 3.1 + ``delete`` no longer ensures indexes. + """ + _disallow_transactions(session) + self.__files.delete_one({"_id": file_id}, session=session) + self.__chunks.delete_many({"files_id": file_id}, session=session) + + def list(self, session: Optional[ClientSession] = None) -> List[str]: + """List the names of all files stored in this instance of + :class:`GridFS`. + + :Parameters: + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. versionchanged:: 3.1 + ``list`` no longer ensures indexes. + """ + _disallow_transactions(session) + # With an index, distinct includes documents with no filename + # as None. + return [ + name for name in self.__files.distinct("filename", session=session) if name is not None + ] + + def find_one( + self, + filter: Optional[Any] = None, + session: Optional[ClientSession] = None, + *args: Any, + **kwargs: Any + ) -> Optional[GridOut]: + """Get a single file from gridfs. + + All arguments to :meth:`find` are also valid arguments for + :meth:`find_one`, although any `limit` argument will be + ignored. Returns a single :class:`~gridfs.grid_file.GridOut`, + or ``None`` if no matching file is found. For example:: + + file = fs.find_one({"filename": "lisa.txt"}) + + :Parameters: + - `filter` (optional): a dictionary specifying + the query to be performing OR any other type to be used as + the value for a query for ``"_id"`` in the file collection. + - `*args` (optional): any additional positional arguments are + the same as the arguments to :meth:`find`. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + - `**kwargs` (optional): any additional keyword arguments + are the same as the arguments to :meth:`find`. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + if filter is not None and not isinstance(filter, abc.Mapping): + filter = {"_id": filter} + + _disallow_transactions(session) + for f in self.find(filter, *args, session=session, **kwargs): + return f + + return None + + def find(self, *args: Any, **kwargs: Any) -> GridOutCursor: + """Query GridFS for files. + + Returns a cursor that iterates across files matching + arbitrary queries on the files collection. Can be combined + with other modifiers for additional control. 
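Before the remaining parameters, a short sketch of how ``list``, ``find``, ``find_one``, and ``delete`` compose, under the same placeholder-deployment assumptions as the earlier sketches::

    from pymongo import MongoClient
    from gridfs import GridFS

    fs = GridFS(MongoClient("mongodb://localhost:27017").my_database)  # placeholder

    fs.put(b"some text", filename="notes.txt")

    # list() returns the distinct stored filenames.
    print(fs.list())

    # find() yields GridOut objects and supports normal cursor modifiers.
    for grid_out in fs.find({"filename": "notes.txt"}).sort("uploadDate", -1):
        print(grid_out.upload_date, len(grid_out.read()))

    # find_one() accepts a query document or a bare "_id" value.
    newest = fs.find_one({"filename": "notes.txt"})
    if newest is not None:
        fs.delete(newest._id)  # deleting a non-existent _id is also "successful"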
For example:: + + for grid_out in fs.find({"filename": "lisa.txt"}, + no_cursor_timeout=True): + data = grid_out.read() + + would iterate through all versions of "lisa.txt" stored in GridFS. + Note that setting no_cursor_timeout to True may be important to + prevent the cursor from timing out during long multi-file processing + work. + + As another example, the call:: + + most_recent_three = fs.find().sort("uploadDate", -1).limit(3) + + would return a cursor to the three most recently uploaded files + in GridFS. + + Follows a similar interface to + :meth:`~pymongo.collection.Collection.find` + in :class:`~pymongo.collection.Collection`. + + If a :class:`~pymongo.client_session.ClientSession` is passed to + :meth:`find`, all returned :class:`~gridfs.grid_file.GridOut` instances + are associated with that session. + + :Parameters: + - `filter` (optional): A query document that selects which files + to include in the result set. Can be an empty document to include + all files. + - `skip` (optional): the number of files to omit (from + the start of the result set) when returning the results + - `limit` (optional): the maximum number of results to + return + - `no_cursor_timeout` (optional): if False (the default), any + returned cursor is closed by the server after 10 minutes of + inactivity. If set to True, the returned cursor will never + time out on the server. Care should be taken to ensure that + cursors with no_cursor_timeout turned on are properly closed. + - `sort` (optional): a list of (key, direction) pairs + specifying the sort order for this query. See + :meth:`~pymongo.cursor.Cursor.sort` for details. + + Raises :class:`TypeError` if any of the arguments are of + improper type. Returns an instance of + :class:`~gridfs.grid_file.GridOutCursor` + corresponding to this query. + + .. versionchanged:: 3.0 + Removed the read_preference, tag_sets, and + secondary_acceptable_latency_ms options. + .. versionadded:: 2.7 + .. seealso:: The MongoDB documentation on `find <https://dochub.mongodb.org/core/find>`_. + """ + return GridOutCursor(self.__collection, *args, **kwargs) + + def exists( + self, + document_or_id: Optional[Any] = None, + session: Optional[ClientSession] = None, + **kwargs: Any + ) -> bool: + """Check if a file exists in this instance of :class:`GridFS`. + + The file to check for can be specified by the value of its + ``_id`` key, or by passing in a query document. A query + document can be passed in as dictionary, or by using keyword + arguments. Thus, the following three calls are equivalent: + + >>> fs.exists(file_id) + >>> fs.exists({"_id": file_id}) + >>> fs.exists(_id=file_id) + + As are the following two calls: + + >>> fs.exists({"filename": "mike.txt"}) + >>> fs.exists(filename="mike.txt") + + And the following two: + + >>> fs.exists({"foo": {"$gt": 12}}) + >>> fs.exists(foo={"$gt": 12}) + + Returns ``True`` if a matching file exists, ``False`` + otherwise. Calls to :meth:`exists` will not automatically + create appropriate indexes; application developers should be + sure to create indexes if needed and as appropriate. + + :Parameters: + - `document_or_id` (optional): query document, or _id of the + document to check for + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + - `**kwargs` (optional): keyword arguments are used as a + query document, if they're present. + + .. versionchanged:: 3.6 + Added ``session`` parameter. 
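The equivalences listed above, in runnable form with placeholder connection string and filenames::

    from pymongo import MongoClient
    from gridfs import GridFS

    fs = GridFS(MongoClient("mongodb://localhost:27017").my_database)  # placeholder

    file_id = fs.put(b"data", filename="mike.txt")

    # The three _id-based forms are interchangeable.
    assert fs.exists(file_id)
    assert fs.exists({"_id": file_id})
    assert fs.exists(_id=file_id)

    # Arbitrary query documents work too; note that exists() never
    # creates indexes on your behalf.
    assert fs.exists(filename="mike.txt")
    assert not fs.exists(filename="no-such-file.txt")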
+ """ + _disallow_transactions(session) + if kwargs: + f = self.__files.find_one(kwargs, ["_id"], session=session) + else: + f = self.__files.find_one(document_or_id, ["_id"], session=session) + + return f is not None + + +class GridFSBucket: + """An instance of GridFS on top of a single Database.""" + + def __init__( + self, + db: Database, + bucket_name: str = "fs", + chunk_size_bytes: int = DEFAULT_CHUNK_SIZE, + write_concern: Optional[WriteConcern] = None, + read_preference: Optional[_ServerMode] = None, + ) -> None: + """Create a new instance of :class:`GridFSBucket`. + + Raises :exc:`TypeError` if `database` is not an instance of + :class:`~pymongo.database.Database`. + + Raises :exc:`~pymongo.errors.ConfigurationError` if `write_concern` + is not acknowledged. + + :Parameters: + - `database`: database to use. + - `bucket_name` (optional): The name of the bucket. Defaults to 'fs'. + - `chunk_size_bytes` (optional): The chunk size in bytes. Defaults + to 255KB. + - `write_concern` (optional): The + :class:`~pymongo.write_concern.WriteConcern` to use. If ``None`` + (the default) db.write_concern is used. + - `read_preference` (optional): The read preference to use. If + ``None`` (the default) db.read_preference is used. + + .. versionchanged:: 4.0 + Removed the `disable_md5` parameter. See + :ref:`removed-gridfs-checksum` for details. + + .. versionchanged:: 3.11 + Running a GridFSBucket operation in a transaction now always raises + an error. GridFSBucket does not support multi-document transactions. + + .. versionchanged:: 3.7 + Added the `disable_md5` parameter. + + .. versionadded:: 3.1 + + .. seealso:: The MongoDB documentation on `gridfs <https://dochub.mongodb.org/core/gridfs>`_. + """ + if not isinstance(db, Database): + raise TypeError("database must be an instance of Database") + + db = _clear_entity_type_registry(db) + + wtc = write_concern if write_concern is not None else db.write_concern + if not wtc.acknowledged: + raise ConfigurationError("write concern must be acknowledged") + + self._bucket_name = bucket_name + self._collection = db[bucket_name] + self._chunks: Collection = self._collection.chunks.with_options( + write_concern=write_concern, read_preference=read_preference + ) + + self._files: Collection = self._collection.files.with_options( + write_concern=write_concern, read_preference=read_preference + ) + + self._chunk_size_bytes = chunk_size_bytes + self._timeout = db.client.options.timeout + + def open_upload_stream( + self, + filename: str, + chunk_size_bytes: Optional[int] = None, + metadata: Optional[Mapping[str, Any]] = None, + session: Optional[ClientSession] = None, + ) -> GridIn: + """Opens a Stream that the application can write the contents of the + file to. + + The user must specify the filename, and can choose to add any + additional information in the metadata field of the file document or + modify the chunk size. + For example:: + + my_db = MongoClient().test + fs = GridFSBucket(my_db) + with fs.open_upload_stream( + "test_file", chunk_size_bytes=4, + metadata={"contentType": "text/plain"}) as grid_in: + grid_in.write("data I want to store!") + # uploaded on close + + Returns an instance of :class:`~gridfs.grid_file.GridIn`. + + Raises :exc:`~gridfs.errors.NoFile` if no such version of + that file exists. + Raises :exc:`~ValueError` if `filename` is not a string. + + :Parameters: + - `filename`: The name of the file to upload. + - `chunk_size_bytes` (options): The number of bytes per chunk of this + file. 
Defaults to the chunk_size_bytes in :class:`GridFSBucket`. + - `metadata` (optional): User data for the 'metadata' field of the + files collection document. If not provided the metadata field will + be omitted from the files collection document. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + validate_string("filename", filename) + + opts = { + "filename": filename, + "chunk_size": ( + chunk_size_bytes if chunk_size_bytes is not None else self._chunk_size_bytes + ), + } + if metadata is not None: + opts["metadata"] = metadata + + return GridIn(self._collection, session=session, **opts) + + def open_upload_stream_with_id( + self, + file_id: Any, + filename: str, + chunk_size_bytes: Optional[int] = None, + metadata: Optional[Mapping[str, Any]] = None, + session: Optional[ClientSession] = None, + ) -> GridIn: + """Opens a Stream that the application can write the contents of the + file to. + + The user must specify the file id and filename, and can choose to add + any additional information in the metadata field of the file document + or modify the chunk size. + For example:: + + my_db = MongoClient().test + fs = GridFSBucket(my_db) + with fs.open_upload_stream_with_id( + ObjectId(), + "test_file", + chunk_size_bytes=4, + metadata={"contentType": "text/plain"}) as grid_in: + grid_in.write("data I want to store!") + # uploaded on close + + Returns an instance of :class:`~gridfs.grid_file.GridIn`. + + Raises :exc:`~gridfs.errors.NoFile` if no such version of + that file exists. + Raises :exc:`~ValueError` if `filename` is not a string. + + :Parameters: + - `file_id`: The id to use for this file. The id must not have + already been used for another file. + - `filename`: The name of the file to upload. + - `chunk_size_bytes` (options): The number of bytes per chunk of this + file. Defaults to the chunk_size_bytes in :class:`GridFSBucket`. + - `metadata` (optional): User data for the 'metadata' field of the + files collection document. If not provided the metadata field will + be omitted from the files collection document. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + validate_string("filename", filename) + + opts = { + "_id": file_id, + "filename": filename, + "chunk_size": ( + chunk_size_bytes if chunk_size_bytes is not None else self._chunk_size_bytes + ), + } + if metadata is not None: + opts["metadata"] = metadata + + return GridIn(self._collection, session=session, **opts) + + @_csot.apply + def upload_from_stream( + self, + filename: str, + source: Any, + chunk_size_bytes: Optional[int] = None, + metadata: Optional[Mapping[str, Any]] = None, + session: Optional[ClientSession] = None, + ) -> ObjectId: + """Uploads a user file to a GridFS bucket. + + Reads the contents of the user file from `source` and uploads + it to the file `filename`. Source can be a string or file-like object. + For example:: + + my_db = MongoClient().test + fs = GridFSBucket(my_db) + file_id = fs.upload_from_stream( + "test_file", + "data I want to store!", + chunk_size_bytes=4, + metadata={"contentType": "text/plain"}) + + Returns the _id of the uploaded file. + + Raises :exc:`~gridfs.errors.NoFile` if no such version of + that file exists. + Raises :exc:`~ValueError` if `filename` is not a string. + + :Parameters: + - `filename`: The name of the file to upload. + - `source`: The source stream of the content to be uploaded. 
Must be + a file-like object that implements :meth:`read` or a string. + - `chunk_size_bytes` (options): The number of bytes per chunk of this + file. Defaults to the chunk_size_bytes of :class:`GridFSBucket`. + - `metadata` (optional): User data for the 'metadata' field of the + files collection document. If not provided the metadata field will + be omitted from the files collection document. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + with self.open_upload_stream(filename, chunk_size_bytes, metadata, session=session) as gin: + gin.write(source) + + return cast(ObjectId, gin._id) + + @_csot.apply + def upload_from_stream_with_id( + self, + file_id: Any, + filename: str, + source: Any, + chunk_size_bytes: Optional[int] = None, + metadata: Optional[Mapping[str, Any]] = None, + session: Optional[ClientSession] = None, + ) -> None: + """Uploads a user file to a GridFS bucket with a custom file id. + + Reads the contents of the user file from `source` and uploads + it to the file `filename`. Source can be a string or file-like object. + For example:: + + my_db = MongoClient().test + fs = GridFSBucket(my_db) + file_id = fs.upload_from_stream( + ObjectId(), + "test_file", + "data I want to store!", + chunk_size_bytes=4, + metadata={"contentType": "text/plain"}) + + Raises :exc:`~gridfs.errors.NoFile` if no such version of + that file exists. + Raises :exc:`~ValueError` if `filename` is not a string. + + :Parameters: + - `file_id`: The id to use for this file. The id must not have + already been used for another file. + - `filename`: The name of the file to upload. + - `source`: The source stream of the content to be uploaded. Must be + a file-like object that implements :meth:`read` or a string. + - `chunk_size_bytes` (options): The number of bytes per chunk of this + file. Defaults to the chunk_size_bytes of :class:`GridFSBucket`. + - `metadata` (optional): User data for the 'metadata' field of the + files collection document. If not provided the metadata field will + be omitted from the files collection document. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + with self.open_upload_stream_with_id( + file_id, filename, chunk_size_bytes, metadata, session=session + ) as gin: + gin.write(source) + + def open_download_stream( + self, file_id: Any, session: Optional[ClientSession] = None + ) -> GridOut: + """Opens a Stream from which the application can read the contents of + the stored file specified by file_id. + + For example:: + + my_db = MongoClient().test + fs = GridFSBucket(my_db) + # get _id of file to read. + file_id = fs.upload_from_stream("test_file", "data I want to store!") + grid_out = fs.open_download_stream(file_id) + contents = grid_out.read() + + Returns an instance of :class:`~gridfs.grid_file.GridOut`. + + Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. + + :Parameters: + - `file_id`: The _id of the file to be downloaded. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + gout = GridOut(self._collection, file_id, session=session) + + # Raise NoFile now, instead of on first attribute access. 
+ gout._ensure_file() + return gout + + @_csot.apply + def download_to_stream( + self, file_id: Any, destination: Any, session: Optional[ClientSession] = None + ) -> None: + """Downloads the contents of the stored file specified by file_id and + writes the contents to `destination`. + + For example:: + + my_db = MongoClient().test + fs = GridFSBucket(my_db) + # Get _id of file to read + file_id = fs.upload_from_stream("test_file", "data I want to store!") + # Get file to write to + file = open('myfile','wb+') + fs.download_to_stream(file_id, file) + file.seek(0) + contents = file.read() + + Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. + + :Parameters: + - `file_id`: The _id of the file to be downloaded. + - `destination`: a file-like object implementing :meth:`write`. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + with self.open_download_stream(file_id, session=session) as gout: + while True: + chunk = gout.readchunk() + if not len(chunk): + break + destination.write(chunk) + + @_csot.apply + def delete(self, file_id: Any, session: Optional[ClientSession] = None) -> None: + """Given an file_id, delete this stored file's files collection document + and associated chunks from a GridFS bucket. + + For example:: + + my_db = MongoClient().test + fs = GridFSBucket(my_db) + # Get _id of file to delete + file_id = fs.upload_from_stream("test_file", "data I want to store!") + fs.delete(file_id) + + Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. + + :Parameters: + - `file_id`: The _id of the file to be deleted. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + _disallow_transactions(session) + res = self._files.delete_one({"_id": file_id}, session=session) + self._chunks.delete_many({"files_id": file_id}, session=session) + if not res.deleted_count: + raise NoFile("no file could be deleted because none matched %s" % file_id) + + def find(self, *args: Any, **kwargs: Any) -> GridOutCursor: + """Find and return the files collection documents that match ``filter`` + + Returns a cursor that iterates across files matching + arbitrary queries on the files collection. Can be combined + with other modifiers for additional control. + + For example:: + + for grid_data in fs.find({"filename": "lisa.txt"}, + no_cursor_timeout=True): + data = grid_data.read() + + would iterate through all versions of "lisa.txt" stored in GridFS. + Note that setting no_cursor_timeout to True may be important to + prevent the cursor from timing out during long multi-file processing + work. + + As another example, the call:: + + most_recent_three = fs.find().sort("uploadDate", -1).limit(3) + + would return a cursor to the three most recently uploaded files + in GridFS. + + Follows a similar interface to + :meth:`~pymongo.collection.Collection.find` + in :class:`~pymongo.collection.Collection`. + + If a :class:`~pymongo.client_session.ClientSession` is passed to + :meth:`find`, all returned :class:`~gridfs.grid_file.GridOut` instances + are associated with that session. + + :Parameters: + - `filter`: Search query. + - `batch_size` (optional): The number of documents to return per + batch. + - `limit` (optional): The maximum number of documents to return. 
+ - `no_cursor_timeout` (optional): The server normally times out idle + cursors after an inactivity period (10 minutes) to prevent excess + memory use. Set this option to True prevent that. + - `skip` (optional): The number of documents to skip before + returning. + - `sort` (optional): The order by which to sort results. Defaults to + None. + """ + return GridOutCursor(self._collection, *args, **kwargs) + + def open_download_stream_by_name( + self, filename: str, revision: int = -1, session: Optional[ClientSession] = None + ) -> GridOut: + """Opens a Stream from which the application can read the contents of + `filename` and optional `revision`. + + For example:: + + my_db = MongoClient().test + fs = GridFSBucket(my_db) + grid_out = fs.open_download_stream_by_name("test_file") + contents = grid_out.read() + + Returns an instance of :class:`~gridfs.grid_file.GridOut`. + + Raises :exc:`~gridfs.errors.NoFile` if no such version of + that file exists. + + Raises :exc:`~ValueError` filename is not a string. + + :Parameters: + - `filename`: The name of the file to read from. + - `revision` (optional): Which revision (documents with the same + filename and different uploadDate) of the file to retrieve. + Defaults to -1 (the most recent revision). + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + + :Note: Revision numbers are defined as follows: + + - 0 = the original stored file + - 1 = the first revision + - 2 = the second revision + - etc... + - -2 = the second most recent revision + - -1 = the most recent revision + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + validate_string("filename", filename) + query = {"filename": filename} + _disallow_transactions(session) + cursor = self._files.find(query, session=session) + if revision < 0: + skip = abs(revision) - 1 + cursor.limit(-1).skip(skip).sort("uploadDate", DESCENDING) + else: + cursor.limit(-1).skip(revision).sort("uploadDate", ASCENDING) + try: + grid_file = next(cursor) + return GridOut(self._collection, file_document=grid_file, session=session) + except StopIteration: + raise NoFile("no version %d for filename %r" % (revision, filename)) + + @_csot.apply + def download_to_stream_by_name( + self, + filename: str, + destination: Any, + revision: int = -1, + session: Optional[ClientSession] = None, + ) -> None: + """Write the contents of `filename` (with optional `revision`) to + `destination`. + + For example:: + + my_db = MongoClient().test + fs = GridFSBucket(my_db) + # Get file to write to + file = open('myfile','wb') + fs.download_to_stream_by_name("test_file", file) + + Raises :exc:`~gridfs.errors.NoFile` if no such version of + that file exists. + + Raises :exc:`~ValueError` if `filename` is not a string. + + :Parameters: + - `filename`: The name of the file to read from. + - `destination`: A file-like object that implements :meth:`write`. + - `revision` (optional): Which revision (documents with the same + filename and different uploadDate) of the file to retrieve. + Defaults to -1 (the most recent revision). + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + + :Note: Revision numbers are defined as follows: + + - 0 = the original stored file + - 1 = the first revision + - 2 = the second revision + - etc... + - -2 = the second most recent revision + - -1 = the most recent revision + + .. versionchanged:: 3.6 + Added ``session`` parameter. 
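A hedged sketch tying the bucket's upload path to the by-name download path (placeholder connection string and payloads; the sleep only keeps the two ``uploadDate`` values distinct)::

    import time

    from pymongo import MongoClient
    from gridfs import GridFSBucket

    bucket = GridFSBucket(MongoClient("mongodb://localhost:27017").my_database)

    # Two revisions of the same logical file.
    bucket.upload_from_stream("config.json", b'{"debug": false}')
    time.sleep(0.01)
    bucket.upload_from_stream("config.json", b'{"debug": true}')

    # revision=-1 (the default) is the newest; revision=0 the original.
    with bucket.open_download_stream_by_name("config.json") as newest:
        assert newest.read() == b'{"debug": true}'
    with bucket.open_download_stream_by_name("config.json", revision=0) as first:
        assert first.read() == b'{"debug": false}'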
+ """ + with self.open_download_stream_by_name(filename, revision, session=session) as gout: + while True: + chunk = gout.readchunk() + if not len(chunk): + break + destination.write(chunk) + + def rename( + self, file_id: Any, new_filename: str, session: Optional[ClientSession] = None + ) -> None: + """Renames the stored file with the specified file_id. + + For example:: + + my_db = MongoClient().test + fs = GridFSBucket(my_db) + # Get _id of file to rename + file_id = fs.upload_from_stream("test_file", "data I want to store!") + fs.rename(file_id, "new_test_name") + + Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists. + + :Parameters: + - `file_id`: The _id of the file to be renamed. + - `new_filename`: The new name of the file. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + _disallow_transactions(session) + result = self._files.update_one( + {"_id": file_id}, {"$set": {"filename": new_filename}}, session=session + ) + if not result.matched_count: + raise NoFile( + "no files could be renamed %r because none " + "matched file_id %i" % (new_filename, file_id) + ) diff --git a/backend/test/lib/python3.8/site-packages/gridfs/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/gridfs/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0b1d9f7aab493cd211e1e5def2a71944c962bdd9 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/gridfs/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/gridfs/__pycache__/errors.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/gridfs/__pycache__/errors.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..00e68e662bf8b9c6df67fece6083b64c6a8eb848 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/gridfs/__pycache__/errors.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/gridfs/__pycache__/grid_file.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/gridfs/__pycache__/grid_file.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4e246d3d597863bb24a09f8c989e27683531d8e4 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/gridfs/__pycache__/grid_file.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/gridfs/errors.py b/backend/test/lib/python3.8/site-packages/gridfs/errors.py new file mode 100644 index 0000000000000000000000000000000000000000..39736d55b3153e0a29fbab3a2c833316d5ed50a7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/gridfs/errors.py @@ -0,0 +1,33 @@ +# Copyright 2009-2015 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Exceptions raised by the :mod:`gridfs` package""" + +from pymongo.errors import PyMongoError + + +class GridFSError(PyMongoError): + """Base class for all GridFS exceptions.""" + + +class CorruptGridFile(GridFSError): + """Raised when a file in :class:`~gridfs.GridFS` is malformed.""" + + +class NoFile(GridFSError): + """Raised when trying to read from a non-existent file.""" + + +class FileExists(GridFSError): + """Raised when trying to create a file that already exists.""" diff --git a/backend/test/lib/python3.8/site-packages/gridfs/grid_file.py b/backend/test/lib/python3.8/site-packages/gridfs/grid_file.py new file mode 100644 index 0000000000000000000000000000000000000000..7a09f35ef01e144c60d9d6cc5e41bf2e9a7df7c2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/gridfs/grid_file.py @@ -0,0 +1,908 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for representing files stored in GridFS.""" +import datetime +import io +import math +import os +from typing import Any, Iterable, List, Mapping, NoReturn, Optional + +from bson.binary import Binary +from bson.int64 import Int64 +from bson.objectid import ObjectId +from bson.son import SON +from gridfs.errors import CorruptGridFile, FileExists, NoFile +from pymongo import ASCENDING +from pymongo.client_session import ClientSession +from pymongo.collection import Collection +from pymongo.cursor import Cursor +from pymongo.errors import ( + ConfigurationError, + CursorNotFound, + DuplicateKeyError, + InvalidOperation, + OperationFailure, +) +from pymongo.read_preferences import ReadPreference + +_SEEK_SET = os.SEEK_SET +_SEEK_CUR = os.SEEK_CUR +_SEEK_END = os.SEEK_END + +EMPTY = b"" +NEWLN = b"\n" + +"""Default chunk size, in bytes.""" +# Slightly under a power of 2, to work well with server's record allocations. +DEFAULT_CHUNK_SIZE = 255 * 1024 + +_C_INDEX: SON[str, Any] = SON([("files_id", ASCENDING), ("n", ASCENDING)]) +_F_INDEX: SON[str, Any] = SON([("filename", ASCENDING), ("uploadDate", ASCENDING)]) + + +def _grid_in_property( + field_name: str, + docstring: str, + read_only: Optional[bool] = False, + closed_only: Optional[bool] = False, +) -> Any: + """Create a GridIn property.""" + + def getter(self: Any) -> Any: + if closed_only and not self._closed: + raise AttributeError("can only get %r on a closed file" % field_name) + # Protect against PHP-237 + if field_name == "length": + return self._file.get(field_name, 0) + return self._file.get(field_name, None) + + def setter(self: Any, value: Any) -> Any: + if self._closed: + self._coll.files.update_one({"_id": self._file["_id"]}, {"$set": {field_name: value}}) + self._file[field_name] = value + + if read_only: + docstring += "\n\nThis attribute is read-only." 
+ elif closed_only: + docstring = "{}\n\n{}".format( + docstring, + "This attribute is read-only and " + "can only be read after :meth:`close` " + "has been called.", + ) + + if not read_only and not closed_only: + return property(getter, setter, doc=docstring) + return property(getter, doc=docstring) + + +def _grid_out_property(field_name: str, docstring: str) -> Any: + """Create a GridOut property.""" + + def getter(self: Any) -> Any: + self._ensure_file() + + # Protect against PHP-237 + if field_name == "length": + return self._file.get(field_name, 0) + return self._file.get(field_name, None) + + docstring += "\n\nThis attribute is read-only." + return property(getter, doc=docstring) + + +def _clear_entity_type_registry(entity: Any, **kwargs: Any) -> Any: + """Clear the given database/collection object's type registry.""" + codecopts = entity.codec_options.with_options(type_registry=None) + return entity.with_options(codec_options=codecopts, **kwargs) + + +def _disallow_transactions(session: Optional[ClientSession]) -> None: + if session and session.in_transaction: + raise InvalidOperation("GridFS does not support multi-document transactions") + + +class GridIn: + """Class to write data to GridFS.""" + + def __init__( + self, root_collection: Collection, session: Optional[ClientSession] = None, **kwargs: Any + ) -> None: + """Write a file to GridFS + + Application developers should generally not need to + instantiate this class directly - instead see the methods + provided by :class:`~gridfs.GridFS`. + + Raises :class:`TypeError` if `root_collection` is not an + instance of :class:`~pymongo.collection.Collection`. + + Any of the file level options specified in the `GridFS Spec + <http://dochub.mongodb.org/core/gridfsspec>`_ may be passed as + keyword arguments. Any additional keyword arguments will be + set as additional fields on the file document. Valid keyword + arguments include: + + - ``"_id"``: unique ID for this file (default: + :class:`~bson.objectid.ObjectId`) - this ``"_id"`` must + not have already been used for another file + + - ``"filename"``: human name for the file + + - ``"contentType"`` or ``"content_type"``: valid mime-type + for the file + + - ``"chunkSize"`` or ``"chunk_size"``: size of each of the + chunks, in bytes (default: 255 kb) + + - ``"encoding"``: encoding used for this file. Any :class:`str` + that is written to the file will be converted to :class:`bytes`. + + :Parameters: + - `root_collection`: root collection to write to + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` to use for all + commands + - `**kwargs: Any` (optional): file level options (see above) + + .. versionchanged:: 4.0 + Removed the `disable_md5` parameter. See + :ref:`removed-gridfs-checksum` for details. + + .. versionchanged:: 3.7 + Added the `disable_md5` parameter. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. 
versionchanged:: 3.0 + `root_collection` must use an acknowledged + :attr:`~pymongo.collection.Collection.write_concern` + """ + if not isinstance(root_collection, Collection): + raise TypeError("root_collection must be an instance of Collection") + + if not root_collection.write_concern.acknowledged: + raise ConfigurationError("root_collection must use acknowledged write_concern") + _disallow_transactions(session) + + # Handle alternative naming + if "content_type" in kwargs: + kwargs["contentType"] = kwargs.pop("content_type") + if "chunk_size" in kwargs: + kwargs["chunkSize"] = kwargs.pop("chunk_size") + + coll = _clear_entity_type_registry(root_collection, read_preference=ReadPreference.PRIMARY) + + # Defaults + kwargs["_id"] = kwargs.get("_id", ObjectId()) + kwargs["chunkSize"] = kwargs.get("chunkSize", DEFAULT_CHUNK_SIZE) + object.__setattr__(self, "_session", session) + object.__setattr__(self, "_coll", coll) + object.__setattr__(self, "_chunks", coll.chunks) + object.__setattr__(self, "_file", kwargs) + object.__setattr__(self, "_buffer", io.BytesIO()) + object.__setattr__(self, "_position", 0) + object.__setattr__(self, "_chunk_number", 0) + object.__setattr__(self, "_closed", False) + object.__setattr__(self, "_ensured_index", False) + + def __create_index(self, collection: Collection, index_key: Any, unique: bool) -> None: + doc = collection.find_one(projection={"_id": 1}, session=self._session) + if doc is None: + try: + index_keys = [ + index_spec["key"] + for index_spec in collection.list_indexes(session=self._session) + ] + except OperationFailure: + index_keys = [] + if index_key not in index_keys: + collection.create_index(index_key.items(), unique=unique, session=self._session) + + def __ensure_indexes(self) -> None: + if not object.__getattribute__(self, "_ensured_index"): + _disallow_transactions(self._session) + self.__create_index(self._coll.files, _F_INDEX, False) + self.__create_index(self._coll.chunks, _C_INDEX, True) + object.__setattr__(self, "_ensured_index", True) + + def abort(self) -> None: + """Remove all chunks/files that may have been uploaded and close.""" + self._coll.chunks.delete_many({"files_id": self._file["_id"]}, session=self._session) + self._coll.files.delete_one({"_id": self._file["_id"]}, session=self._session) + object.__setattr__(self, "_closed", True) + + @property + def closed(self) -> bool: + """Is this file closed?""" + return self._closed + + _id: Any = _grid_in_property("_id", "The ``'_id'`` value for this file.", read_only=True) + filename: Optional[str] = _grid_in_property("filename", "Name of this file.") + name: Optional[str] = _grid_in_property("filename", "Alias for `filename`.") + content_type: Optional[str] = _grid_in_property("contentType", "Mime-type for this file.") + length: int = _grid_in_property("length", "Length (in bytes) of this file.", closed_only=True) + chunk_size: int = _grid_in_property("chunkSize", "Chunk size for this file.", read_only=True) + upload_date: datetime.datetime = _grid_in_property( + "uploadDate", "Date that this file was uploaded.", closed_only=True + ) + md5: Optional[str] = _grid_in_property( + "md5", "MD5 of the contents of this file if an md5 sum was created.", closed_only=True + ) + + _buffer: io.BytesIO + _closed: bool + + def __getattr__(self, name: str) -> Any: + if name in self._file: + return self._file[name] + raise AttributeError("GridIn object has no attribute '%s'" % name) + + def __setattr__(self, name: str, value: Any) -> None: + # For properties of this instance like _buffer, or 
descriptors set on + # the class like filename, use regular __setattr__ + if name in self.__dict__ or name in self.__class__.__dict__: + object.__setattr__(self, name, value) + else: + # All other attributes are part of the document in db.fs.files. + # Store them to be sent to server on close() or if closed, send + # them now. + self._file[name] = value + if self._closed: + self._coll.files.update_one({"_id": self._file["_id"]}, {"$set": {name: value}}) + + def __flush_data(self, data: Any) -> None: + """Flush `data` to a chunk.""" + self.__ensure_indexes() + if not data: + return + assert len(data) <= self.chunk_size + + chunk = {"files_id": self._file["_id"], "n": self._chunk_number, "data": Binary(data)} + + try: + self._chunks.insert_one(chunk, session=self._session) + except DuplicateKeyError: + self._raise_file_exists(self._file["_id"]) + self._chunk_number += 1 + self._position += len(data) + + def __flush_buffer(self) -> None: + """Flush the buffer contents out to a chunk.""" + self.__flush_data(self._buffer.getvalue()) + self._buffer.close() + self._buffer = io.BytesIO() + + def __flush(self) -> Any: + """Flush the file to the database.""" + try: + self.__flush_buffer() + # The GridFS spec says length SHOULD be an Int64. + self._file["length"] = Int64(self._position) + self._file["uploadDate"] = datetime.datetime.now(tz=datetime.timezone.utc) + + return self._coll.files.insert_one(self._file, session=self._session) + except DuplicateKeyError: + self._raise_file_exists(self._id) + + def _raise_file_exists(self, file_id: Any) -> NoReturn: + """Raise a FileExists exception for the given file_id.""" + raise FileExists("file with _id %r already exists" % file_id) + + def close(self) -> None: + """Flush the file and close it. + + A closed file cannot be written any more. Calling + :meth:`close` more than once is allowed. + """ + if not self._closed: + self.__flush() + object.__setattr__(self, "_closed", True) + + def read(self, size: int = -1) -> NoReturn: + raise io.UnsupportedOperation("read") + + def readable(self) -> bool: + return False + + def seekable(self) -> bool: + return False + + def write(self, data: Any) -> None: + """Write data to the file. There is no return value. + + `data` can be either a string of bytes or a file-like object + (implementing :meth:`read`). If the file has an + :attr:`encoding` attribute, `data` can also be a + :class:`str` instance, which will be encoded as + :attr:`encoding` before being written. + + Due to buffering, the data may not actually be written to the + database until the :meth:`close` method is called. Raises + :class:`ValueError` if this file is already closed. Raises + :class:`TypeError` if `data` is not an instance of + :class:`bytes`, a file-like object, or an instance of :class:`str`. + Unicode data is only allowed if the file has an :attr:`encoding` + attribute. 
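A hedged sketch of those input types and the buffering behaviour (placeholder connection string; ``db.fs`` is the default root collection)::

    import io

    from pymongo import MongoClient
    from gridfs import GridIn

    db = MongoClient("mongodb://localhost:27017").my_database  # placeholder
    grid_in = GridIn(db.fs, filename="streamed.txt", encoding="utf-8")

    # str input is encoded with the file's "encoding" option;
    # bytes and file-like objects are written as-is.
    grid_in.write("first line\n")
    grid_in.write(io.BytesIO(b"second line\n"))

    # Chunks are buffered in memory and the files document is only
    # inserted once the file is closed.
    grid_in.close()
    assert grid_in.length == len(b"first line\nsecond line\n")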
+ + :Parameters: + - `data`: string of bytes or file-like object to be written + to the file + """ + if self._closed: + raise ValueError("cannot write to a closed file") + + try: + # file-like + read = data.read + except AttributeError: + # string + if not isinstance(data, (str, bytes)): + raise TypeError("can only write strings or file-like objects") + if isinstance(data, str): + try: + data = data.encode(self.encoding) + except AttributeError: + raise TypeError("must specify an encoding for file in order to write str") + read = io.BytesIO(data).read + + if self._buffer.tell() > 0: + # Make sure to flush only when _buffer is complete + space = self.chunk_size - self._buffer.tell() + if space: + try: + to_write = read(space) + except BaseException: + self.abort() + raise + self._buffer.write(to_write) + if len(to_write) < space: + return # EOF or incomplete + self.__flush_buffer() + to_write = read(self.chunk_size) + while to_write and len(to_write) == self.chunk_size: + self.__flush_data(to_write) + to_write = read(self.chunk_size) + self._buffer.write(to_write) + + def writelines(self, sequence: Iterable[Any]) -> None: + """Write a sequence of strings to the file. + + Does not add separators. + """ + for line in sequence: + self.write(line) + + def writeable(self) -> bool: + return True + + def __enter__(self) -> "GridIn": + """Support for the context manager protocol.""" + return self + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> Any: + """Support for the context manager protocol. + + Close the file if no exceptions occur and allow exceptions to propagate. + """ + if exc_type is None: + # No exceptions happened. + self.close() + else: + # Something happened, at minimum mark as closed. + object.__setattr__(self, "_closed", True) + + # propagate exceptions + return False + + +class GridOut(io.IOBase): + """Class to read data out of GridFS.""" + + def __init__( + self, + root_collection: Collection, + file_id: Optional[int] = None, + file_document: Optional[Any] = None, + session: Optional[ClientSession] = None, + ) -> None: + """Read a file from GridFS + + Application developers should generally not need to + instantiate this class directly - instead see the methods + provided by :class:`~gridfs.GridFS`. + + Either `file_id` or `file_document` must be specified, + `file_document` will be given priority if present. Raises + :class:`TypeError` if `root_collection` is not an instance of + :class:`~pymongo.collection.Collection`. + + :Parameters: + - `root_collection`: root collection to read from + - `file_id` (optional): value of ``"_id"`` for the file to read + - `file_document` (optional): file document from + `root_collection.files` + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` to use for all + commands + + .. versionchanged:: 3.8 + For better performance and to better follow the GridFS spec, + :class:`GridOut` now uses a single cursor to read all the chunks in + the file. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. versionchanged:: 3.0 + Creating a GridOut does not immediately retrieve the file metadata + from the server. Metadata is fetched when first needed. 
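That lazy-fetch behaviour is easy to observe; a minimal sketch under the usual placeholder-deployment assumptions::

    from bson.objectid import ObjectId
    from pymongo import MongoClient
    from gridfs import GridOut, NoFile

    db = MongoClient("mongodb://localhost:27017").my_database  # placeholder

    # Constructing a GridOut does not touch the server...
    missing = GridOut(db.fs, file_id=ObjectId())

    # ...the files document is only fetched on first metadata access,
    # which is where NoFile surfaces for an unknown _id.
    try:
        _ = missing.length
    except NoFile:
        pass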
+ """ + if not isinstance(root_collection, Collection): + raise TypeError("root_collection must be an instance of Collection") + _disallow_transactions(session) + + root_collection = _clear_entity_type_registry(root_collection) + + super().__init__() + + self.__chunks = root_collection.chunks + self.__files = root_collection.files + self.__file_id = file_id + self.__buffer = EMPTY + # Start position within the current buffered chunk. + self.__buffer_pos = 0 + self.__chunk_iter = None + # Position within the total file. + self.__position = 0 + self._file = file_document + self._session = session + + _id: Any = _grid_out_property("_id", "The ``'_id'`` value for this file.") + filename: str = _grid_out_property("filename", "Name of this file.") + name: str = _grid_out_property("filename", "Alias for `filename`.") + content_type: Optional[str] = _grid_out_property("contentType", "Mime-type for this file.") + length: int = _grid_out_property("length", "Length (in bytes) of this file.") + chunk_size: int = _grid_out_property("chunkSize", "Chunk size for this file.") + upload_date: datetime.datetime = _grid_out_property( + "uploadDate", "Date that this file was first uploaded." + ) + aliases: Optional[List[str]] = _grid_out_property("aliases", "List of aliases for this file.") + metadata: Optional[Mapping[str, Any]] = _grid_out_property( + "metadata", "Metadata attached to this file." + ) + md5: Optional[str] = _grid_out_property( + "md5", "MD5 of the contents of this file if an md5 sum was created." + ) + + _file: Any + __chunk_iter: Any + + def _ensure_file(self) -> None: + if not self._file: + _disallow_transactions(self._session) + self._file = self.__files.find_one({"_id": self.__file_id}, session=self._session) + if not self._file: + raise NoFile( + f"no file in gridfs collection {self.__files!r} with _id {self.__file_id!r}" + ) + + def __getattr__(self, name: str) -> Any: + self._ensure_file() + if name in self._file: + return self._file[name] + raise AttributeError("GridOut object has no attribute '%s'" % name) + + def readable(self) -> bool: + return True + + def readchunk(self) -> bytes: + """Reads a chunk at a time. If the current position is within a + chunk the remainder of the chunk is returned. + """ + received = len(self.__buffer) - self.__buffer_pos + chunk_data = EMPTY + chunk_size = int(self.chunk_size) + + if received > 0: + chunk_data = self.__buffer[self.__buffer_pos :] + elif self.__position < int(self.length): + chunk_number = int((received + self.__position) / chunk_size) + if self.__chunk_iter is None: + self.__chunk_iter = _GridOutChunkIterator( + self, self.__chunks, self._session, chunk_number + ) + + chunk = self.__chunk_iter.next() + chunk_data = chunk["data"][self.__position % chunk_size :] + + if not chunk_data: + raise CorruptGridFile("truncated chunk") + + self.__position += len(chunk_data) + self.__buffer = EMPTY + self.__buffer_pos = 0 + return chunk_data + + def _read_size_or_line(self, size: int = -1, line: bool = False) -> bytes: + """Internal read() and readline() helper.""" + self._ensure_file() + remainder = int(self.length) - self.__position + if size < 0 or size > remainder: + size = remainder + + if size == 0: + return EMPTY + + received = 0 + data = [] + while received < size: + needed = size - received + if self.__buffer: + # Optimization: Read the buffer with zero byte copies. 
+ buf = self.__buffer + chunk_start = self.__buffer_pos + chunk_data = memoryview(buf)[self.__buffer_pos :] + self.__buffer = EMPTY + self.__buffer_pos = 0 + self.__position += len(chunk_data) + else: + buf = self.readchunk() + chunk_start = 0 + chunk_data = memoryview(buf) + if line: + pos = buf.find(NEWLN, chunk_start, chunk_start + needed) - chunk_start + if pos >= 0: + # Decrease size to exit the loop. + size = received + pos + 1 + needed = pos + 1 + if len(chunk_data) > needed: + data.append(chunk_data[:needed]) + # Optimization: Save the buffer with zero byte copies. + self.__buffer = buf + self.__buffer_pos = chunk_start + needed + self.__position -= len(self.__buffer) - self.__buffer_pos + else: + data.append(chunk_data) + received += len(chunk_data) + + # Detect extra chunks after reading the entire file. + if size == remainder and self.__chunk_iter: + try: + self.__chunk_iter.next() + except StopIteration: + pass + + return b"".join(data) + + def read(self, size: int = -1) -> bytes: + """Read at most `size` bytes from the file (less if there + isn't enough data). + + The bytes are returned as an instance of :class:`bytes` + If `size` is negative or omitted all data is read. + + :Parameters: + - `size` (optional): the number of bytes to read + + .. versionchanged:: 3.8 + This method now only checks for extra chunks after reading the + entire file. Previously, this method would check for extra chunks + on every call. + """ + return self._read_size_or_line(size=size) + + def readline(self, size: int = -1) -> bytes: # type: ignore[override] + """Read one line or up to `size` bytes from the file. + + :Parameters: + - `size` (optional): the maximum number of bytes to read + """ + return self._read_size_or_line(size=size, line=True) + + def tell(self) -> int: + """Return the current position of this file.""" + return self.__position + + def seek(self, pos: int, whence: int = _SEEK_SET) -> int: + """Set the current position of this file. + + :Parameters: + - `pos`: the position (or offset if using relative + positioning) to seek to + - `whence` (optional): where to seek + from. :attr:`os.SEEK_SET` (``0``) for absolute file + positioning, :attr:`os.SEEK_CUR` (``1``) to seek relative + to the current position, :attr:`os.SEEK_END` (``2``) to + seek relative to the file's end. + + .. versionchanged:: 4.1 + The method now returns the new position in the file, to + conform to the behavior of :meth:`io.IOBase.seek`. + """ + if whence == _SEEK_SET: + new_pos = pos + elif whence == _SEEK_CUR: + new_pos = self.__position + pos + elif whence == _SEEK_END: + new_pos = int(self.length) + pos + else: + raise OSError(22, "Invalid value for `whence`") + + if new_pos < 0: + raise OSError(22, "Invalid value for `pos` - must be positive") + + # Optimization, continue using the same buffer and chunk iterator. + if new_pos == self.__position: + return new_pos + + self.__position = new_pos + self.__buffer = EMPTY + self.__buffer_pos = 0 + if self.__chunk_iter: + self.__chunk_iter.close() + self.__chunk_iter = None + return new_pos + + def seekable(self) -> bool: + return True + + def __iter__(self) -> "GridOut": + """Return an iterator over all of this file's data. + + The iterator will return lines (delimited by ``b'\\n'``) of + :class:`bytes`. This can be useful when serving files + using a webserver that handles such an iterator efficiently. + + .. versionchanged:: 3.8 + The iterator now raises :class:`CorruptGridFile` when encountering + any truncated, missing, or extra chunk in a file. 
The previous + behavior was to only raise :class:`CorruptGridFile` on a missing + chunk. + + .. versionchanged:: 4.0 + The iterator now iterates over *lines* in the file, instead + of chunks, to conform to the base class :py:class:`io.IOBase`. + Use :meth:`GridOut.readchunk` to read chunk by chunk instead + of line by line. + """ + return self + + def close(self) -> None: + """Make GridOut more generically file-like.""" + if self.__chunk_iter: + self.__chunk_iter.close() + self.__chunk_iter = None + super().close() + + def write(self, value: Any) -> NoReturn: + raise io.UnsupportedOperation("write") + + def writelines(self, lines: Any) -> NoReturn: + raise io.UnsupportedOperation("writelines") + + def writable(self) -> bool: + return False + + def __enter__(self) -> "GridOut": + """Makes it possible to use :class:`GridOut` files + with the context manager protocol. + """ + return self + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> Any: + """Makes it possible to use :class:`GridOut` files + with the context manager protocol. + """ + self.close() + return False + + def fileno(self) -> NoReturn: + raise io.UnsupportedOperation("fileno") + + def flush(self) -> None: + # GridOut is read-only, so flush does nothing. + pass + + def isatty(self) -> bool: + return False + + def truncate(self, size: Optional[int] = None) -> NoReturn: + # See https://docs.python.org/3/library/io.html#io.IOBase.writable + # for why truncate has to raise. + raise io.UnsupportedOperation("truncate") + + # Override IOBase.__del__ otherwise it will lead to __getattr__ on + # __IOBase_closed which calls _ensure_file and potentially performs I/O. + # We cannot do I/O in __del__ since it can lead to a deadlock. + def __del__(self) -> None: + pass + + +class _GridOutChunkIterator: + """Iterates over a file's chunks using a single cursor. + + Raises CorruptGridFile when encountering any truncated, missing, or extra + chunk in a file. + """ + + def __init__( + self, + grid_out: GridOut, + chunks: Collection, + session: Optional[ClientSession], + next_chunk: Any, + ) -> None: + self._id = grid_out._id + self._chunk_size = int(grid_out.chunk_size) + self._length = int(grid_out.length) + self._chunks = chunks + self._session = session + self._next_chunk = next_chunk + self._num_chunks = math.ceil(float(self._length) / self._chunk_size) + self._cursor = None + + _cursor: Optional[Cursor] + + def expected_chunk_length(self, chunk_n: int) -> int: + if chunk_n < self._num_chunks - 1: + return self._chunk_size + return self._length - (self._chunk_size * (self._num_chunks - 1)) + + def __iter__(self) -> "_GridOutChunkIterator": + return self + + def _create_cursor(self) -> None: + filter = {"files_id": self._id} + if self._next_chunk > 0: + filter["n"] = {"$gte": self._next_chunk} + _disallow_transactions(self._session) + self._cursor = self._chunks.find(filter, sort=[("n", 1)], session=self._session) + + def _next_with_retry(self) -> Mapping[str, Any]: + """Return the next chunk and retry once on CursorNotFound. + + We retry on CursorNotFound to maintain backwards compatibility in + cases where two calls to read occur more than 10 minutes apart (the + server's default cursor timeout). 
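The validation arithmetic this iterator relies on is plain integer math; for instance, for a hypothetical 1000-byte file stored with a 255-byte chunk size::

    import math

    # Mirrors expected_chunk_length(): every chunk is chunk_size bytes
    # except the last, which carries the remainder.
    length, chunk_size = 1000, 255
    num_chunks = math.ceil(length / chunk_size)
    assert num_chunks == 4
    # Chunks 0-2 are full-sized; chunk 3 holds the remaining bytes.
    assert length - chunk_size * (num_chunks - 1) == 235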
+ """ + if self._cursor is None: + self._create_cursor() + assert self._cursor is not None + try: + return self._cursor.next() + except CursorNotFound: + self._cursor.close() + self._create_cursor() + return self._cursor.next() + + def next(self) -> Mapping[str, Any]: + try: + chunk = self._next_with_retry() + except StopIteration: + if self._next_chunk >= self._num_chunks: + raise + raise CorruptGridFile("no chunk #%d" % self._next_chunk) + + if chunk["n"] != self._next_chunk: + self.close() + raise CorruptGridFile( + "Missing chunk: expected chunk #%d but found " + "chunk with n=%d" % (self._next_chunk, chunk["n"]) + ) + + if chunk["n"] >= self._num_chunks: + # According to spec, ignore extra chunks if they are empty. + if len(chunk["data"]): + self.close() + raise CorruptGridFile( + "Extra chunk found: expected %d chunks but found " + "chunk with n=%d" % (self._num_chunks, chunk["n"]) + ) + + expected_length = self.expected_chunk_length(chunk["n"]) + if len(chunk["data"]) != expected_length: + self.close() + raise CorruptGridFile( + "truncated chunk #%d: expected chunk length to be %d but " + "found chunk with length %d" % (chunk["n"], expected_length, len(chunk["data"])) + ) + + self._next_chunk += 1 + return chunk + + __next__ = next + + def close(self) -> None: + if self._cursor: + self._cursor.close() + self._cursor = None + + +class GridOutIterator: + def __init__(self, grid_out: GridOut, chunks: Collection, session: ClientSession): + self.__chunk_iter = _GridOutChunkIterator(grid_out, chunks, session, 0) + + def __iter__(self) -> "GridOutIterator": + return self + + def next(self) -> bytes: + chunk = self.__chunk_iter.next() + return bytes(chunk["data"]) + + __next__ = next + + +class GridOutCursor(Cursor): + """A cursor / iterator for returning GridOut objects as the result + of an arbitrary query against the GridFS files collection. + """ + + def __init__( + self, + collection: Collection, + filter: Optional[Mapping[str, Any]] = None, + skip: int = 0, + limit: int = 0, + no_cursor_timeout: bool = False, + sort: Optional[Any] = None, + batch_size: int = 0, + session: Optional[ClientSession] = None, + ) -> None: + """Create a new cursor, similar to the normal + :class:`~pymongo.cursor.Cursor`. + + Should not be called directly by application developers - see + the :class:`~gridfs.GridFS` method :meth:`~gridfs.GridFS.find` instead. + + .. versionadded 2.7 + + .. seealso:: The MongoDB documentation on `cursors <https://dochub.mongodb.org/core/cursors>`_. + """ + _disallow_transactions(session) + collection = _clear_entity_type_registry(collection) + + # Hold on to the base "fs" collection to create GridOut objects later. 
+ self.__root_collection = collection + + super().__init__( + collection.files, + filter, + skip=skip, + limit=limit, + no_cursor_timeout=no_cursor_timeout, + sort=sort, + batch_size=batch_size, + session=session, + ) + + def next(self) -> GridOut: + """Get next GridOut object from cursor.""" + _disallow_transactions(self.session) + next_file = super().next() + return GridOut(self.__root_collection, file_document=next_file, session=self.session) + + __next__ = next + + def add_option(self, *args: Any, **kwargs: Any) -> NoReturn: + raise NotImplementedError("Method does not exist for GridOutCursor") + + def remove_option(self, *args: Any, **kwargs: Any) -> NoReturn: + raise NotImplementedError("Method does not exist for GridOutCursor") + + def _clone_base(self, session: Optional[ClientSession]) -> "GridOutCursor": + """Creates an empty GridOutCursor for information to be copied into.""" + return GridOutCursor(self.__root_collection, session=session) diff --git a/backend/test/lib/python3.8/site-packages/gridfs/py.typed b/backend/test/lib/python3.8/site-packages/gridfs/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..0f4057061a763b445a4300825a450069a96f5719 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/gridfs/py.typed @@ -0,0 +1,2 @@ +# PEP-561 Support File. +# "Package maintainers who wish to support type checking of their code MUST add a marker file named py.typed to their package supporting typing". diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/INSTALLER b/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/LICENSE b/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..d645695673349e3947e8e5ae42332d0ac3164cd7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/METADATA b/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..639bbea98c8e32cb03572e5c4bddb9afe7c325fa --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/METADATA @@ -0,0 +1,138 @@ +Metadata-Version: 2.1 +Name: importlib-metadata +Version: 6.8.0 +Summary: Read metadata from Python packages +Home-page: https://github.com/python/importlib_metadata +Author: Jason R. 
Coombs +Author-email: jaraco@jaraco.com +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.8 +License-File: LICENSE +Requires-Dist: zipp (>=0.5) +Requires-Dist: typing-extensions (>=3.6.4) ; python_version < "3.8" +Provides-Extra: docs +Requires-Dist: sphinx (>=3.5) ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx-lint ; extra == 'docs' +Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' +Provides-Extra: perf +Requires-Dist: ipython ; extra == 'perf' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=2.2) ; extra == 'testing' +Requires-Dist: pytest-ruff ; extra == 'testing' +Requires-Dist: packaging ; extra == 'testing' +Requires-Dist: pyfakefs ; extra == 'testing' +Requires-Dist: flufl.flake8 ; extra == 'testing' +Requires-Dist: pytest-perf (>=0.9.2) ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: importlib-resources (>=1.3) ; (python_version < "3.9") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/importlib_metadata.svg + :target: https://pypi.org/project/importlib_metadata + +.. image:: https://img.shields.io/pypi/pyversions/importlib_metadata.svg + +.. image:: https://github.com/python/importlib_metadata/workflows/tests/badge.svg + :target: https://github.com/python/importlib_metadata/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json + :target: https://github.com/astral-sh/ruff + :alt: Ruff + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. image:: https://readthedocs.org/projects/importlib-metadata/badge/?version=latest + :target: https://importlib-metadata.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2023-informational + :target: https://blog.jaraco.com/skeleton + +.. image:: https://tidelift.com/badges/package/pypi/importlib-metadata + :target: https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=readme + +Library to access the metadata for a Python package. + +This package supplies third-party access to the functionality of +`importlib.metadata <https://docs.python.org/3/library/importlib.metadata.html>`_ +including improvements added to subsequent Python versions. + + +Compatibility +============= + +New features are introduced in this third-party library and later merged +into CPython. The following table indicates which versions of this library +were contributed to different versions in the standard library: + +.. 
list-table:: + :header-rows: 1 + + * - importlib_metadata + - stdlib + * - 6.5 + - 3.12 + * - 4.13 + - 3.11 + * - 4.6 + - 3.10 + * - 1.4 + - 3.8 + + +Usage +===== + +See the `online documentation <https://importlib-metadata.readthedocs.io/>`_ +for usage details. + +`Finder authors +<https://docs.python.org/3/reference/import.html#finders-and-loaders>`_ can +also add support for custom package installers. See the above documentation +for details. + + +Caveats +======= + +This project primarily supports third-party packages installed by PyPA +tools (or other conforming packages). It does not support: + +- Packages in the stdlib. +- Packages installed without metadata. + +Project details +=============== + + * Project home: https://github.com/python/importlib_metadata + * Report bugs at: https://github.com/python/importlib_metadata/issues + * Code hosting: https://github.com/python/importlib_metadata + * Documentation: https://importlib-metadata.readthedocs.io/ + +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more <https://tidelift.com/subscription/pkg/pypi-importlib-metadata?utm_source=pypi-importlib-metadata&utm_medium=referral&utm_campaign=github>`_. + +Security Contact +================ + +To report a security vulnerability, please use the +`Tidelift security contact <https://tidelift.com/security>`_. +Tidelift will coordinate the fix and disclosure. diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/RECORD b/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..27c340841777fe94eb9b20aa6d0268be75ba11af --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/RECORD @@ -0,0 +1,25 @@ +importlib_metadata-6.8.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +importlib_metadata-6.8.0.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358 +importlib_metadata-6.8.0.dist-info/METADATA,sha256=X79qGRh7gqvuaL_utK5X-MnwHJuIWke0e3eAx0IiLhc,5067 +importlib_metadata-6.8.0.dist-info/RECORD,, +importlib_metadata-6.8.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +importlib_metadata-6.8.0.dist-info/top_level.txt,sha256=CO3fD9yylANiXkrMo4qHLV_mqXL2sC5JFKgt1yWAT-A,19 +importlib_metadata/__init__.py,sha256=EiH0qTKP_6oa6pRGJgPrq0kvjnL3hJ18BJH8VaAYSBA,30749 +importlib_metadata/__pycache__/__init__.cpython-38.pyc,, +importlib_metadata/__pycache__/_adapters.cpython-38.pyc,, +importlib_metadata/__pycache__/_collections.cpython-38.pyc,, +importlib_metadata/__pycache__/_compat.cpython-38.pyc,, +importlib_metadata/__pycache__/_functools.cpython-38.pyc,, +importlib_metadata/__pycache__/_itertools.cpython-38.pyc,, +importlib_metadata/__pycache__/_meta.cpython-38.pyc,, +importlib_metadata/__pycache__/_py39compat.cpython-38.pyc,, +importlib_metadata/__pycache__/_text.cpython-38.pyc,, +importlib_metadata/_adapters.py,sha256=i8S6Ib1OQjcILA-l4gkzktMZe18TaeUNI49PLRp6OBU,2454 +importlib_metadata/_collections.py,sha256=CJ0OTCHIjWA0ZIVS4voORAsn2R4R2cQBEtPsZEJpASY,743 +importlib_metadata/_compat.py,sha256=zhjcWMfA9SNExFVVVBozOYbuiok0A4tdMsNk9ZDZi-A,1554 +importlib_metadata/_functools.py,sha256=PsY2-4rrKX4RVeRC1oGp1lB1pmC9eKN88_f-bD9uOoA,2895 
+importlib_metadata/_itertools.py,sha256=cvr_2v8BRbxcIl5x5ldfqdHjhI8Yi8s8yk50G_nm6jQ,2068 +importlib_metadata/_meta.py,sha256=kypMW_-xSStooSm0WpJc6eupjT-Ipc2ZBIl23PyC3No,1613 +importlib_metadata/_py39compat.py,sha256=2Tk5twb_VgLCY-1NEAQjdZp_S9OFMC-pUzP2isuaPsQ,1098 +importlib_metadata/_text.py,sha256=HCsFksZpJLeTP3NEk_ngrAeXVRRtTrtyh9eOABoRP4A,2166 +importlib_metadata/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/WHEEL b/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..1f37c02f2eb2e26b306202feaccb31e522b8b169 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/top_level.txt b/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..bbb07547a19c30031d13c45cf01cba61dc434e47 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/importlib_metadata-6.8.0.dist-info/top_level.txt @@ -0,0 +1 @@ +importlib_metadata diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/__init__.py b/backend/test/lib/python3.8/site-packages/importlib_metadata/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6ba414e5540687b80a83fbc1a651158ab20c2027 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/importlib_metadata/__init__.py @@ -0,0 +1,1015 @@ +import os +import re +import abc +import csv +import sys +import zipp +import email +import inspect +import pathlib +import operator +import textwrap +import warnings +import functools +import itertools +import posixpath +import collections + +from . import _adapters, _meta, _py39compat +from ._collections import FreezableDefaultDict, Pair +from ._compat import ( + NullFinder, + StrPath, + install, + pypy_partial, +) +from ._functools import method_cache, pass_none +from ._itertools import always_iterable, unique_everseen +from ._meta import PackageMetadata, SimplePath + +from contextlib import suppress +from importlib import import_module +from importlib.abc import MetaPathFinder +from itertools import starmap +from typing import Iterable, List, Mapping, Optional, Set, cast + +__all__ = [ + 'Distribution', + 'DistributionFinder', + 'PackageMetadata', + 'PackageNotFoundError', + 'distribution', + 'distributions', + 'entry_points', + 'files', + 'metadata', + 'packages_distributions', + 'requires', + 'version', +] + + +class PackageNotFoundError(ModuleNotFoundError): + """The package was not found.""" + + def __str__(self) -> str: + return f"No package metadata was found for {self.name}" + + @property + def name(self) -> str: # type: ignore[override] + (name,) = self.args + return name + + +class Sectioned: + """ + A simple entry point config parser for performance + + >>> for item in Sectioned.read(Sectioned._sample): + ... 
print(item) + Pair(name='sec1', value='# comments ignored') + Pair(name='sec1', value='a = 1') + Pair(name='sec1', value='b = 2') + Pair(name='sec2', value='a = 2') + + >>> res = Sectioned.section_pairs(Sectioned._sample) + >>> item = next(res) + >>> item.name + 'sec1' + >>> item.value + Pair(name='a', value='1') + >>> item = next(res) + >>> item.value + Pair(name='b', value='2') + >>> item = next(res) + >>> item.name + 'sec2' + >>> item.value + Pair(name='a', value='2') + >>> list(res) + [] + """ + + _sample = textwrap.dedent( + """ + [sec1] + # comments ignored + a = 1 + b = 2 + + [sec2] + a = 2 + """ + ).lstrip() + + @classmethod + def section_pairs(cls, text): + return ( + section._replace(value=Pair.parse(section.value)) + for section in cls.read(text, filter_=cls.valid) + if section.name is not None + ) + + @staticmethod + def read(text, filter_=None): + lines = filter(filter_, map(str.strip, text.splitlines())) + name = None + for value in lines: + section_match = value.startswith('[') and value.endswith(']') + if section_match: + name = value.strip('[]') + continue + yield Pair(name, value) + + @staticmethod + def valid(line: str): + return line and not line.startswith('#') + + +class DeprecatedTuple: + """ + Provide subscript item access for backward compatibility. + + >>> recwarn = getfixture('recwarn') + >>> ep = EntryPoint(name='name', value='value', group='group') + >>> ep[:] + ('name', 'value', 'group') + >>> ep[0] + 'name' + >>> len(recwarn) + 1 + """ + + # Do not remove prior to 2023-05-01 or Python 3.13 + _warn = functools.partial( + warnings.warn, + "EntryPoint tuple interface is deprecated. Access members by name.", + DeprecationWarning, + stacklevel=pypy_partial(2), + ) + + def __getitem__(self, item): + self._warn() + return self._key()[item] + + +class EntryPoint(DeprecatedTuple): + """An entry point as defined by Python packaging conventions. + + See `the packaging docs on entry points + <https://packaging.python.org/specifications/entry-points/>`_ + for more information. + + >>> ep = EntryPoint( + ... name=None, group=None, value='package.module:attr [extra1, extra2]') + >>> ep.module + 'package.module' + >>> ep.attr + 'attr' + >>> ep.extras + ['extra1', 'extra2'] + """ + + pattern = re.compile( + r'(?P<module>[\w.]+)\s*' + r'(:\s*(?P<attr>[\w.]+)\s*)?' + r'((?P<extras>\[.*\])\s*)?$' + ) + """ + A regular expression describing the syntax for an entry point, + which might look like: + + - module + - package.module + - package.module:attribute + - package.module:object.attribute + - package.module:attr [extra1, extra2] + + Other combinations are possible as well. + + The expression is lenient about whitespace around the ':', + following the attr, and following any extras. + """ + + name: str + value: str + group: str + + dist: Optional['Distribution'] = None + + def __init__(self, name: str, value: str, group: str) -> None: + vars(self).update(name=name, value=value, group=group) + + def load(self): + """Load the entry point from its definition. If only a module + is indicated by the value, return that module. Otherwise, + return the named object. 
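+
+        An illustrative sketch (assumes some installed distribution
+        registers a ``console_scripts`` entry point named ``pip``; not a
+        doctest)::
+
+            (ep,) = entry_points(group='console_scripts', name='pip')
+            main = ep.load()   # imports the module, resolves the attribute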
+ """ + match = self.pattern.match(self.value) + module = import_module(match.group('module')) + attrs = filter(None, (match.group('attr') or '').split('.')) + return functools.reduce(getattr, attrs, module) + + @property + def module(self) -> str: + match = self.pattern.match(self.value) + assert match is not None + return match.group('module') + + @property + def attr(self) -> str: + match = self.pattern.match(self.value) + assert match is not None + return match.group('attr') + + @property + def extras(self) -> List[str]: + match = self.pattern.match(self.value) + assert match is not None + return re.findall(r'\w+', match.group('extras') or '') + + def _for(self, dist): + vars(self).update(dist=dist) + return self + + def matches(self, **params): + """ + EntryPoint matches the given parameters. + + >>> ep = EntryPoint(group='foo', name='bar', value='bing:bong [extra1, extra2]') + >>> ep.matches(group='foo') + True + >>> ep.matches(name='bar', value='bing:bong [extra1, extra2]') + True + >>> ep.matches(group='foo', name='other') + False + >>> ep.matches() + True + >>> ep.matches(extras=['extra1', 'extra2']) + True + >>> ep.matches(module='bing') + True + >>> ep.matches(attr='bong') + True + """ + attrs = (getattr(self, param) for param in params) + return all(map(operator.eq, params.values(), attrs)) + + def _key(self): + return self.name, self.value, self.group + + def __lt__(self, other): + return self._key() < other._key() + + def __eq__(self, other): + return self._key() == other._key() + + def __setattr__(self, name, value): + raise AttributeError("EntryPoint objects are immutable.") + + def __repr__(self): + return ( + f'EntryPoint(name={self.name!r}, value={self.value!r}, ' + f'group={self.group!r})' + ) + + def __hash__(self) -> int: + return hash(self._key()) + + +class EntryPoints(tuple): + """ + An immutable collection of selectable EntryPoint objects. + """ + + __slots__ = () + + def __getitem__(self, name: str) -> EntryPoint: # type: ignore[override] + """ + Get the EntryPoint in self matching name. + """ + try: + return next(iter(self.select(name=name))) + except StopIteration: + raise KeyError(name) + + def select(self, **params): + """ + Select entry points from self that match the + given parameters (typically group and/or name). + """ + return EntryPoints(ep for ep in self if _py39compat.ep_matches(ep, **params)) + + @property + def names(self) -> Set[str]: + """ + Return the set of all names of all entry points. + """ + return {ep.name for ep in self} + + @property + def groups(self) -> Set[str]: + """ + Return the set of all groups of all entry points. 
+ """ + return {ep.group for ep in self} + + @classmethod + def _from_text_for(cls, text, dist): + return cls(ep._for(dist) for ep in cls._from_text(text)) + + @staticmethod + def _from_text(text): + return ( + EntryPoint(name=item.value.name, value=item.value.value, group=item.name) + for item in Sectioned.section_pairs(text or '') + ) + + +class PackagePath(pathlib.PurePosixPath): + """A reference to a path in a package""" + + hash: Optional["FileHash"] + size: int + dist: "Distribution" + + def read_text(self, encoding: str = 'utf-8') -> str: # type: ignore[override] + with self.locate().open(encoding=encoding) as stream: + return stream.read() + + def read_binary(self) -> bytes: + with self.locate().open('rb') as stream: + return stream.read() + + def locate(self) -> pathlib.Path: + """Return a path-like object for this path""" + return self.dist.locate_file(self) + + +class FileHash: + def __init__(self, spec: str) -> None: + self.mode, _, self.value = spec.partition('=') + + def __repr__(self) -> str: + return f'<FileHash mode: {self.mode} value: {self.value}>' + + +class DeprecatedNonAbstract: + def __new__(cls, *args, **kwargs): + all_names = { + name for subclass in inspect.getmro(cls) for name in vars(subclass) + } + abstract = { + name + for name in all_names + if getattr(getattr(cls, name), '__isabstractmethod__', False) + } + if abstract: + warnings.warn( + f"Unimplemented abstract methods {abstract}", + DeprecationWarning, + stacklevel=2, + ) + return super().__new__(cls) + + +class Distribution(DeprecatedNonAbstract): + """A Python distribution package.""" + + @abc.abstractmethod + def read_text(self, filename) -> Optional[str]: + """Attempt to load metadata file given by the name. + + :param filename: The name of the file in the distribution info. + :return: The text if found, otherwise None. + """ + + @abc.abstractmethod + def locate_file(self, path: StrPath) -> pathlib.Path: + """ + Given a path to a file in this distribution, return a path + to it. + """ + + @classmethod + def from_name(cls, name: str) -> "Distribution": + """Return the Distribution for the given package name. + + :param name: The name of the distribution package to search for. + :return: The Distribution instance (or subclass thereof) for the named + package, if found. + :raises PackageNotFoundError: When the named package's distribution + metadata cannot be found. + :raises ValueError: When an invalid value is supplied for name. + """ + if not name: + raise ValueError("A distribution name is required.") + try: + return next(iter(cls.discover(name=name))) + except StopIteration: + raise PackageNotFoundError(name) + + @classmethod + def discover(cls, **kwargs) -> Iterable["Distribution"]: + """Return an iterable of Distribution objects for all packages. + + Pass a ``context`` or pass keyword arguments for constructing + a context. + + :context: A ``DistributionFinder.Context`` object. + :return: Iterable of Distribution objects for all packages. 
+ """ + context = kwargs.pop('context', None) + if context and kwargs: + raise ValueError("cannot accept context and kwargs") + context = context or DistributionFinder.Context(**kwargs) + return itertools.chain.from_iterable( + resolver(context) for resolver in cls._discover_resolvers() + ) + + @staticmethod + def at(path: StrPath) -> "Distribution": + """Return a Distribution for the indicated metadata path + + :param path: a string or path-like object + :return: a concrete Distribution instance for the path + """ + return PathDistribution(pathlib.Path(path)) + + @staticmethod + def _discover_resolvers(): + """Search the meta_path for resolvers.""" + declared = ( + getattr(finder, 'find_distributions', None) for finder in sys.meta_path + ) + return filter(None, declared) + + @property + def metadata(self) -> _meta.PackageMetadata: + """Return the parsed metadata for this Distribution. + + The returned object will have keys that name the various bits of + metadata. See PEP 566 for details. + """ + opt_text = ( + self.read_text('METADATA') + or self.read_text('PKG-INFO') + # This last clause is here to support old egg-info files. Its + # effect is to just end up using the PathDistribution's self._path + # (which points to the egg-info file) attribute unchanged. + or self.read_text('') + ) + text = cast(str, opt_text) + return _adapters.Message(email.message_from_string(text)) + + @property + def name(self) -> str: + """Return the 'Name' metadata for the distribution package.""" + return self.metadata['Name'] + + @property + def _normalized_name(self): + """Return a normalized version of the name.""" + return Prepared.normalize(self.name) + + @property + def version(self) -> str: + """Return the 'Version' metadata for the distribution package.""" + return self.metadata['Version'] + + @property + def entry_points(self) -> EntryPoints: + return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self) + + @property + def files(self) -> Optional[List[PackagePath]]: + """Files in this distribution. + + :return: List of PackagePath for this distribution or None + + Result is `None` if the metadata file that enumerates files + (i.e. RECORD for dist-info, or installed-files.txt or + SOURCES.txt for egg-info) is missing. + Result may be empty if the metadata exists but is empty. + """ + + def make_file(name, hash=None, size_str=None): + result = PackagePath(name) + result.hash = FileHash(hash) if hash else None + result.size = int(size_str) if size_str else None + result.dist = self + return result + + @pass_none + def make_files(lines): + return starmap(make_file, csv.reader(lines)) + + @pass_none + def skip_missing_files(package_paths): + return list(filter(lambda path: path.locate().exists(), package_paths)) + + return skip_missing_files( + make_files( + self._read_files_distinfo() + or self._read_files_egginfo_installed() + or self._read_files_egginfo_sources() + ) + ) + + def _read_files_distinfo(self): + """ + Read the lines of RECORD + """ + text = self.read_text('RECORD') + return text and text.splitlines() + + def _read_files_egginfo_installed(self): + """ + Read installed-files.txt and return lines in a similar + CSV-parsable format as RECORD: each file must be placed + relative to the site-packages directory and must also be + quoted (since file names can contain literal commas). + + This file is written when the package is installed by pip, + but it might not be written for other installation methods. + Assume the file is accurate if it exists. 
+ """ + text = self.read_text('installed-files.txt') + # Prepend the .egg-info/ subdir to the lines in this file. + # But this subdir is only available from PathDistribution's + # self._path. + subdir = getattr(self, '_path', None) + if not text or not subdir: + return + + paths = ( + (subdir / name) + .resolve() + .relative_to(self.locate_file('').resolve()) + .as_posix() + for name in text.splitlines() + ) + return map('"{}"'.format, paths) + + def _read_files_egginfo_sources(self): + """ + Read SOURCES.txt and return lines in a similar CSV-parsable + format as RECORD: each file name must be quoted (since it + might contain literal commas). + + Note that SOURCES.txt is not a reliable source for what + files are installed by a package. This file is generated + for a source archive, and the files that are present + there (e.g. setup.py) may not correctly reflect the files + that are present after the package has been installed. + """ + text = self.read_text('SOURCES.txt') + return text and map('"{}"'.format, text.splitlines()) + + @property + def requires(self) -> Optional[List[str]]: + """Generated requirements specified for this Distribution""" + reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs() + return reqs and list(reqs) + + def _read_dist_info_reqs(self): + return self.metadata.get_all('Requires-Dist') + + def _read_egg_info_reqs(self): + source = self.read_text('requires.txt') + return pass_none(self._deps_from_requires_text)(source) + + @classmethod + def _deps_from_requires_text(cls, source): + return cls._convert_egg_info_reqs_to_simple_reqs(Sectioned.read(source)) + + @staticmethod + def _convert_egg_info_reqs_to_simple_reqs(sections): + """ + Historically, setuptools would solicit and store 'extra' + requirements, including those with environment markers, + in separate sections. More modern tools expect each + dependency to be defined separately, with any relevant + extras and environment markers attached directly to that + requirement. This method converts the former to the + latter. See _test_deps_from_requires_text for an example. + """ + + def make_condition(name): + return name and f'extra == "{name}"' + + def quoted_marker(section): + section = section or '' + extra, sep, markers = section.partition(':') + if extra and markers: + markers = f'({markers})' + conditions = list(filter(None, [markers, make_condition(extra)])) + return '; ' + ' and '.join(conditions) if conditions else '' + + def url_req_space(req): + """ + PEP 508 requires a space between the url_spec and the quoted_marker. + Ref python/importlib_metadata#357. + """ + # '@' is uniquely indicative of a url_req. + return ' ' * ('@' in req) + + for section in sections: + space = url_req_space(section.value) + yield section.value + space + quoted_marker(section.name) + + +class DistributionFinder(MetaPathFinder): + """ + A MetaPathFinder capable of discovering installed distributions. + """ + + class Context: + """ + Keyword arguments presented by the caller to + ``distributions()`` or ``Distribution.discover()`` + to narrow the scope of a search for distributions + in all DistributionFinders. + + Each DistributionFinder may expect any parameters + and should attempt to honor the canonical + parameters defined below when appropriate. + """ + + name = None + """ + Specific name for which a distribution finder should match. + A name of ``None`` matches all distributions. 
+ """ + + def __init__(self, **kwargs): + vars(self).update(kwargs) + + @property + def path(self) -> List[str]: + """ + The sequence of directory path that a distribution finder + should search. + + Typically refers to Python installed package paths such as + "site-packages" directories and defaults to ``sys.path``. + """ + return vars(self).get('path', sys.path) + + @abc.abstractmethod + def find_distributions(self, context=Context()) -> Iterable[Distribution]: + """ + Find distributions. + + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching the ``context``, + a DistributionFinder.Context instance. + """ + + +class FastPath: + """ + Micro-optimized class for searching a path for + children. + + >>> FastPath('').children() + ['...'] + """ + + @functools.lru_cache() # type: ignore + def __new__(cls, root): + return super().__new__(cls) + + def __init__(self, root): + self.root = root + + def joinpath(self, child): + return pathlib.Path(self.root, child) + + def children(self): + with suppress(Exception): + return os.listdir(self.root or '.') + with suppress(Exception): + return self.zip_children() + return [] + + def zip_children(self): + zip_path = zipp.Path(self.root) + names = zip_path.root.namelist() + self.joinpath = zip_path.joinpath + + return dict.fromkeys(child.split(posixpath.sep, 1)[0] for child in names) + + def search(self, name): + return self.lookup(self.mtime).search(name) + + @property + def mtime(self): + with suppress(OSError): + return os.stat(self.root).st_mtime + self.lookup.cache_clear() + + @method_cache + def lookup(self, mtime): + return Lookup(self) + + +class Lookup: + def __init__(self, path: FastPath): + base = os.path.basename(path.root).lower() + base_is_egg = base.endswith(".egg") + self.infos = FreezableDefaultDict(list) + self.eggs = FreezableDefaultDict(list) + + for child in path.children(): + low = child.lower() + if low.endswith((".dist-info", ".egg-info")): + # rpartition is faster than splitext and suitable for this purpose. + name = low.rpartition(".")[0].partition("-")[0] + normalized = Prepared.normalize(name) + self.infos[normalized].append(path.joinpath(child)) + elif base_is_egg and low == "egg-info": + name = base.rpartition(".")[0].partition("-")[0] + legacy_normalized = Prepared.legacy_normalize(name) + self.eggs[legacy_normalized].append(path.joinpath(child)) + + self.infos.freeze() + self.eggs.freeze() + + def search(self, prepared): + infos = ( + self.infos[prepared.normalized] + if prepared + else itertools.chain.from_iterable(self.infos.values()) + ) + eggs = ( + self.eggs[prepared.legacy_normalized] + if prepared + else itertools.chain.from_iterable(self.eggs.values()) + ) + return itertools.chain(infos, eggs) + + +class Prepared: + """ + A prepared search for metadata on a possibly-named package. + """ + + normalized = None + legacy_normalized = None + + def __init__(self, name): + self.name = name + if name is None: + return + self.normalized = self.normalize(name) + self.legacy_normalized = self.legacy_normalize(name) + + @staticmethod + def normalize(name): + """ + PEP 503 normalization plus dashes as underscores. + """ + return re.sub(r"[-_.]+", "-", name).lower().replace('-', '_') + + @staticmethod + def legacy_normalize(name): + """ + Normalize the package name as found in the convention in + older packaging tools versions and specs. 
+ """ + return name.lower().replace('-', '_') + + def __bool__(self): + return bool(self.name) + + +@install +class MetadataPathFinder(NullFinder, DistributionFinder): + """A degenerate finder for distribution packages on the file system. + + This finder supplies only a find_distributions() method for versions + of Python that do not have a PathFinder find_distributions(). + """ + + def find_distributions( + self, context=DistributionFinder.Context() + ) -> Iterable["PathDistribution"]: + """ + Find distributions. + + Return an iterable of all Distribution instances capable of + loading the metadata for packages matching ``context.name`` + (or all names if ``None`` indicated) along the paths in the list + of directories ``context.path``. + """ + found = self._search_paths(context.name, context.path) + return map(PathDistribution, found) + + @classmethod + def _search_paths(cls, name, paths): + """Find metadata directories in paths heuristically.""" + prepared = Prepared(name) + return itertools.chain.from_iterable( + path.search(prepared) for path in map(FastPath, paths) + ) + + def invalidate_caches(cls) -> None: + FastPath.__new__.cache_clear() + + +class PathDistribution(Distribution): + def __init__(self, path: SimplePath) -> None: + """Construct a distribution. + + :param path: SimplePath indicating the metadata directory. + """ + self._path = path + + def read_text(self, filename: StrPath) -> Optional[str]: + with suppress( + FileNotFoundError, + IsADirectoryError, + KeyError, + NotADirectoryError, + PermissionError, + ): + return self._path.joinpath(filename).read_text(encoding='utf-8') + + return None + + read_text.__doc__ = Distribution.read_text.__doc__ + + def locate_file(self, path: StrPath) -> pathlib.Path: + return self._path.parent / path + + @property + def _normalized_name(self): + """ + Performance optimization: where possible, resolve the + normalized name from the file system path. + """ + stem = os.path.basename(str(self._path)) + return ( + pass_none(Prepared.normalize)(self._name_from_stem(stem)) + or super()._normalized_name + ) + + @staticmethod + def _name_from_stem(stem): + """ + >>> PathDistribution._name_from_stem('foo-3.0.egg-info') + 'foo' + >>> PathDistribution._name_from_stem('CherryPy-3.0.dist-info') + 'CherryPy' + >>> PathDistribution._name_from_stem('face.egg-info') + 'face' + >>> PathDistribution._name_from_stem('foo.bar') + """ + filename, ext = os.path.splitext(stem) + if ext not in ('.dist-info', '.egg-info'): + return + name, sep, rest = filename.partition('-') + return name + + +def distribution(distribution_name: str) -> Distribution: + """Get the ``Distribution`` instance for the named package. + + :param distribution_name: The name of the distribution package as a string. + :return: A ``Distribution`` instance (or subclass thereof). + """ + return Distribution.from_name(distribution_name) + + +def distributions(**kwargs) -> Iterable[Distribution]: + """Get all ``Distribution`` instances in the current environment. + + :return: An iterable of ``Distribution`` instances. + """ + return Distribution.discover(**kwargs) + + +def metadata(distribution_name: str) -> _meta.PackageMetadata: + """Get the metadata for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: A PackageMetadata containing the parsed metadata. + """ + return Distribution.from_name(distribution_name).metadata + + +def version(distribution_name: str) -> str: + """Get the version string for the named package. 
+ + :param distribution_name: The name of the distribution package to query. + :return: The version string for the package as defined in the package's + "Version" metadata key. + """ + return distribution(distribution_name).version + + +_unique = functools.partial( + unique_everseen, + key=_py39compat.normalized_name, +) +""" +Wrapper for ``distributions`` to return unique distributions by name. +""" + + +def entry_points(**params) -> EntryPoints: + """Return EntryPoint objects for all installed packages. + + Pass selection parameters (group or name) to filter the + result to entry points matching those properties (see + EntryPoints.select()). + + :return: EntryPoints for all installed packages. + """ + eps = itertools.chain.from_iterable( + dist.entry_points for dist in _unique(distributions()) + ) + return EntryPoints(eps).select(**params) + + +def files(distribution_name: str) -> Optional[List[PackagePath]]: + """Return a list of files for the named package. + + :param distribution_name: The name of the distribution package to query. + :return: List of files composing the distribution. + """ + return distribution(distribution_name).files + + +def requires(distribution_name: str) -> Optional[List[str]]: + """ + Return a list of requirements for the named package. + + :return: An iterable of requirements, suitable for + packaging.requirement.Requirement. + """ + return distribution(distribution_name).requires + + +def packages_distributions() -> Mapping[str, List[str]]: + """ + Return a mapping of top-level packages to their + distributions. + + >>> import collections.abc + >>> pkgs = packages_distributions() + >>> all(isinstance(dist, collections.abc.Sequence) for dist in pkgs.values()) + True + """ + pkg_to_dist = collections.defaultdict(list) + for dist in distributions(): + for pkg in _top_level_declared(dist) or _top_level_inferred(dist): + pkg_to_dist[pkg].append(dist.metadata['Name']) + return dict(pkg_to_dist) + + +def _top_level_declared(dist): + return (dist.read_text('top_level.txt') or '').split() + + +def _topmost(name: PackagePath) -> Optional[str]: + """ + Return the top-most parent as long as there is a parent. + """ + top, *rest = name.parts + return top if rest else None + + +def _get_toplevel_name(name: PackagePath) -> str: + """ + Infer a possibly importable module name from a name presumed on + sys.path. + + >>> _get_toplevel_name(PackagePath('foo.py')) + 'foo' + >>> _get_toplevel_name(PackagePath('foo')) + 'foo' + >>> _get_toplevel_name(PackagePath('foo.pyc')) + 'foo' + >>> _get_toplevel_name(PackagePath('foo/__init__.py')) + 'foo' + >>> _get_toplevel_name(PackagePath('foo.pth')) + 'foo.pth' + >>> _get_toplevel_name(PackagePath('foo.dist-info')) + 'foo.dist-info' + """ + return _topmost(name) or ( + # python/typeshed#10328 + inspect.getmodulename(name) # type: ignore + or str(name) + ) + + +def _top_level_inferred(dist): + opt_names = set(map(_get_toplevel_name, always_iterable(dist.files))) + + def importable_name(name): + return '.' 
not in name + + return filter(importable_name, opt_names) diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3863773ffa9427eacc4c41b07440036de933e868 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_adapters.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_adapters.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..37c4adcf8cf2392563c91438c4337c12d875b572 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_adapters.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_collections.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_collections.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..48d8cd03250643c173a8c2c5f9a3d6caca6d78df Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_collections.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_compat.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_compat.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f4553db5c00b35e2e12381f69c06a48e92256ad9 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_compat.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_functools.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_functools.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0c97f0d610c1639bcabf0b54fa47b8900aede72e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_functools.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_itertools.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_itertools.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..04d55558e9f483a4fc632bc2ee6f44f2da556e77 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_itertools.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_meta.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_meta.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3c67a062161039b8b4c3f1bf31b80c6a60465c28 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_meta.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_py39compat.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_py39compat.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..847ecf08437dbacb7e26c443744e64983fb93804 Binary files 
/dev/null and b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_py39compat.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_text.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_text.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d59fbff31c9c8c8be0e9be1ec991f78e20a554c3
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/importlib_metadata/__pycache__/_text.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/_adapters.py b/backend/test/lib/python3.8/site-packages/importlib_metadata/_adapters.py
new file mode 100644
index 0000000000000000000000000000000000000000..e33cba5e44d4f7c62a479db7ea215f7a06ad6efa
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/importlib_metadata/_adapters.py
@@ -0,0 +1,90 @@
+import functools
+import warnings
+import re
+import textwrap
+import email.message
+
+from ._text import FoldedCase
+from ._compat import pypy_partial
+
+
+# Do not remove prior to 2024-01-01 or Python 3.14
+_warn = functools.partial(
+    warnings.warn,
+    "Implicit None on return values is deprecated and will raise KeyErrors.",
+    DeprecationWarning,
+    stacklevel=pypy_partial(2),
+)
+
+
+class Message(email.message.Message):
+    multiple_use_keys = set(
+        map(
+            FoldedCase,
+            [
+                'Classifier',
+                'Obsoletes-Dist',
+                'Platform',
+                'Project-URL',
+                'Provides-Dist',
+                'Provides-Extra',
+                'Requires-Dist',
+                'Requires-External',
+                'Supported-Platform',
+                'Dynamic',
+            ],
+        )
+    )
+    """
+    Keys that may be indicated multiple times per PEP 566.
+    """
+
+    def __new__(cls, orig: email.message.Message):
+        res = super().__new__(cls)
+        vars(res).update(vars(orig))
+        return res
+
+    def __init__(self, *args, **kwargs):
+        self._headers = self._repair_headers()
+
+    # suppress spurious error from mypy
+    def __iter__(self):
+        return super().__iter__()
+
+    def __getitem__(self, item):
+        """
+        Warn users that a ``KeyError`` can be expected when a
+        missing key is supplied. Ref python/importlib_metadata#371.
+        """
+        res = super().__getitem__(item)
+        if res is None:
+            _warn()
+        return res
+
+    def _repair_headers(self):
+        def redent(value):
+            "Correct for RFC822 indentation"
+            if not value or '\n' not in value:
+                return value
+            return textwrap.dedent(' ' * 8 + value)
+
+        headers = [(key, redent(value)) for key, value in vars(self)['_headers']]
+        if self._payload:
+            headers.append(('Description', self.get_payload()))
+        return headers
+
+    @property
+    def json(self):
+        """
+        Convert PackageMetadata to a JSON-compatible format
+        per PEP 566.
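+
+        For example (illustrative), a multiple-use key such as
+        ``Requires-Dist`` becomes ``requires_dist`` mapped to a list of
+        strings, a single-use key such as ``Name`` becomes ``name`` mapped
+        to a string, and ``Keywords`` is split on whitespace into a list.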
+ """ + + def transform(key): + value = self.get_all(key) if key in self.multiple_use_keys else self[key] + if key == 'Keywords': + value = re.split(r'\s+', value) + tk = key.lower().replace('-', '_') + return tk, value + + return dict(map(transform, map(FoldedCase, self))) diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/_collections.py b/backend/test/lib/python3.8/site-packages/importlib_metadata/_collections.py new file mode 100644 index 0000000000000000000000000000000000000000..cf0954e1a30546d781bf25781ec716ef92a77e32 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/importlib_metadata/_collections.py @@ -0,0 +1,30 @@ +import collections + + +# from jaraco.collections 3.3 +class FreezableDefaultDict(collections.defaultdict): + """ + Often it is desirable to prevent the mutation of + a default dict after its initial construction, such + as to prevent mutation during iteration. + + >>> dd = FreezableDefaultDict(list) + >>> dd[0].append('1') + >>> dd.freeze() + >>> dd[1] + [] + >>> len(dd) + 1 + """ + + def __missing__(self, key): + return getattr(self, '_frozen', super().__missing__)(key) + + def freeze(self): + self._frozen = lambda key: self.default_factory() + + +class Pair(collections.namedtuple('Pair', 'name value')): + @classmethod + def parse(cls, text): + return cls(*map(str.strip, text.split("=", 1))) diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/_compat.py b/backend/test/lib/python3.8/site-packages/importlib_metadata/_compat.py new file mode 100644 index 0000000000000000000000000000000000000000..c0f15c780a8f404dc4635a15c22c0756ae282fcc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/importlib_metadata/_compat.py @@ -0,0 +1,67 @@ +import os +import sys +import platform + +from typing import Union + + +__all__ = ['install', 'NullFinder'] + + +def install(cls): + """ + Class decorator for installation on sys.meta_path. + + Adds the backport DistributionFinder to sys.meta_path and + attempts to disable the finder functionality of the stdlib + DistributionFinder. + """ + sys.meta_path.append(cls()) + disable_stdlib_finder() + return cls + + +def disable_stdlib_finder(): + """ + Give the backport primacy for discovering path-based distributions + by monkey-patching the stdlib O_O. + + See #91 for more background for rationale on this sketchy + behavior. + """ + + def matches(finder): + return getattr( + finder, '__module__', None + ) == '_frozen_importlib_external' and hasattr(finder, 'find_distributions') + + for finder in filter(matches, sys.meta_path): # pragma: nocover + del finder.find_distributions + + +class NullFinder: + """ + A "Finder" (aka "MetaClassFinder") that never finds any modules, + but may find distributions. + """ + + @staticmethod + def find_spec(*args, **kwargs): + return None + + +def pypy_partial(val): + """ + Adjust for variable stacklevel on partial under PyPy. + + Workaround for #327. 
+ """ + is_pypy = platform.python_implementation() == 'PyPy' + return val + is_pypy + + +if sys.version_info >= (3, 9): + StrPath = Union[str, os.PathLike[str]] +else: + # PathLike is only subscriptable at runtime in 3.9+ + StrPath = Union[str, "os.PathLike[str]"] # pragma: no cover diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/_functools.py b/backend/test/lib/python3.8/site-packages/importlib_metadata/_functools.py new file mode 100644 index 0000000000000000000000000000000000000000..71f66bd03cb713a2190853bdf7170c4ea80d2425 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/importlib_metadata/_functools.py @@ -0,0 +1,104 @@ +import types +import functools + + +# from jaraco.functools 3.3 +def method_cache(method, cache_wrapper=None): + """ + Wrap lru_cache to support storing the cache data in the object instances. + + Abstracts the common paradigm where the method explicitly saves an + underscore-prefixed protected property on first call and returns that + subsequently. + + >>> class MyClass: + ... calls = 0 + ... + ... @method_cache + ... def method(self, value): + ... self.calls += 1 + ... return value + + >>> a = MyClass() + >>> a.method(3) + 3 + >>> for x in range(75): + ... res = a.method(x) + >>> a.calls + 75 + + Note that the apparent behavior will be exactly like that of lru_cache + except that the cache is stored on each instance, so values in one + instance will not flush values from another, and when an instance is + deleted, so are the cached values for that instance. + + >>> b = MyClass() + >>> for x in range(35): + ... res = b.method(x) + >>> b.calls + 35 + >>> a.method(0) + 0 + >>> a.calls + 75 + + Note that if method had been decorated with ``functools.lru_cache()``, + a.calls would have been 76 (due to the cached value of 0 having been + flushed by the 'b' instance). + + Clear the cache with ``.cache_clear()`` + + >>> a.method.cache_clear() + + Same for a method that hasn't yet been called. + + >>> c = MyClass() + >>> c.method.cache_clear() + + Another cache wrapper may be supplied: + + >>> cache = functools.lru_cache(maxsize=2) + >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache) + >>> a = MyClass() + >>> a.method2() + 3 + + Caution - do not subsequently wrap the method with another decorator, such + as ``@property``, which changes the semantics of the function. + + See also + http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/ + for another implementation and additional justification. + """ + cache_wrapper = cache_wrapper or functools.lru_cache() + + def wrapper(self, *args, **kwargs): + # it's the first call, replace the method with a cached, bound method + bound_method = types.MethodType(method, self) + cached_method = cache_wrapper(bound_method) + setattr(self, method.__name__, cached_method) + return cached_method(*args, **kwargs) + + # Support cache clear even before cache has been created. 
+ wrapper.cache_clear = lambda: None + + return wrapper + + +# From jaraco.functools 3.3 +def pass_none(func): + """ + Wrap func so it's not called if its first param is None + + >>> print_text = pass_none(print) + >>> print_text('text') + text + >>> print_text(None) + """ + + @functools.wraps(func) + def wrapper(param, *args, **kwargs): + if param is not None: + return func(param, *args, **kwargs) + + return wrapper diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/_itertools.py b/backend/test/lib/python3.8/site-packages/importlib_metadata/_itertools.py new file mode 100644 index 0000000000000000000000000000000000000000..d4ca9b9140e3f085b36609bb8dfdaea79c78e144 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/importlib_metadata/_itertools.py @@ -0,0 +1,73 @@ +from itertools import filterfalse + + +def unique_everseen(iterable, key=None): + "List unique elements, preserving order. Remember all elements ever seen." + # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element + + +# copied from more_itertools 8.8 +def always_iterable(obj, base_type=(str, bytes)): + """If *obj* is iterable, return an iterator over its items:: + + >>> obj = (1, 2, 3) + >>> list(always_iterable(obj)) + [1, 2, 3] + + If *obj* is not iterable, return a one-item iterable containing *obj*:: + + >>> obj = 1 + >>> list(always_iterable(obj)) + [1] + + If *obj* is ``None``, return an empty iterable: + + >>> obj = None + >>> list(always_iterable(None)) + [] + + By default, binary and text strings are not considered iterable:: + + >>> obj = 'foo' + >>> list(always_iterable(obj)) + ['foo'] + + If *base_type* is set, objects for which ``isinstance(obj, base_type)`` + returns ``True`` won't be considered iterable. + + >>> obj = {'a': 1} + >>> list(always_iterable(obj)) # Iterate over the dict's keys + ['a'] + >>> list(always_iterable(obj, base_type=dict)) # Treat dicts as a unit + [{'a': 1}] + + Set *base_type* to ``None`` to avoid any special handling and treat objects + Python considers iterable as iterable: + + >>> obj = 'foo' + >>> list(always_iterable(obj, base_type=None)) + ['f', 'o', 'o'] + """ + if obj is None: + return iter(()) + + if (base_type is not None) and isinstance(obj, base_type): + return iter((obj,)) + + try: + return iter(obj) + except TypeError: + return iter((obj,)) diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/_meta.py b/backend/test/lib/python3.8/site-packages/importlib_metadata/_meta.py new file mode 100644 index 0000000000000000000000000000000000000000..f670016de7fef207636726e51c2754846a157ca8 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/importlib_metadata/_meta.py @@ -0,0 +1,63 @@ +from typing import Protocol +from typing import Any, Dict, Iterator, List, Optional, TypeVar, Union, overload + + +_T = TypeVar("_T") + + +class PackageMetadata(Protocol): + def __len__(self) -> int: + ... # pragma: no cover + + def __contains__(self, item: str) -> bool: + ... # pragma: no cover + + def __getitem__(self, key: str) -> str: + ... # pragma: no cover + + def __iter__(self) -> Iterator[str]: + ... # pragma: no cover + + @overload + def get(self, name: str, failobj: None = None) -> Optional[str]: + ... 
# pragma: no cover + + @overload + def get(self, name: str, failobj: _T) -> Union[str, _T]: + ... # pragma: no cover + + # overload per python/importlib_metadata#435 + @overload + def get_all(self, name: str, failobj: None = None) -> Optional[List[Any]]: + ... # pragma: no cover + + @overload + def get_all(self, name: str, failobj: _T) -> Union[List[Any], _T]: + """ + Return all values associated with a possibly multi-valued key. + """ + + @property + def json(self) -> Dict[str, Union[str, List[str]]]: + """ + A JSON-compatible form of the metadata. + """ + + +class SimplePath(Protocol[_T]): + """ + A minimal subset of pathlib.Path required by PathDistribution. + """ + + def joinpath(self, other: Union[str, _T]) -> _T: + ... # pragma: no cover + + def __truediv__(self, other: Union[str, _T]) -> _T: + ... # pragma: no cover + + @property + def parent(self) -> _T: + ... # pragma: no cover + + def read_text(self) -> str: + ... # pragma: no cover diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/_py39compat.py b/backend/test/lib/python3.8/site-packages/importlib_metadata/_py39compat.py new file mode 100644 index 0000000000000000000000000000000000000000..cde4558fbbeb938de8a5aa8ee165465c23180ffb --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/importlib_metadata/_py39compat.py @@ -0,0 +1,35 @@ +""" +Compatibility layer with Python 3.8/3.9 +""" +from typing import TYPE_CHECKING, Any, Optional + +if TYPE_CHECKING: # pragma: no cover + # Prevent circular imports on runtime. + from . import Distribution, EntryPoint +else: + Distribution = EntryPoint = Any + + +def normalized_name(dist: Distribution) -> Optional[str]: + """ + Honor name normalization for distributions that don't provide ``_normalized_name``. + """ + try: + return dist._normalized_name + except AttributeError: + from . import Prepared # -> delay to prevent circular imports. + + return Prepared.normalize(getattr(dist, "name", None) or dist.metadata['Name']) + + +def ep_matches(ep: EntryPoint, **params) -> bool: + """ + Workaround for ``EntryPoint`` objects without the ``matches`` method. + """ + try: + return ep.matches(**params) + except AttributeError: + from . import EntryPoint # -> delay to prevent circular imports. + + # Reconstruct the EntryPoint object to make sure it is compatible. + return EntryPoint(ep.name, ep.value, ep.group).matches(**params) diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/_text.py b/backend/test/lib/python3.8/site-packages/importlib_metadata/_text.py new file mode 100644 index 0000000000000000000000000000000000000000..c88cfbb2349c6401336bc5ba6623f51afd1eb59d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/importlib_metadata/_text.py @@ -0,0 +1,99 @@ +import re + +from ._functools import method_cache + + +# from jaraco.text 3.5 +class FoldedCase(str): + """ + A case insensitive string class; behaves just like str + except compares equal when the only variation is case. + + >>> s = FoldedCase('hello world') + + >>> s == 'Hello World' + True + + >>> 'Hello World' == s + True + + >>> s != 'Hello World' + False + + >>> s.index('O') + 4 + + >>> s.split('O') + ['hell', ' w', 'rld'] + + >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta'])) + ['alpha', 'Beta', 'GAMMA'] + + Sequence membership is straightforward. + + >>> "Hello World" in [s] + True + >>> s in ["Hello World"] + True + + You may test for set inclusion, but candidate and elements + must both be folded. 
+ + >>> FoldedCase("Hello World") in {s} + True + >>> s in {FoldedCase("Hello World")} + True + + String inclusion works as long as the FoldedCase object + is on the right. + + >>> "hello" in FoldedCase("Hello World") + True + + But not if the FoldedCase object is on the left: + + >>> FoldedCase('hello') in 'Hello World' + False + + In that case, use in_: + + >>> FoldedCase('hello').in_('Hello World') + True + + >>> FoldedCase('hello') > FoldedCase('Hello') + False + """ + + def __lt__(self, other): + return self.lower() < other.lower() + + def __gt__(self, other): + return self.lower() > other.lower() + + def __eq__(self, other): + return self.lower() == other.lower() + + def __ne__(self, other): + return self.lower() != other.lower() + + def __hash__(self): + return hash(self.lower()) + + def __contains__(self, other): + return super().lower().__contains__(other.lower()) + + def in_(self, other): + "Does self appear in other?" + return self in FoldedCase(other) + + # cache lower since it's likely to be called frequently. + @method_cache + def lower(self): + return super().lower() + + def index(self, sub): + return self.lower().index(sub.lower()) + + def split(self, splitter=' ', maxsplit=0): + pattern = re.compile(re.escape(splitter), re.I) + return pattern.split(self, maxsplit) diff --git a/backend/test/lib/python3.8/site-packages/importlib_metadata/py.typed b/backend/test/lib/python3.8/site-packages/importlib_metadata/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/INSTALLER b/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/LICENSE.rst b/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/LICENSE.rst new file mode 100644 index 0000000000000000000000000000000000000000..7b190ca6712aa09eede3e6de79f68d7fa29072da --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2011 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/METADATA b/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..1d935ed3de4a0b303bda9abc9d0c4ac108921902 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/METADATA @@ -0,0 +1,97 @@ +Metadata-Version: 2.1 +Name: itsdangerous +Version: 2.1.2 +Summary: Safely pass data to untrusted environments and back. +Home-page: https://palletsprojects.com/p/itsdangerous/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +Maintainer: Pallets +Maintainer-email: contact@palletsprojects.com +License: BSD-3-Clause +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Documentation, https://itsdangerous.palletsprojects.com/ +Project-URL: Changes, https://itsdangerous.palletsprojects.com/changes/ +Project-URL: Source Code, https://github.com/pallets/itsdangerous/ +Project-URL: Issue Tracker, https://github.com/pallets/itsdangerous/issues/ +Project-URL: Twitter, https://twitter.com/PalletsTeam +Project-URL: Chat, https://discord.gg/pallets +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE.rst + +ItsDangerous +============ + +... so better sign this + +Various helpers to pass data to untrusted environments and to get it +back safe and sound. Data is cryptographically signed to ensure that a +token has not been tampered with. + +It's possible to customize how data is serialized. Data is compressed as +needed. A timestamp can be added and verified automatically while +loading a token. + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U itsdangerous + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +Here's how you could generate a token for transmitting a user's id and +name between web requests. + +.. code-block:: python + + from itsdangerous import URLSafeSerializer + auth_s = URLSafeSerializer("secret key", "auth") + token = auth_s.dumps({"id": 5, "name": "itsdangerous"}) + + print(token) + # eyJpZCI6NSwibmFtZSI6Iml0c2Rhbmdlcm91cyJ9.6YP6T0BaO67XP--9UzTrmurXSmg + + data = auth_s.loads(token) + print(data["name"]) + # itsdangerous + + +Donate +------ + +The Pallets organization develops and supports ItsDangerous and other +popular packages. In order to grow the community of contributors and +users, and allow the maintainers to devote more time to the projects, +`please donate today`_. + +.. 
_please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://itsdangerous.palletsprojects.com/ +- Changes: https://itsdangerous.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/ItsDangerous/ +- Source Code: https://github.com/pallets/itsdangerous/ +- Issue Tracker: https://github.com/pallets/itsdangerous/issues/ +- Website: https://palletsprojects.com/p/itsdangerous/ +- Twitter: https://twitter.com/PalletsTeam +- Chat: https://discord.gg/pallets + + diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/RECORD b/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..bc9d6e64119259f38c3c85136eb820d765b40482 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/RECORD @@ -0,0 +1,23 @@ +itsdangerous-2.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +itsdangerous-2.1.2.dist-info/LICENSE.rst,sha256=Y68JiRtr6K0aQlLtQ68PTvun_JSOIoNnvtfzxa4LCdc,1475 +itsdangerous-2.1.2.dist-info/METADATA,sha256=ThrHIJQ_6XlfbDMCAVe_hawT7IXiIxnTBIDrwxxtucQ,2928 +itsdangerous-2.1.2.dist-info/RECORD,, +itsdangerous-2.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 +itsdangerous-2.1.2.dist-info/top_level.txt,sha256=gKN1OKLk81i7fbWWildJA88EQ9NhnGMSvZqhfz9ICjk,13 +itsdangerous/__init__.py,sha256=n4mkyjlIVn23pgsgCIw0MJKPdcHIetyeRpe5Fwsn8qg,876 +itsdangerous/__pycache__/__init__.cpython-38.pyc,, +itsdangerous/__pycache__/_json.cpython-38.pyc,, +itsdangerous/__pycache__/encoding.cpython-38.pyc,, +itsdangerous/__pycache__/exc.cpython-38.pyc,, +itsdangerous/__pycache__/serializer.cpython-38.pyc,, +itsdangerous/__pycache__/signer.cpython-38.pyc,, +itsdangerous/__pycache__/timed.cpython-38.pyc,, +itsdangerous/__pycache__/url_safe.cpython-38.pyc,, +itsdangerous/_json.py,sha256=wIhs_7-_XZolmyr-JvKNiy_LgAcfevYR0qhCVdlIhg8,450 +itsdangerous/encoding.py,sha256=pgh86snHC76dPLNCnPlrjR5SaYL_M8H-gWRiiLNbhCU,1419 +itsdangerous/exc.py,sha256=VFxmP2lMoSJFqxNMzWonqs35ROII4-fvCBfG0v1Tkbs,3206 +itsdangerous/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +itsdangerous/serializer.py,sha256=zgZ1-U705jHDpt62x_pmLJdryEKDNAbt5UkJtnkcCSw,11144 +itsdangerous/signer.py,sha256=QUH0iX0in-OTptMAXKU5zWMwmOCXn1fsDsubXiGdFN4,9367 +itsdangerous/timed.py,sha256=5CBWLds4Nm8-3bFVC8RxNzFjx6PSwjch8wuZ5cwcHFI,8174 +itsdangerous/url_safe.py,sha256=5bC4jSKOjWNRkWrFseifWVXUnHnPgwOLROjiOwb-eeo,2402 diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/WHEEL b/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..becc9a66ea739ba941d48a749e248761cc6e658a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.1) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/top_level.txt b/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..e163955e80928012f0c46f3d2eca000966af4f93 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/itsdangerous-2.1.2.dist-info/top_level.txt @@ -0,0 +1 @@ +itsdangerous diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/__init__.py 
b/backend/test/lib/python3.8/site-packages/itsdangerous/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..fdb2dfd00ac13a004f58940e7ee617129a793bb7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/itsdangerous/__init__.py @@ -0,0 +1,19 @@ +from .encoding import base64_decode as base64_decode +from .encoding import base64_encode as base64_encode +from .encoding import want_bytes as want_bytes +from .exc import BadData as BadData +from .exc import BadHeader as BadHeader +from .exc import BadPayload as BadPayload +from .exc import BadSignature as BadSignature +from .exc import BadTimeSignature as BadTimeSignature +from .exc import SignatureExpired as SignatureExpired +from .serializer import Serializer as Serializer +from .signer import HMACAlgorithm as HMACAlgorithm +from .signer import NoneAlgorithm as NoneAlgorithm +from .signer import Signer as Signer +from .timed import TimedSerializer as TimedSerializer +from .timed import TimestampSigner as TimestampSigner +from .url_safe import URLSafeSerializer as URLSafeSerializer +from .url_safe import URLSafeTimedSerializer as URLSafeTimedSerializer + +__version__ = "2.1.2" diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..20780cfad9cd922d6e99c36bfbe79c9c7d807d95 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/_json.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/_json.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..29e0ae26ce3605bf8414225845c86ed947fd24dd Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/_json.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/encoding.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/encoding.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9c9b491d46e96a480a846e8eea4b791cc16dd77a Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/encoding.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/exc.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/exc.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..47ad4c2cd2acb60fe55b000bec9d64c3b31eb3a1 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/exc.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/serializer.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/serializer.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..76a6ddca06630c95ddcdd7180f798359ab219810 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/serializer.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/signer.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/signer.cpython-38.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..a6f624981e335ccc9b13efc509f32ee127fdf05b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/signer.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/timed.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/timed.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..55697d1893fb3c7f67c7e18cddf91206fd3b8cad Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/timed.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/url_safe.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/url_safe.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..659809280853b3c8365e002c56afea680d0afc3a Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/itsdangerous/__pycache__/url_safe.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/_json.py b/backend/test/lib/python3.8/site-packages/itsdangerous/_json.py new file mode 100644 index 0000000000000000000000000000000000000000..c70d37a958bb59771f993e880597adc61de5161c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/itsdangerous/_json.py @@ -0,0 +1,16 @@ +import json as _json +import typing as _t + + +class _CompactJSON: + """Wrapper around json module that strips whitespace.""" + + @staticmethod + def loads(payload: _t.Union[str, bytes]) -> _t.Any: + return _json.loads(payload) + + @staticmethod + def dumps(obj: _t.Any, **kwargs: _t.Any) -> str: + kwargs.setdefault("ensure_ascii", False) + kwargs.setdefault("separators", (",", ":")) + return _json.dumps(obj, **kwargs) diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/encoding.py b/backend/test/lib/python3.8/site-packages/itsdangerous/encoding.py new file mode 100644 index 0000000000000000000000000000000000000000..edb04d1a632e246f918cbe06cf95ab3ef394c345 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/itsdangerous/encoding.py @@ -0,0 +1,54 @@ +import base64 +import string +import struct +import typing as _t + +from .exc import BadData + +_t_str_bytes = _t.Union[str, bytes] + + +def want_bytes( + s: _t_str_bytes, encoding: str = "utf-8", errors: str = "strict" +) -> bytes: + if isinstance(s, str): + s = s.encode(encoding, errors) + + return s + + +def base64_encode(string: _t_str_bytes) -> bytes: + """Base64 encode a string of bytes or text. The resulting bytes are + safe to use in URLs. + """ + string = want_bytes(string) + return base64.urlsafe_b64encode(string).rstrip(b"=") + + +def base64_decode(string: _t_str_bytes) -> bytes: + """Base64 decode a URL-safe string of bytes or text. The result is + bytes. 
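The function body that follows restores the stripped ``=`` padding before decoding; a self-contained sketch of that arithmetic, using an assumed example token:

    import base64

    token = b"eyJpZCI6NX0"                   # urlsafe base64 with '=' padding stripped
    token += b"=" * (-len(token) % 4)        # -11 % 4 == 1, so one '=' is restored
    print(base64.urlsafe_b64decode(token))   # b'{"id":5}'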
+ """ + string = want_bytes(string, encoding="ascii", errors="ignore") + string += b"=" * (-len(string) % 4) + + try: + return base64.urlsafe_b64decode(string) + except (TypeError, ValueError) as e: + raise BadData("Invalid base64-encoded data") from e + + +# The alphabet used by base64.urlsafe_* +_base64_alphabet = f"{string.ascii_letters}{string.digits}-_=".encode("ascii") + +_int64_struct = struct.Struct(">Q") +_int_to_bytes = _int64_struct.pack +_bytes_to_int = _t.cast("_t.Callable[[bytes], _t.Tuple[int]]", _int64_struct.unpack) + + +def int_to_bytes(num: int) -> bytes: + return _int_to_bytes(num).lstrip(b"\x00") + + +def bytes_to_int(bytestr: bytes) -> int: + return _bytes_to_int(bytestr.rjust(8, b"\x00"))[0] diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/exc.py b/backend/test/lib/python3.8/site-packages/itsdangerous/exc.py new file mode 100644 index 0000000000000000000000000000000000000000..c38a6af5205520f0d859df52920b54bd55680cdd --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/itsdangerous/exc.py @@ -0,0 +1,107 @@ +import typing as _t +from datetime import datetime + +_t_opt_any = _t.Optional[_t.Any] +_t_opt_exc = _t.Optional[Exception] + + +class BadData(Exception): + """Raised if bad data of any sort was encountered. This is the base + for all exceptions that ItsDangerous defines. + + .. versionadded:: 0.15 + """ + + def __init__(self, message: str): + super().__init__(message) + self.message = message + + def __str__(self) -> str: + return self.message + + +class BadSignature(BadData): + """Raised if a signature does not match.""" + + def __init__(self, message: str, payload: _t_opt_any = None): + super().__init__(message) + + #: The payload that failed the signature test. In some + #: situations you might still want to inspect this, even if + #: you know it was tampered with. + #: + #: .. versionadded:: 0.14 + self.payload: _t_opt_any = payload + + +class BadTimeSignature(BadSignature): + """Raised if a time-based signature is invalid. This is a subclass + of :class:`BadSignature`. + """ + + def __init__( + self, + message: str, + payload: _t_opt_any = None, + date_signed: _t.Optional[datetime] = None, + ): + super().__init__(message, payload) + + #: If the signature expired this exposes the date of when the + #: signature was created. This can be helpful in order to + #: tell the user how long a link has been gone stale. + #: + #: .. versionchanged:: 2.0 + #: The datetime value is timezone-aware rather than naive. + #: + #: .. versionadded:: 0.14 + self.date_signed = date_signed + + +class SignatureExpired(BadTimeSignature): + """Raised if a signature timestamp is older than ``max_age``. This + is a subclass of :exc:`BadTimeSignature`. + """ + + +class BadHeader(BadSignature): + """Raised if a signed header is invalid in some form. This only + happens for serializers that have a header that goes with the + signature. + + .. versionadded:: 0.24 + """ + + def __init__( + self, + message: str, + payload: _t_opt_any = None, + header: _t_opt_any = None, + original_error: _t_opt_exc = None, + ): + super().__init__(message, payload) + + #: If the header is actually available but just malformed it + #: might be stored here. + self.header: _t_opt_any = header + + #: If available, the error that indicates why the payload was + #: not valid. This might be ``None``. + self.original_error: _t_opt_exc = original_error + + +class BadPayload(BadData): + """Raised if a payload is invalid. 
This could happen if the payload + is loaded despite an invalid signature, or if there is a mismatch + between the serializer and deserializer. The original exception + that occurred during loading is stored on as :attr:`original_error`. + + .. versionadded:: 0.15 + """ + + def __init__(self, message: str, original_error: _t_opt_exc = None): + super().__init__(message) + + #: If available, the error that indicates why the payload was + #: not valid. This might be ``None``. + self.original_error: _t_opt_exc = original_error diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/py.typed b/backend/test/lib/python3.8/site-packages/itsdangerous/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/serializer.py b/backend/test/lib/python3.8/site-packages/itsdangerous/serializer.py new file mode 100644 index 0000000000000000000000000000000000000000..9f4a84a172cfe852d2eefd46fd9942f20aeee82a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/itsdangerous/serializer.py @@ -0,0 +1,295 @@ +import json +import typing as _t + +from .encoding import want_bytes +from .exc import BadPayload +from .exc import BadSignature +from .signer import _make_keys_list +from .signer import Signer + +_t_str_bytes = _t.Union[str, bytes] +_t_opt_str_bytes = _t.Optional[_t_str_bytes] +_t_kwargs = _t.Dict[str, _t.Any] +_t_opt_kwargs = _t.Optional[_t_kwargs] +_t_signer = _t.Type[Signer] +_t_fallbacks = _t.List[_t.Union[_t_kwargs, _t.Tuple[_t_signer, _t_kwargs], _t_signer]] +_t_load_unsafe = _t.Tuple[bool, _t.Any] +_t_secret_key = _t.Union[_t.Iterable[_t_str_bytes], _t_str_bytes] + + +def is_text_serializer(serializer: _t.Any) -> bool: + """Checks whether a serializer generates text or binary.""" + return isinstance(serializer.dumps({}), str) + + +class Serializer: + """A serializer wraps a :class:`~itsdangerous.signer.Signer` to + enable serializing and securely signing data other than bytes. It + can unsign to verify that the data hasn't been changed. + + The serializer provides :meth:`dumps` and :meth:`loads`, similar to + :mod:`json`, and by default uses :mod:`json` internally to serialize + the data to bytes. + + The secret key should be a random string of ``bytes`` and should not + be saved to code or version control. Different salts should be used + to distinguish signing in different contexts. See :doc:`/concepts` + for information about the security of the secret key and salt. + + :param secret_key: The secret key to sign and verify with. Can be a + list of keys, oldest to newest, to support key rotation. + :param salt: Extra key to combine with ``secret_key`` to distinguish + signatures in different contexts. + :param serializer: An object that provides ``dumps`` and ``loads`` + methods for serializing data to a string. Defaults to + :attr:`default_serializer`, which defaults to :mod:`json`. + :param serializer_kwargs: Keyword arguments to pass when calling + ``serializer.dumps``. + :param signer: A ``Signer`` class to instantiate when signing data. + Defaults to :attr:`default_signer`, which defaults to + :class:`~itsdangerous.signer.Signer`. + :param signer_kwargs: Keyword arguments to pass when instantiating + the ``Signer`` class. + :param fallback_signers: List of signer parameters to try when + unsigning with the default signer fails. Each item can be a dict + of ``signer_kwargs``, a ``Signer`` class, or a tuple of + ``(signer, signer_kwargs)``. 
Defaults to + :attr:`default_fallback_signers`. + + .. versionchanged:: 2.0 + Added support for key rotation by passing a list to + ``secret_key``. + + .. versionchanged:: 2.0 + Removed the default SHA-512 fallback signer from + ``default_fallback_signers``. + + .. versionchanged:: 1.1 + Added support for ``fallback_signers`` and configured a default + SHA-512 fallback. This fallback is for users who used the yanked + 1.0.0 release which defaulted to SHA-512. + + .. versionchanged:: 0.14 + The ``signer`` and ``signer_kwargs`` parameters were added to + the constructor. + """ + + #: The default serialization module to use to serialize data to a + #: string internally. The default is :mod:`json`, but can be changed + #: to any object that provides ``dumps`` and ``loads`` methods. + default_serializer: _t.Any = json + + #: The default ``Signer`` class to instantiate when signing data. + #: The default is :class:`itsdangerous.signer.Signer`. + default_signer: _t_signer = Signer + + #: The default fallback signers to try when unsigning fails. + default_fallback_signers: _t_fallbacks = [] + + def __init__( + self, + secret_key: _t_secret_key, + salt: _t_opt_str_bytes = b"itsdangerous", + serializer: _t.Any = None, + serializer_kwargs: _t_opt_kwargs = None, + signer: _t.Optional[_t_signer] = None, + signer_kwargs: _t_opt_kwargs = None, + fallback_signers: _t.Optional[_t_fallbacks] = None, + ): + #: The list of secret keys to try for verifying signatures, from + #: oldest to newest. The newest (last) key is used for signing. + #: + #: This allows a key rotation system to keep a list of allowed + #: keys and remove expired ones. + self.secret_keys: _t.List[bytes] = _make_keys_list(secret_key) + + if salt is not None: + salt = want_bytes(salt) + # if salt is None then the signer's default is used + + self.salt = salt + + if serializer is None: + serializer = self.default_serializer + + self.serializer: _t.Any = serializer + self.is_text_serializer: bool = is_text_serializer(serializer) + + if signer is None: + signer = self.default_signer + + self.signer: _t_signer = signer + self.signer_kwargs: _t_kwargs = signer_kwargs or {} + + if fallback_signers is None: + fallback_signers = list(self.default_fallback_signers or ()) + + self.fallback_signers: _t_fallbacks = fallback_signers + self.serializer_kwargs: _t_kwargs = serializer_kwargs or {} + + @property + def secret_key(self) -> bytes: + """The newest (last) entry in the :attr:`secret_keys` list. This + is for compatibility from before key rotation support was added. + """ + return self.secret_keys[-1] + + def load_payload( + self, payload: bytes, serializer: _t.Optional[_t.Any] = None + ) -> _t.Any: + """Loads the encoded object. This function raises + :class:`.BadPayload` if the payload is not valid. The + ``serializer`` parameter can be used to override the serializer + stored on the class. The encoded ``payload`` should always be + bytes. + """ + if serializer is None: + serializer = self.serializer + is_text = self.is_text_serializer + else: + is_text = is_text_serializer(serializer) + + try: + if is_text: + return serializer.loads(payload.decode("utf-8")) + + return serializer.loads(payload) + except Exception as e: + raise BadPayload( + "Could not load the payload because an exception" + " occurred on unserializing the data.", + original_error=e, + ) from e + + def dump_payload(self, obj: _t.Any) -> bytes: + """Dumps the encoded object. The return value is always bytes. 
+ If the internal serializer returns text, the value will be + encoded as UTF-8. + """ + return want_bytes(self.serializer.dumps(obj, **self.serializer_kwargs)) + + def make_signer(self, salt: _t_opt_str_bytes = None) -> Signer: + """Creates a new instance of the signer to be used. The default + implementation uses the :class:`.Signer` base class. + """ + if salt is None: + salt = self.salt + + return self.signer(self.secret_keys, salt=salt, **self.signer_kwargs) + + def iter_unsigners(self, salt: _t_opt_str_bytes = None) -> _t.Iterator[Signer]: + """Iterates over all signers to be tried for unsigning. Starts + with the configured signer, then constructs each signer + specified in ``fallback_signers``. + """ + if salt is None: + salt = self.salt + + yield self.make_signer(salt) + + for fallback in self.fallback_signers: + if isinstance(fallback, dict): + kwargs = fallback + fallback = self.signer + elif isinstance(fallback, tuple): + fallback, kwargs = fallback + else: + kwargs = self.signer_kwargs + + for secret_key in self.secret_keys: + yield fallback(secret_key, salt=salt, **kwargs) + + def dumps(self, obj: _t.Any, salt: _t_opt_str_bytes = None) -> _t_str_bytes: + """Returns a signed string serialized with the internal + serializer. The return value can be either a byte or unicode + string depending on the format of the internal serializer. + """ + payload = want_bytes(self.dump_payload(obj)) + rv = self.make_signer(salt).sign(payload) + + if self.is_text_serializer: + return rv.decode("utf-8") + + return rv + + def dump(self, obj: _t.Any, f: _t.IO, salt: _t_opt_str_bytes = None) -> None: + """Like :meth:`dumps` but dumps into a file. The file handle has + to be compatible with what the internal serializer expects. + """ + f.write(self.dumps(obj, salt)) + + def loads( + self, s: _t_str_bytes, salt: _t_opt_str_bytes = None, **kwargs: _t.Any + ) -> _t.Any: + """Reverse of :meth:`dumps`. Raises :exc:`.BadSignature` if the + signature validation fails. + """ + s = want_bytes(s) + last_exception = None + + for signer in self.iter_unsigners(salt): + try: + return self.load_payload(signer.unsign(s)) + except BadSignature as err: + last_exception = err + + raise _t.cast(BadSignature, last_exception) + + def load(self, f: _t.IO, salt: _t_opt_str_bytes = None) -> _t.Any: + """Like :meth:`loads` but loads from a file.""" + return self.loads(f.read(), salt) + + def loads_unsafe( + self, s: _t_str_bytes, salt: _t_opt_str_bytes = None + ) -> _t_load_unsafe: + """Like :meth:`loads` but without verifying the signature. This + is potentially very dangerous to use depending on how your + serializer works. The return value is ``(signature_valid, + payload)`` instead of just the payload. The first item will be a + boolean that indicates if the signature is valid. This function + never fails. + + Use it for debugging only and if you know that your serializer + module is not exploitable (for example, do not use it with a + pickle serializer). + + .. versionadded:: 0.15 + """ + return self._loads_unsafe_impl(s, salt) + + def _loads_unsafe_impl( + self, + s: _t_str_bytes, + salt: _t_opt_str_bytes, + load_kwargs: _t_opt_kwargs = None, + load_payload_kwargs: _t_opt_kwargs = None, + ) -> _t_load_unsafe: + """Low level helper function to implement :meth:`loads_unsafe` + in serializer subclasses. 
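A minimal sketch of the ``(signature_valid, payload)`` contract and of the key-rotation list described in the class docstring, assuming this vendored itsdangerous is importable (key names are illustrative):

    from itsdangerous import Serializer

    old = Serializer("key-1")
    token = old.dumps({"user": 1})

    rotated = Serializer(["key-1", "key-2"])  # signs with 'key-2', verifies with both
    print(rotated.loads(token))               # {'user': 1} -- old signature still verifies

    valid, payload = rotated.loads_unsafe(b"not-a-token")
    print(valid, payload)                     # False None -- never raises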
+ """ + if load_kwargs is None: + load_kwargs = {} + + try: + return True, self.loads(s, salt=salt, **load_kwargs) + except BadSignature as e: + if e.payload is None: + return False, None + + if load_payload_kwargs is None: + load_payload_kwargs = {} + + try: + return ( + False, + self.load_payload(e.payload, **load_payload_kwargs), + ) + except BadPayload: + return False, None + + def load_unsafe(self, f: _t.IO, salt: _t_opt_str_bytes = None) -> _t_load_unsafe: + """Like :meth:`loads_unsafe` but loads from a file. + + .. versionadded:: 0.15 + """ + return self.loads_unsafe(f.read(), salt=salt) diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/signer.py b/backend/test/lib/python3.8/site-packages/itsdangerous/signer.py new file mode 100644 index 0000000000000000000000000000000000000000..aa12005e9af95133ebada8e0e77da77f3b924db8 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/itsdangerous/signer.py @@ -0,0 +1,257 @@ +import hashlib +import hmac +import typing as _t + +from .encoding import _base64_alphabet +from .encoding import base64_decode +from .encoding import base64_encode +from .encoding import want_bytes +from .exc import BadSignature + +_t_str_bytes = _t.Union[str, bytes] +_t_opt_str_bytes = _t.Optional[_t_str_bytes] +_t_secret_key = _t.Union[_t.Iterable[_t_str_bytes], _t_str_bytes] + + +class SigningAlgorithm: + """Subclasses must implement :meth:`get_signature` to provide + signature generation functionality. + """ + + def get_signature(self, key: bytes, value: bytes) -> bytes: + """Returns the signature for the given key and value.""" + raise NotImplementedError() + + def verify_signature(self, key: bytes, value: bytes, sig: bytes) -> bool: + """Verifies the given signature matches the expected + signature. + """ + return hmac.compare_digest(sig, self.get_signature(key, value)) + + +class NoneAlgorithm(SigningAlgorithm): + """Provides an algorithm that does not perform any signing and + returns an empty signature. + """ + + def get_signature(self, key: bytes, value: bytes) -> bytes: + return b"" + + +class HMACAlgorithm(SigningAlgorithm): + """Provides signature generation using HMACs.""" + + #: The digest method to use with the MAC algorithm. This defaults to + #: SHA1, but can be changed to any other function in the hashlib + #: module. + default_digest_method: _t.Any = staticmethod(hashlib.sha1) + + def __init__(self, digest_method: _t.Any = None): + if digest_method is None: + digest_method = self.default_digest_method + + self.digest_method: _t.Any = digest_method + + def get_signature(self, key: bytes, value: bytes) -> bytes: + mac = hmac.new(key, msg=value, digestmod=self.digest_method) + return mac.digest() + + +def _make_keys_list(secret_key: _t_secret_key) -> _t.List[bytes]: + if isinstance(secret_key, (str, bytes)): + return [want_bytes(secret_key)] + + return [want_bytes(s) for s in secret_key] + + +class Signer: + """A signer securely signs bytes, then unsigns them to verify that + the value hasn't been changed. + + The secret key should be a random string of ``bytes`` and should not + be saved to code or version control. Different salts should be used + to distinguish signing in different contexts. See :doc:`/concepts` + for information about the security of the secret key and salt. + + :param secret_key: The secret key to sign and verify with. Can be a + list of keys, oldest to newest, to support key rotation. + :param salt: Extra key to combine with ``secret_key`` to distinguish + signatures in different contexts. 
+ :param sep: Separator between the signature and value. + :param key_derivation: How to derive the signing key from the secret + key and salt. Possible values are ``concat``, ``django-concat``, + or ``hmac``. Defaults to :attr:`default_key_derivation`, which + defaults to ``django-concat``. + :param digest_method: Hash function to use when generating the HMAC + signature. Defaults to :attr:`default_digest_method`, which + defaults to :func:`hashlib.sha1`. Note that the security of the + hash alone doesn't apply when used intermediately in HMAC. + :param algorithm: A :class:`SigningAlgorithm` instance to use + instead of building a default :class:`HMACAlgorithm` with the + ``digest_method``. + + .. versionchanged:: 2.0 + Added support for key rotation by passing a list to + ``secret_key``. + + .. versionchanged:: 0.18 + ``algorithm`` was added as an argument to the class constructor. + + .. versionchanged:: 0.14 + ``key_derivation`` and ``digest_method`` were added as arguments + to the class constructor. + """ + + #: The default digest method to use for the signer. The default is + #: :func:`hashlib.sha1`, but can be changed to any :mod:`hashlib` or + #: compatible object. Note that the security of the hash alone + #: doesn't apply when used intermediately in HMAC. + #: + #: .. versionadded:: 0.14 + default_digest_method: _t.Any = staticmethod(hashlib.sha1) + + #: The default scheme to use to derive the signing key from the + #: secret key and salt. The default is ``django-concat``. Possible + #: values are ``concat``, ``django-concat``, and ``hmac``. + #: + #: .. versionadded:: 0.14 + default_key_derivation: str = "django-concat" + + def __init__( + self, + secret_key: _t_secret_key, + salt: _t_opt_str_bytes = b"itsdangerous.Signer", + sep: _t_str_bytes = b".", + key_derivation: _t.Optional[str] = None, + digest_method: _t.Optional[_t.Any] = None, + algorithm: _t.Optional[SigningAlgorithm] = None, + ): + #: The list of secret keys to try for verifying signatures, from + #: oldest to newest. The newest (last) key is used for signing. + #: + #: This allows a key rotation system to keep a list of allowed + #: keys and remove expired ones. + self.secret_keys: _t.List[bytes] = _make_keys_list(secret_key) + self.sep: bytes = want_bytes(sep) + + if self.sep in _base64_alphabet: + raise ValueError( + "The given separator cannot be used because it may be" + " contained in the signature itself. ASCII letters," + " digits, and '-_=' must not be used." + ) + + if salt is not None: + salt = want_bytes(salt) + else: + salt = b"itsdangerous.Signer" + + self.salt = salt + + if key_derivation is None: + key_derivation = self.default_key_derivation + + self.key_derivation: str = key_derivation + + if digest_method is None: + digest_method = self.default_digest_method + + self.digest_method: _t.Any = digest_method + + if algorithm is None: + algorithm = HMACAlgorithm(self.digest_method) + + self.algorithm: SigningAlgorithm = algorithm + + @property + def secret_key(self) -> bytes: + """The newest (last) entry in the :attr:`secret_keys` list. This + is for compatibility from before key rotation support was added. + """ + return self.secret_keys[-1] + + def derive_key(self, secret_key: _t_opt_str_bytes = None) -> bytes: + """This method is called to derive the key. The default key + derivation choices can be overridden here. Key derivation is not + intended to be used as a security method to make a complex key + out of a short password. Instead you should use large random + secret keys. 
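A self-contained sketch of the default ``django-concat`` derivation implemented below, using the class's default salt and SHA-1 digest (the secret key value is illustrative only):

    import hashlib

    secret_key = b"a-large-random-secret"    # illustrative only
    salt = b"itsdangerous.Signer"            # the Signer default salt
    signing_key = hashlib.sha1(salt + b"signer" + secret_key).digest()
    print(len(signing_key))                  # 20 bytes, fed to HMAC as the key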
+ + :param secret_key: A specific secret key to derive from. + Defaults to the last item in :attr:`secret_keys`. + + .. versionchanged:: 2.0 + Added the ``secret_key`` parameter. + """ + if secret_key is None: + secret_key = self.secret_keys[-1] + else: + secret_key = want_bytes(secret_key) + + if self.key_derivation == "concat": + return _t.cast(bytes, self.digest_method(self.salt + secret_key).digest()) + elif self.key_derivation == "django-concat": + return _t.cast( + bytes, self.digest_method(self.salt + b"signer" + secret_key).digest() + ) + elif self.key_derivation == "hmac": + mac = hmac.new(secret_key, digestmod=self.digest_method) + mac.update(self.salt) + return mac.digest() + elif self.key_derivation == "none": + return secret_key + else: + raise TypeError("Unknown key derivation method") + + def get_signature(self, value: _t_str_bytes) -> bytes: + """Returns the signature for the given value.""" + value = want_bytes(value) + key = self.derive_key() + sig = self.algorithm.get_signature(key, value) + return base64_encode(sig) + + def sign(self, value: _t_str_bytes) -> bytes: + """Signs the given string.""" + value = want_bytes(value) + return value + self.sep + self.get_signature(value) + + def verify_signature(self, value: _t_str_bytes, sig: _t_str_bytes) -> bool: + """Verifies the signature for the given value.""" + try: + sig = base64_decode(sig) + except Exception: + return False + + value = want_bytes(value) + + for secret_key in reversed(self.secret_keys): + key = self.derive_key(secret_key) + + if self.algorithm.verify_signature(key, value, sig): + return True + + return False + + def unsign(self, signed_value: _t_str_bytes) -> bytes: + """Unsigns the given string.""" + signed_value = want_bytes(signed_value) + + if self.sep not in signed_value: + raise BadSignature(f"No {self.sep!r} found in value") + + value, sig = signed_value.rsplit(self.sep, 1) + + if self.verify_signature(value, sig): + return value + + raise BadSignature(f"Signature {sig!r} does not match", payload=value) + + def validate(self, signed_value: _t_str_bytes) -> bool: + """Only validates the given signed value. Returns ``True`` if + the signature exists and is valid. + """ + try: + self.unsign(signed_value) + return True + except BadSignature: + return False diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/timed.py b/backend/test/lib/python3.8/site-packages/itsdangerous/timed.py new file mode 100644 index 0000000000000000000000000000000000000000..cad8da341c72bbf0b92ff37746d846fbd17317f4 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/itsdangerous/timed.py @@ -0,0 +1,234 @@ +import time +import typing +import typing as _t +from datetime import datetime +from datetime import timezone + +from .encoding import base64_decode +from .encoding import base64_encode +from .encoding import bytes_to_int +from .encoding import int_to_bytes +from .encoding import want_bytes +from .exc import BadSignature +from .exc import BadTimeSignature +from .exc import SignatureExpired +from .serializer import Serializer +from .signer import Signer + +_t_str_bytes = _t.Union[str, bytes] +_t_opt_str_bytes = _t.Optional[_t_str_bytes] +_t_opt_int = _t.Optional[int] + +if _t.TYPE_CHECKING: + import typing_extensions as _te + + +class TimestampSigner(Signer): + """Works like the regular :class:`.Signer` but also records the time + of the signing and can be used to expire signatures. The + :meth:`unsign` method can raise :exc:`.SignatureExpired` if the + unsigning failed because the signature is expired. 
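A minimal usage sketch, assuming this vendored itsdangerous is importable (key value illustrative):

    import time

    from itsdangerous import TimestampSigner, SignatureExpired

    signer = TimestampSigner("secret-key")
    token = signer.sign("hello")              # b'hello.<ts>.<sig>'
    print(signer.unsign(token, max_age=60))   # b'hello' while fresh

    time.sleep(2)
    try:
        signer.unsign(token, max_age=1)
    except SignatureExpired as exc:
        print("expired; signed at", exc.date_signed)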
+ """ + + def get_timestamp(self) -> int: + """Returns the current timestamp. The function must return an + integer. + """ + return int(time.time()) + + def timestamp_to_datetime(self, ts: int) -> datetime: + """Convert the timestamp from :meth:`get_timestamp` into an + aware :class`datetime.datetime` in UTC. + + .. versionchanged:: 2.0 + The timestamp is returned as a timezone-aware ``datetime`` + in UTC rather than a naive ``datetime`` assumed to be UTC. + """ + return datetime.fromtimestamp(ts, tz=timezone.utc) + + def sign(self, value: _t_str_bytes) -> bytes: + """Signs the given string and also attaches time information.""" + value = want_bytes(value) + timestamp = base64_encode(int_to_bytes(self.get_timestamp())) + sep = want_bytes(self.sep) + value = value + sep + timestamp + return value + sep + self.get_signature(value) + + # Ignore overlapping signatures check, return_timestamp is the only + # parameter that affects the return type. + + @typing.overload + def unsign( # type: ignore + self, + signed_value: _t_str_bytes, + max_age: _t_opt_int = None, + return_timestamp: "_te.Literal[False]" = False, + ) -> bytes: + ... + + @typing.overload + def unsign( + self, + signed_value: _t_str_bytes, + max_age: _t_opt_int = None, + return_timestamp: "_te.Literal[True]" = True, + ) -> _t.Tuple[bytes, datetime]: + ... + + def unsign( + self, + signed_value: _t_str_bytes, + max_age: _t_opt_int = None, + return_timestamp: bool = False, + ) -> _t.Union[_t.Tuple[bytes, datetime], bytes]: + """Works like the regular :meth:`.Signer.unsign` but can also + validate the time. See the base docstring of the class for + the general behavior. If ``return_timestamp`` is ``True`` the + timestamp of the signature will be returned as an aware + :class:`datetime.datetime` object in UTC. + + .. versionchanged:: 2.0 + The timestamp is returned as a timezone-aware ``datetime`` + in UTC rather than a naive ``datetime`` assumed to be UTC. + """ + try: + result = super().unsign(signed_value) + sig_error = None + except BadSignature as e: + sig_error = e + result = e.payload or b"" + + sep = want_bytes(self.sep) + + # If there is no timestamp in the result there is something + # seriously wrong. In case there was a signature error, we raise + # that one directly, otherwise we have a weird situation in + # which we shouldn't have come except someone uses a time-based + # serializer on non-timestamp data, so catch that. + if sep not in result: + if sig_error: + raise sig_error + + raise BadTimeSignature("timestamp missing", payload=result) + + value, ts_bytes = result.rsplit(sep, 1) + ts_int: _t_opt_int = None + ts_dt: _t.Optional[datetime] = None + + try: + ts_int = bytes_to_int(base64_decode(ts_bytes)) + except Exception: + pass + + # Signature is *not* okay. Raise a proper error now that we have + # split the value and the timestamp. + if sig_error is not None: + if ts_int is not None: + try: + ts_dt = self.timestamp_to_datetime(ts_int) + except (ValueError, OSError, OverflowError) as exc: + # Windows raises OSError + # 32-bit raises OverflowError + raise BadTimeSignature( + "Malformed timestamp", payload=value + ) from exc + + raise BadTimeSignature(str(sig_error), payload=value, date_signed=ts_dt) + + # Signature was okay but the timestamp is actually not there or + # malformed. Should not happen, but we handle it anyway. 
+ if ts_int is None: + raise BadTimeSignature("Malformed timestamp", payload=value) + + # Check timestamp is not older than max_age + if max_age is not None: + age = self.get_timestamp() - ts_int + + if age > max_age: + raise SignatureExpired( + f"Signature age {age} > {max_age} seconds", + payload=value, + date_signed=self.timestamp_to_datetime(ts_int), + ) + + if age < 0: + raise SignatureExpired( + f"Signature age {age} < 0 seconds", + payload=value, + date_signed=self.timestamp_to_datetime(ts_int), + ) + + if return_timestamp: + return value, self.timestamp_to_datetime(ts_int) + + return value + + def validate(self, signed_value: _t_str_bytes, max_age: _t_opt_int = None) -> bool: + """Only validates the given signed value. Returns ``True`` if + the signature exists and is valid.""" + try: + self.unsign(signed_value, max_age=max_age) + return True + except BadSignature: + return False + + +class TimedSerializer(Serializer): + """Uses :class:`TimestampSigner` instead of the default + :class:`.Signer`. + """ + + default_signer: _t.Type[TimestampSigner] = TimestampSigner + + def iter_unsigners( + self, salt: _t_opt_str_bytes = None + ) -> _t.Iterator[TimestampSigner]: + return _t.cast("_t.Iterator[TimestampSigner]", super().iter_unsigners(salt)) + + # TODO: Signature is incompatible because parameters were added + # before salt. + + def loads( # type: ignore + self, + s: _t_str_bytes, + max_age: _t_opt_int = None, + return_timestamp: bool = False, + salt: _t_opt_str_bytes = None, + ) -> _t.Any: + """Reverse of :meth:`dumps`, raises :exc:`.BadSignature` if the + signature validation fails. If a ``max_age`` is provided it will + ensure the signature is not older than that time in seconds. In + case the signature is outdated, :exc:`.SignatureExpired` is + raised. All arguments are forwarded to the signer's + :meth:`~TimestampSigner.unsign` method. + """ + s = want_bytes(s) + last_exception = None + + for signer in self.iter_unsigners(salt): + try: + base64d, timestamp = signer.unsign( + s, max_age=max_age, return_timestamp=True + ) + payload = self.load_payload(base64d) + + if return_timestamp: + return payload, timestamp + + return payload + except SignatureExpired: + # The signature was unsigned successfully but was + # expired. Do not try the next signer. + raise + except BadSignature as err: + last_exception = err + + raise _t.cast(BadSignature, last_exception) + + def loads_unsafe( # type: ignore + self, + s: _t_str_bytes, + max_age: _t_opt_int = None, + salt: _t_opt_str_bytes = None, + ) -> _t.Tuple[bool, _t.Any]: + return self._loads_unsafe_impl(s, salt, load_kwargs={"max_age": max_age}) diff --git a/backend/test/lib/python3.8/site-packages/itsdangerous/url_safe.py b/backend/test/lib/python3.8/site-packages/itsdangerous/url_safe.py new file mode 100644 index 0000000000000000000000000000000000000000..d5a9b0c26641a190531c6135b84b2aa9b37b4bf3 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/itsdangerous/url_safe.py @@ -0,0 +1,80 @@ +import typing as _t +import zlib + +from ._json import _CompactJSON +from .encoding import base64_decode +from .encoding import base64_encode +from .exc import BadPayload +from .serializer import Serializer +from .timed import TimedSerializer + + +class URLSafeSerializerMixin(Serializer): + """Mixed in with a regular serializer it will attempt to zlib + compress the string to make it shorter if necessary. It will also + base64 encode the string so that it can safely be placed in a URL. 
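+
+    A short round-trip sketch (key and payload are illustrative)::
+
+        s = URLSafeSerializer("secret-key")
+        token = s.dumps({"id": 5})  # compact, URL safe text
+        s.loads(token)  # {'id': 5}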
+ """ + + default_serializer = _CompactJSON + + def load_payload( + self, + payload: bytes, + *args: _t.Any, + serializer: _t.Optional[_t.Any] = None, + **kwargs: _t.Any, + ) -> _t.Any: + decompress = False + + if payload.startswith(b"."): + payload = payload[1:] + decompress = True + + try: + json = base64_decode(payload) + except Exception as e: + raise BadPayload( + "Could not base64 decode the payload because of an exception", + original_error=e, + ) from e + + if decompress: + try: + json = zlib.decompress(json) + except Exception as e: + raise BadPayload( + "Could not zlib decompress the payload before decoding the payload", + original_error=e, + ) from e + + return super().load_payload(json, *args, **kwargs) + + def dump_payload(self, obj: _t.Any) -> bytes: + json = super().dump_payload(obj) + is_compressed = False + compressed = zlib.compress(json) + + if len(compressed) < (len(json) - 1): + json = compressed + is_compressed = True + + base64d = base64_encode(json) + + if is_compressed: + base64d = b"." + base64d + + return base64d + + +class URLSafeSerializer(URLSafeSerializerMixin, Serializer): + """Works like :class:`.Serializer` but dumps and loads into a URL + safe string consisting of the upper and lowercase character of the + alphabet as well as ``'_'``, ``'-'`` and ``'.'``. + """ + + +class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer): + """Works like :class:`.TimedSerializer` but dumps and loads into a + URL safe string consisting of the upper and lowercase character of + the alphabet as well as ``'_'``, ``'-'`` and ``'.'``. + """ diff --git a/backend/test/lib/python3.8/site-packages/jinja2/__init__.py b/backend/test/lib/python3.8/site-packages/jinja2/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e32392679edd18917d0f218bfb316d5256203042 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/__init__.py @@ -0,0 +1,37 @@ +"""Jinja is a template engine written in pure Python. It provides a +non-XML syntax that supports inline expressions and an optional +sandboxed environment. 
+""" +from .bccache import BytecodeCache as BytecodeCache +from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache +from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache +from .environment import Environment as Environment +from .environment import Template as Template +from .exceptions import TemplateAssertionError as TemplateAssertionError +from .exceptions import TemplateError as TemplateError +from .exceptions import TemplateNotFound as TemplateNotFound +from .exceptions import TemplateRuntimeError as TemplateRuntimeError +from .exceptions import TemplatesNotFound as TemplatesNotFound +from .exceptions import TemplateSyntaxError as TemplateSyntaxError +from .exceptions import UndefinedError as UndefinedError +from .loaders import BaseLoader as BaseLoader +from .loaders import ChoiceLoader as ChoiceLoader +from .loaders import DictLoader as DictLoader +from .loaders import FileSystemLoader as FileSystemLoader +from .loaders import FunctionLoader as FunctionLoader +from .loaders import ModuleLoader as ModuleLoader +from .loaders import PackageLoader as PackageLoader +from .loaders import PrefixLoader as PrefixLoader +from .runtime import ChainableUndefined as ChainableUndefined +from .runtime import DebugUndefined as DebugUndefined +from .runtime import make_logging_undefined as make_logging_undefined +from .runtime import StrictUndefined as StrictUndefined +from .runtime import Undefined as Undefined +from .utils import clear_caches as clear_caches +from .utils import is_undefined as is_undefined +from .utils import pass_context as pass_context +from .utils import pass_environment as pass_environment +from .utils import pass_eval_context as pass_eval_context +from .utils import select_autoescape as select_autoescape + +__version__ = "3.1.2" diff --git a/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b390b191cf557fa2824671acd2c4657ccc4c483e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/_identifier.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/_identifier.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cbb732ba0729106957fc6b0c14207cf961a6137f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/_identifier.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/async_utils.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/async_utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3ccd4b4180d4b5eeb510789692c894781353ab9c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/async_utils.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/bccache.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/bccache.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f45865f649875a8ba7f6af94549130593d2382cb Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/bccache.cpython-38.pyc differ diff --git 
a/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/sandbox.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/sandbox.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0053550633dce095b98c58fde549070fbbec0878 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/sandbox.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/tests.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/tests.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9a62d9e7827206a25129254fed2cf8ab64d10784 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/tests.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/utils.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dd95e65825c7dd82e247f80b9f9f553a6e5379f8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/utils.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/visitor.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/visitor.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a5223d5427d82d482097f6d2d20db14caa3d5764 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/jinja2/__pycache__/visitor.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/jinja2/_identifier.py b/backend/test/lib/python3.8/site-packages/jinja2/_identifier.py new file mode 100644 index 0000000000000000000000000000000000000000..928c1503c7d414a8a86bbf5a82c68d42cb089bd2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/_identifier.py @@ -0,0 +1,6 @@ +import re + +# generated by scripts/generate_identifier_pattern.py +pattern = re.compile( + r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950 +) diff --git a/backend/test/lib/python3.8/site-packages/jinja2/async_utils.py b/backend/test/lib/python3.8/site-packages/jinja2/async_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..1a4f3892cef1a53632476933f2ce2d86fc31b10a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/async_utils.py @@ -0,0 +1,84 @@ +import inspect +import typing as t +from functools import WRAPPER_ASSIGNMENTS +from functools import wraps + +from .utils import _PassArg +from .utils import pass_eval_context + +V = t.TypeVar("V") + + +def async_variant(normal_func): # type: ignore + def decorator(async_func): # type: ignore + pass_arg = _PassArg.from_obj(normal_func) + need_eval_context = pass_arg is None + + if pass_arg is _PassArg.environment: + + def 
is_async(args: t.Any) -> bool: + return t.cast(bool, args[0].is_async) + + else: + + def is_async(args: t.Any) -> bool: + return t.cast(bool, args[0].environment.is_async) + + # Take the doc and annotations from the sync function, but the + # name from the async function. Pallets-Sphinx-Themes + # build_function_directive expects __wrapped__ to point to the + # sync function. + async_func_attrs = ("__module__", "__name__", "__qualname__") + normal_func_attrs = tuple(set(WRAPPER_ASSIGNMENTS).difference(async_func_attrs)) + + @wraps(normal_func, assigned=normal_func_attrs) + @wraps(async_func, assigned=async_func_attrs, updated=()) + def wrapper(*args, **kwargs): # type: ignore + b = is_async(args) + + if need_eval_context: + args = args[1:] + + if b: + return async_func(*args, **kwargs) + + return normal_func(*args, **kwargs) + + if need_eval_context: + wrapper = pass_eval_context(wrapper) + + wrapper.jinja_async_variant = True + return wrapper + + return decorator + + +_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)} + + +async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V": + # Avoid a costly call to isawaitable + if type(value) in _common_primitives: + return t.cast("V", value) + + if inspect.isawaitable(value): + return await t.cast("t.Awaitable[V]", value) + + return t.cast("V", value) + + +async def auto_aiter( + iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", +) -> "t.AsyncIterator[V]": + if hasattr(iterable, "__aiter__"): + async for item in t.cast("t.AsyncIterable[V]", iterable): + yield item + else: + for item in t.cast("t.Iterable[V]", iterable): + yield item + + +async def auto_to_list( + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", +) -> t.List["V"]: + return [x async for x in auto_aiter(value)] diff --git a/backend/test/lib/python3.8/site-packages/jinja2/bccache.py b/backend/test/lib/python3.8/site-packages/jinja2/bccache.py new file mode 100644 index 0000000000000000000000000000000000000000..d0ddf56ef62b03cba6b6c5f9b94d819393f09d38 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/bccache.py @@ -0,0 +1,406 @@ +"""The optional bytecode cache system. This is useful if you have very +complex template situations and the compilation of all those templates +slows down your application too much. + +Situations where this is useful are often forking web applications that +are initialized on the first request. +""" +import errno +import fnmatch +import marshal +import os +import pickle +import stat +import sys +import tempfile +import typing as t +from hashlib import sha1 +from io import BytesIO +from types import CodeType + +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment + + class _MemcachedClient(te.Protocol): + def get(self, key: str) -> bytes: + ... + + def set(self, key: str, value: bytes, timeout: t.Optional[int] = None) -> None: + ... + + +bc_version = 5 +# Magic bytes to identify Jinja bytecode cache files. Contains the +# Python major and minor version to avoid loading incompatible bytecode +# if a project upgrades its Python version. +bc_magic = ( + b"j2" + + pickle.dumps(bc_version, 2) + + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2) +) + + +class Bucket: + """Buckets are used to store the bytecode for one template. It's created + and initialized by the bytecode cache and passed to the loading functions. + + The buckets get an internal checksum from the cache assigned and use this + to automatically reject outdated cache material. 
Individual bytecode + cache subclasses don't have to care about cache invalidation. + """ + + def __init__(self, environment: "Environment", key: str, checksum: str) -> None: + self.environment = environment + self.key = key + self.checksum = checksum + self.reset() + + def reset(self) -> None: + """Resets the bucket (unloads the bytecode).""" + self.code: t.Optional[CodeType] = None + + def load_bytecode(self, f: t.BinaryIO) -> None: + """Loads bytecode from a file or file like object.""" + # make sure the magic header is correct + magic = f.read(len(bc_magic)) + if magic != bc_magic: + self.reset() + return + # the source code of the file changed, we need to reload + checksum = pickle.load(f) + if self.checksum != checksum: + self.reset() + return + # if marshal_load fails then we need to reload + try: + self.code = marshal.load(f) + except (EOFError, ValueError, TypeError): + self.reset() + return + + def write_bytecode(self, f: t.IO[bytes]) -> None: + """Dump the bytecode into the file or file like object passed.""" + if self.code is None: + raise TypeError("can't write empty bucket") + f.write(bc_magic) + pickle.dump(self.checksum, f, 2) + marshal.dump(self.code, f) + + def bytecode_from_string(self, string: bytes) -> None: + """Load bytecode from bytes.""" + self.load_bytecode(BytesIO(string)) + + def bytecode_to_string(self) -> bytes: + """Return the bytecode as bytes.""" + out = BytesIO() + self.write_bytecode(out) + return out.getvalue() + + +class BytecodeCache: + """To implement your own bytecode cache you have to subclass this class + and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of + these methods are passed a :class:`~jinja2.bccache.Bucket`. + + A very basic bytecode cache that saves the bytecode on the file system:: + + from os import path + + class MyCache(BytecodeCache): + + def __init__(self, directory): + self.directory = directory + + def load_bytecode(self, bucket): + filename = path.join(self.directory, bucket.key) + if path.exists(filename): + with open(filename, 'rb') as f: + bucket.load_bytecode(f) + + def dump_bytecode(self, bucket): + filename = path.join(self.directory, bucket.key) + with open(filename, 'wb') as f: + bucket.write_bytecode(f) + + A more advanced version of a filesystem based bytecode cache is part of + Jinja. + """ + + def load_bytecode(self, bucket: Bucket) -> None: + """Subclasses have to override this method to load bytecode into a + bucket. If they are not able to find code in the cache for the + bucket, it must not do anything. + """ + raise NotImplementedError() + + def dump_bytecode(self, bucket: Bucket) -> None: + """Subclasses have to override this method to write the bytecode + from a bucket back to the cache. If it unable to do so it must not + fail silently but raise an exception. + """ + raise NotImplementedError() + + def clear(self) -> None: + """Clears the cache. This method is not used by Jinja but should be + implemented to allow applications to clear the bytecode cache used + by a particular environment. 
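+        The default implementation is a no-op.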
+ """ + + def get_cache_key( + self, name: str, filename: t.Optional[t.Union[str]] = None + ) -> str: + """Returns the unique hash key for this template name.""" + hash = sha1(name.encode("utf-8")) + + if filename is not None: + hash.update(f"|{filename}".encode()) + + return hash.hexdigest() + + def get_source_checksum(self, source: str) -> str: + """Returns a checksum for the source.""" + return sha1(source.encode("utf-8")).hexdigest() + + def get_bucket( + self, + environment: "Environment", + name: str, + filename: t.Optional[str], + source: str, + ) -> Bucket: + """Return a cache bucket for the given template. All arguments are + mandatory but filename may be `None`. + """ + key = self.get_cache_key(name, filename) + checksum = self.get_source_checksum(source) + bucket = Bucket(environment, key, checksum) + self.load_bytecode(bucket) + return bucket + + def set_bucket(self, bucket: Bucket) -> None: + """Put the bucket into the cache.""" + self.dump_bytecode(bucket) + + +class FileSystemBytecodeCache(BytecodeCache): + """A bytecode cache that stores bytecode on the filesystem. It accepts + two arguments: The directory where the cache items are stored and a + pattern string that is used to build the filename. + + If no directory is specified a default cache directory is selected. On + Windows the user's temp directory is used, on UNIX systems a directory + is created for the user in the system temp directory. + + The pattern can be used to have multiple separate caches operate on the + same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s`` + is replaced with the cache key. + + >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache') + + This bytecode cache supports clearing of the cache using the clear method. + """ + + def __init__( + self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache" + ) -> None: + if directory is None: + directory = self._get_default_cache_dir() + self.directory = directory + self.pattern = pattern + + def _get_default_cache_dir(self) -> str: + def _unsafe_dir() -> "te.NoReturn": + raise RuntimeError( + "Cannot determine safe temp directory. You " + "need to explicitly provide one." + ) + + tmpdir = tempfile.gettempdir() + + # On windows the temporary directory is used specific unless + # explicitly forced otherwise. We can just use that. + if os.name == "nt": + return tmpdir + if not hasattr(os, "getuid"): + _unsafe_dir() + + dirname = f"_jinja2-cache-{os.getuid()}" + actual_dir = os.path.join(tmpdir, dirname) + + try: + os.mkdir(actual_dir, stat.S_IRWXU) + except OSError as e: + if e.errno != errno.EEXIST: + raise + try: + os.chmod(actual_dir, stat.S_IRWXU) + actual_dir_stat = os.lstat(actual_dir) + if ( + actual_dir_stat.st_uid != os.getuid() + or not stat.S_ISDIR(actual_dir_stat.st_mode) + or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU + ): + _unsafe_dir() + except OSError as e: + if e.errno != errno.EEXIST: + raise + + actual_dir_stat = os.lstat(actual_dir) + if ( + actual_dir_stat.st_uid != os.getuid() + or not stat.S_ISDIR(actual_dir_stat.st_mode) + or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU + ): + _unsafe_dir() + + return actual_dir + + def _get_cache_filename(self, bucket: Bucket) -> str: + return os.path.join(self.directory, self.pattern % (bucket.key,)) + + def load_bytecode(self, bucket: Bucket) -> None: + filename = self._get_cache_filename(bucket) + + # Don't test for existence before opening the file, since the + # file could disappear after the test before the open. 
+ try: + f = open(filename, "rb") + except (FileNotFoundError, IsADirectoryError, PermissionError): + # PermissionError can occur on Windows when an operation is + # in progress, such as calling clear(). + return + + with f: + bucket.load_bytecode(f) + + def dump_bytecode(self, bucket: Bucket) -> None: + # Write to a temporary file, then rename to the real name after + # writing. This avoids another process reading the file before + # it is fully written. + name = self._get_cache_filename(bucket) + f = tempfile.NamedTemporaryFile( + mode="wb", + dir=os.path.dirname(name), + prefix=os.path.basename(name), + suffix=".tmp", + delete=False, + ) + + def remove_silent() -> None: + try: + os.remove(f.name) + except OSError: + # Another process may have called clear(). On Windows, + # another program may be holding the file open. + pass + + try: + with f: + bucket.write_bytecode(f) + except BaseException: + remove_silent() + raise + + try: + os.replace(f.name, name) + except OSError: + # Another process may have called clear(). On Windows, + # another program may be holding the file open. + remove_silent() + except BaseException: + remove_silent() + raise + + def clear(self) -> None: + # imported lazily here because google app-engine doesn't support + # write access on the file system and the function does not exist + # normally. + from os import remove + + files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",)) + for filename in files: + try: + remove(os.path.join(self.directory, filename)) + except OSError: + pass + + +class MemcachedBytecodeCache(BytecodeCache): + """This class implements a bytecode cache that uses a memcache cache for + storing the information. It does not enforce a specific memcache library + (tummy's memcache or cmemcache) but will accept any class that provides + the minimal interface required. + + Libraries compatible with this class: + + - `cachelib <https://github.com/pallets/cachelib>`_ + - `python-memcached <https://pypi.org/project/python-memcached/>`_ + + (Unfortunately the django cache interface is not compatible because it + does not support storing binary data, only text. You can however pass + the underlying cache client to the bytecode cache which is available + as `django.core.cache.cache._client`.) + + The minimal interface for the client passed to the constructor is this: + + .. class:: MinimalClientInterface + + .. method:: set(key, value[, timeout]) + + Stores the bytecode in the cache. `value` is a string and + `timeout` the timeout of the key. If timeout is not provided + a default timeout or no timeout should be assumed, if it's + provided it's an integer with the number of seconds the cache + item should exist. + + .. method:: get(key) + + Returns the value for the cache key. If the item does not + exist in the cache the return value must be `None`. + + The other arguments to the constructor are the prefix for all keys that + is added before the actual cache key and the timeout for the bytecode in + the cache system. We recommend a high (or no) timeout. + + This bytecode cache does not support clearing of used items in the cache. + The clear method is a no-operation function. + + .. versionadded:: 2.7 + Added support for ignoring memcache errors through the + `ignore_memcache_errors` parameter. 
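+
+    A setup sketch, assuming python-memcached (the client construction
+    depends on the memcache library you use)::
+
+        import memcache
+
+        client = memcache.Client(["127.0.0.1:11211"])
+        env = Environment(
+            bytecode_cache=MemcachedBytecodeCache(client)
+        )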
+ """ + + def __init__( + self, + client: "_MemcachedClient", + prefix: str = "jinja2/bytecode/", + timeout: t.Optional[int] = None, + ignore_memcache_errors: bool = True, + ): + self.client = client + self.prefix = prefix + self.timeout = timeout + self.ignore_memcache_errors = ignore_memcache_errors + + def load_bytecode(self, bucket: Bucket) -> None: + try: + code = self.client.get(self.prefix + bucket.key) + except Exception: + if not self.ignore_memcache_errors: + raise + else: + bucket.bytecode_from_string(code) + + def dump_bytecode(self, bucket: Bucket) -> None: + key = self.prefix + bucket.key + value = bucket.bytecode_to_string() + + try: + if self.timeout is not None: + self.client.set(key, value, self.timeout) + else: + self.client.set(key, value) + except Exception: + if not self.ignore_memcache_errors: + raise diff --git a/backend/test/lib/python3.8/site-packages/jinja2/compiler.py b/backend/test/lib/python3.8/site-packages/jinja2/compiler.py new file mode 100644 index 0000000000000000000000000000000000000000..3458095f54ede1322eb2ab9e34288da87db54ca1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/compiler.py @@ -0,0 +1,1957 @@ +"""Compiles nodes from the parser into Python code.""" +import typing as t +from contextlib import contextmanager +from functools import update_wrapper +from io import StringIO +from itertools import chain +from keyword import iskeyword as is_python_keyword + +from markupsafe import escape +from markupsafe import Markup + +from . import nodes +from .exceptions import TemplateAssertionError +from .idtracking import Symbols +from .idtracking import VAR_LOAD_ALIAS +from .idtracking import VAR_LOAD_PARAMETER +from .idtracking import VAR_LOAD_RESOLVE +from .idtracking import VAR_LOAD_UNDEFINED +from .nodes import EvalContext +from .optimizer import Optimizer +from .utils import _PassArg +from .utils import concat +from .visitor import NodeVisitor + +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + +operators = { + "eq": "==", + "ne": "!=", + "gt": ">", + "gteq": ">=", + "lt": "<", + "lteq": "<=", + "in": "in", + "notin": "not in", +} + + +def optimizeconst(f: F) -> F: + def new_func( + self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any + ) -> t.Any: + # Only optimize if the frame is not volatile + if self.optimizer is not None and not frame.eval_ctx.volatile: + new_node = self.optimizer.visit(node, frame.eval_ctx) + + if new_node != node: + return self.visit(new_node, frame) + + return f(self, node, frame, **kwargs) + + return update_wrapper(t.cast(F, new_func), f) + + +def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]: + @optimizeconst + def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None: + if ( + self.environment.sandboxed + and op in self.environment.intercepted_binops # type: ignore + ): + self.write(f"environment.call_binop(context, {op!r}, ") + self.visit(node.left, frame) + self.write(", ") + self.visit(node.right, frame) + else: + self.write("(") + self.visit(node.left, frame) + self.write(f" {op} ") + self.visit(node.right, frame) + + self.write(")") + + return visitor + + +def _make_unop( + op: str, +) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]: + @optimizeconst + def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None: + if ( + self.environment.sandboxed + and op in self.environment.intercepted_unops 
# type: ignore + ): + self.write(f"environment.call_unop(context, {op!r}, ") + self.visit(node.node, frame) + else: + self.write("(" + op) + self.visit(node.node, frame) + + self.write(")") + + return visitor + + +def generate( + node: nodes.Template, + environment: "Environment", + name: t.Optional[str], + filename: t.Optional[str], + stream: t.Optional[t.TextIO] = None, + defer_init: bool = False, + optimized: bool = True, +) -> t.Optional[str]: + """Generate the python source for a node tree.""" + if not isinstance(node, nodes.Template): + raise TypeError("Can't compile non template nodes") + + generator = environment.code_generator_class( + environment, name, filename, stream, defer_init, optimized + ) + generator.visit(node) + + if stream is None: + return generator.stream.getvalue() # type: ignore + + return None + + +def has_safe_repr(value: t.Any) -> bool: + """Does the node have a safe representation?""" + if value is None or value is NotImplemented or value is Ellipsis: + return True + + if type(value) in {bool, int, float, complex, range, str, Markup}: + return True + + if type(value) in {tuple, list, set, frozenset}: + return all(has_safe_repr(v) for v in value) + + if type(value) is dict: + return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items()) + + return False + + +def find_undeclared( + nodes: t.Iterable[nodes.Node], names: t.Iterable[str] +) -> t.Set[str]: + """Check if the names passed are accessed undeclared. The return value + is a set of all the undeclared names from the sequence of names found. + """ + visitor = UndeclaredNameVisitor(names) + try: + for node in nodes: + visitor.visit(node) + except VisitorExit: + pass + return visitor.undeclared + + +class MacroRef: + def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None: + self.node = node + self.accesses_caller = False + self.accesses_kwargs = False + self.accesses_varargs = False + + +class Frame: + """Holds compile time information for us.""" + + def __init__( + self, + eval_ctx: EvalContext, + parent: t.Optional["Frame"] = None, + level: t.Optional[int] = None, + ) -> None: + self.eval_ctx = eval_ctx + + # the parent of this frame + self.parent = parent + + if parent is None: + self.symbols = Symbols(level=level) + + # in some dynamic inheritance situations the compiler needs to add + # write tests around output statements. + self.require_output_check = False + + # inside some tags we are using a buffer rather than yield statements. + # this for example affects {% filter %} or {% macro %}. If a frame + # is buffered this variable points to the name of the list used as + # buffer. + self.buffer: t.Optional[str] = None + + # the name of the block we're in, otherwise None. + self.block: t.Optional[str] = None + + else: + self.symbols = Symbols(parent.symbols, level=level) + self.require_output_check = parent.require_output_check + self.buffer = parent.buffer + self.block = parent.block + + # a toplevel frame is the root + soft frames such as if conditions. + self.toplevel = False + + # the root frame is basically just the outermost frame, so no if + # conditions. This information is used to optimize inheritance + # situations. + self.rootlevel = False + + # variables set inside of loops and blocks should not affect outer frames, + # but they still needs to be kept track of as part of the active context. 
+ self.loop_frame = False + self.block_frame = False + + # track whether the frame is being used in an if-statement or conditional + # expression as it determines which errors should be raised during runtime + # or compile time. + self.soft_frame = False + + def copy(self) -> "Frame": + """Create a copy of the current one.""" + rv = object.__new__(self.__class__) + rv.__dict__.update(self.__dict__) + rv.symbols = self.symbols.copy() + return rv + + def inner(self, isolated: bool = False) -> "Frame": + """Return an inner frame.""" + if isolated: + return Frame(self.eval_ctx, level=self.symbols.level + 1) + return Frame(self.eval_ctx, self) + + def soft(self) -> "Frame": + """Return a soft frame. A soft frame may not be modified as + standalone thing as it shares the resources with the frame it + was created of, but it's not a rootlevel frame any longer. + + This is only used to implement if-statements and conditional + expressions. + """ + rv = self.copy() + rv.rootlevel = False + rv.soft_frame = True + return rv + + __copy__ = copy + + +class VisitorExit(RuntimeError): + """Exception used by the `UndeclaredNameVisitor` to signal a stop.""" + + +class DependencyFinderVisitor(NodeVisitor): + """A visitor that collects filter and test calls.""" + + def __init__(self) -> None: + self.filters: t.Set[str] = set() + self.tests: t.Set[str] = set() + + def visit_Filter(self, node: nodes.Filter) -> None: + self.generic_visit(node) + self.filters.add(node.name) + + def visit_Test(self, node: nodes.Test) -> None: + self.generic_visit(node) + self.tests.add(node.name) + + def visit_Block(self, node: nodes.Block) -> None: + """Stop visiting at blocks.""" + + +class UndeclaredNameVisitor(NodeVisitor): + """A visitor that checks if a name is accessed without being + declared. This is different from the frame visitor as it will + not stop at closure frames. + """ + + def __init__(self, names: t.Iterable[str]) -> None: + self.names = set(names) + self.undeclared: t.Set[str] = set() + + def visit_Name(self, node: nodes.Name) -> None: + if node.ctx == "load" and node.name in self.names: + self.undeclared.add(node.name) + if self.undeclared == self.names: + raise VisitorExit() + else: + self.names.discard(node.name) + + def visit_Block(self, node: nodes.Block) -> None: + """Stop visiting a blocks.""" + + +class CompilerExit(Exception): + """Raised if the compiler encountered a situation where it just + doesn't make sense to further process the code. Any block that + raises such an exception is not further processed. + """ + + +class CodeGenerator(NodeVisitor): + def __init__( + self, + environment: "Environment", + name: t.Optional[str], + filename: t.Optional[str], + stream: t.Optional[t.TextIO] = None, + defer_init: bool = False, + optimized: bool = True, + ) -> None: + if stream is None: + stream = StringIO() + self.environment = environment + self.name = name + self.filename = filename + self.stream = stream + self.created_block_context = False + self.defer_init = defer_init + self.optimizer: t.Optional[Optimizer] = None + + if optimized: + self.optimizer = Optimizer(environment) + + # aliases for imports + self.import_aliases: t.Dict[str, str] = {} + + # a registry for all blocks. Because blocks are moved out + # into the global python scope they are registered here + self.blocks: t.Dict[str, nodes.Block] = {} + + # the number of extends statements so far + self.extends_so_far = 0 + + # some templates have a rootlevel extends. 
In this case we + # can safely assume that we're a child template and do some + # more optimizations. + self.has_known_extends = False + + # the current line number + self.code_lineno = 1 + + # registry of all filters and tests (global, not block local) + self.tests: t.Dict[str, str] = {} + self.filters: t.Dict[str, str] = {} + + # the debug information + self.debug_info: t.List[t.Tuple[int, int]] = [] + self._write_debug_info: t.Optional[int] = None + + # the number of new lines before the next write() + self._new_lines = 0 + + # the line number of the last written statement + self._last_line = 0 + + # true if nothing was written so far. + self._first_write = True + + # used by the `temporary_identifier` method to get new + # unique, temporary identifier + self._last_identifier = 0 + + # the current indentation + self._indentation = 0 + + # Tracks toplevel assignments + self._assign_stack: t.List[t.Set[str]] = [] + + # Tracks parameter definition blocks + self._param_def_block: t.List[t.Set[str]] = [] + + # Tracks the current context. + self._context_reference_stack = ["context"] + + @property + def optimized(self) -> bool: + return self.optimizer is not None + + # -- Various compilation helpers + + def fail(self, msg: str, lineno: int) -> "te.NoReturn": + """Fail with a :exc:`TemplateAssertionError`.""" + raise TemplateAssertionError(msg, lineno, self.name, self.filename) + + def temporary_identifier(self) -> str: + """Get a new unique identifier.""" + self._last_identifier += 1 + return f"t_{self._last_identifier}" + + def buffer(self, frame: Frame) -> None: + """Enable buffering for the frame from that point onwards.""" + frame.buffer = self.temporary_identifier() + self.writeline(f"{frame.buffer} = []") + + def return_buffer_contents( + self, frame: Frame, force_unescaped: bool = False + ) -> None: + """Return the buffer contents of the frame.""" + if not force_unescaped: + if frame.eval_ctx.volatile: + self.writeline("if context.eval_ctx.autoescape:") + self.indent() + self.writeline(f"return Markup(concat({frame.buffer}))") + self.outdent() + self.writeline("else:") + self.indent() + self.writeline(f"return concat({frame.buffer})") + self.outdent() + return + elif frame.eval_ctx.autoescape: + self.writeline(f"return Markup(concat({frame.buffer}))") + return + self.writeline(f"return concat({frame.buffer})") + + def indent(self) -> None: + """Indent by one.""" + self._indentation += 1 + + def outdent(self, step: int = 1) -> None: + """Outdent by step.""" + self._indentation -= step + + def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None: + """Yield or write into the frame buffer.""" + if frame.buffer is None: + self.writeline("yield ", node) + else: + self.writeline(f"{frame.buffer}.append(", node) + + def end_write(self, frame: Frame) -> None: + """End the writing process started by `start_write`.""" + if frame.buffer is not None: + self.write(")") + + def simple_write( + self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None + ) -> None: + """Simple shortcut for start_write + write + end_write.""" + self.start_write(frame, node) + self.write(s) + self.end_write(frame) + + def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None: + """Visit a list of nodes as block in a frame. If the current frame + is no buffer a dummy ``if 0: yield None`` is written automatically. 
+ """ + try: + self.writeline("pass") + for node in nodes: + self.visit(node, frame) + except CompilerExit: + pass + + def write(self, x: str) -> None: + """Write a string into the output stream.""" + if self._new_lines: + if not self._first_write: + self.stream.write("\n" * self._new_lines) + self.code_lineno += self._new_lines + if self._write_debug_info is not None: + self.debug_info.append((self._write_debug_info, self.code_lineno)) + self._write_debug_info = None + self._first_write = False + self.stream.write(" " * self._indentation) + self._new_lines = 0 + self.stream.write(x) + + def writeline( + self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0 + ) -> None: + """Combination of newline and write.""" + self.newline(node, extra) + self.write(x) + + def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None: + """Add one or more newlines before the next write.""" + self._new_lines = max(self._new_lines, 1 + extra) + if node is not None and node.lineno != self._last_line: + self._write_debug_info = node.lineno + self._last_line = node.lineno + + def signature( + self, + node: t.Union[nodes.Call, nodes.Filter, nodes.Test], + frame: Frame, + extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + ) -> None: + """Writes a function call to the stream for the current node. + A leading comma is added automatically. The extra keyword + arguments may not include python keywords otherwise a syntax + error could occur. The extra keyword arguments should be given + as python dict. + """ + # if any of the given keyword arguments is a python keyword + # we have to make sure that no invalid call is created. + kwarg_workaround = any( + is_python_keyword(t.cast(str, k)) + for k in chain((x.key for x in node.kwargs), extra_kwargs or ()) + ) + + for arg in node.args: + self.write(", ") + self.visit(arg, frame) + + if not kwarg_workaround: + for kwarg in node.kwargs: + self.write(", ") + self.visit(kwarg, frame) + if extra_kwargs is not None: + for key, value in extra_kwargs.items(): + self.write(f", {key}={value}") + if node.dyn_args: + self.write(", *") + self.visit(node.dyn_args, frame) + + if kwarg_workaround: + if node.dyn_kwargs is not None: + self.write(", **dict({") + else: + self.write(", **{") + for kwarg in node.kwargs: + self.write(f"{kwarg.key!r}: ") + self.visit(kwarg.value, frame) + self.write(", ") + if extra_kwargs is not None: + for key, value in extra_kwargs.items(): + self.write(f"{key!r}: {value}, ") + if node.dyn_kwargs is not None: + self.write("}, **") + self.visit(node.dyn_kwargs, frame) + self.write(")") + else: + self.write("}") + + elif node.dyn_kwargs is not None: + self.write(", **") + self.visit(node.dyn_kwargs, frame) + + def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None: + """Find all filter and test names used in the template and + assign them to variables in the compiled namespace. Checking + that the names are registered with the environment is done when + compiling the Filter and Test nodes. If the node is in an If or + CondExpr node, the check is done at runtime instead. + + .. versionchanged:: 3.0 + Filters and tests in If and CondExpr nodes are checked at + runtime instead of compile time. 
+ """ + visitor = DependencyFinderVisitor() + + for node in nodes: + visitor.visit(node) + + for id_map, names, dependency in (self.filters, visitor.filters, "filters"), ( + self.tests, + visitor.tests, + "tests", + ): + for name in sorted(names): + if name not in id_map: + id_map[name] = self.temporary_identifier() + + # add check during runtime that dependencies used inside of executed + # blocks are defined, as this step may be skipped during compile time + self.writeline("try:") + self.indent() + self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]") + self.outdent() + self.writeline("except KeyError:") + self.indent() + self.writeline("@internalcode") + self.writeline(f"def {id_map[name]}(*unused):") + self.indent() + self.writeline( + f'raise TemplateRuntimeError("No {dependency[:-1]}' + f' named {name!r} found.")' + ) + self.outdent() + self.outdent() + + def enter_frame(self, frame: Frame) -> None: + undefs = [] + for target, (action, param) in frame.symbols.loads.items(): + if action == VAR_LOAD_PARAMETER: + pass + elif action == VAR_LOAD_RESOLVE: + self.writeline(f"{target} = {self.get_resolve_func()}({param!r})") + elif action == VAR_LOAD_ALIAS: + self.writeline(f"{target} = {param}") + elif action == VAR_LOAD_UNDEFINED: + undefs.append(target) + else: + raise NotImplementedError("unknown load instruction") + if undefs: + self.writeline(f"{' = '.join(undefs)} = missing") + + def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None: + if not with_python_scope: + undefs = [] + for target in frame.symbols.loads: + undefs.append(target) + if undefs: + self.writeline(f"{' = '.join(undefs)} = missing") + + def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str: + return async_value if self.environment.is_async else sync_value + + def func(self, name: str) -> str: + return f"{self.choose_async()}def {name}" + + def macro_body( + self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame + ) -> t.Tuple[Frame, MacroRef]: + """Dump the function def of a macro or call block.""" + frame = frame.inner() + frame.symbols.analyze_node(node) + macro_ref = MacroRef(node) + + explicit_caller = None + skip_special_params = set() + args = [] + + for idx, arg in enumerate(node.args): + if arg.name == "caller": + explicit_caller = idx + if arg.name in ("kwargs", "varargs"): + skip_special_params.add(arg.name) + args.append(frame.symbols.ref(arg.name)) + + undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs")) + + if "caller" in undeclared: + # In older Jinja versions there was a bug that allowed caller + # to retain the special behavior even if it was mentioned in + # the argument list. However thankfully this was only really + # working if it was the last argument. So we are explicitly + # checking this now and error out if it is anywhere else in + # the argument list. 
+ if explicit_caller is not None: + try: + node.defaults[explicit_caller - len(node.args)] + except IndexError: + self.fail( + "When defining macros or call blocks the " + 'special "caller" argument must be omitted ' + "or be given a default.", + node.lineno, + ) + else: + args.append(frame.symbols.declare_parameter("caller")) + macro_ref.accesses_caller = True + if "kwargs" in undeclared and "kwargs" not in skip_special_params: + args.append(frame.symbols.declare_parameter("kwargs")) + macro_ref.accesses_kwargs = True + if "varargs" in undeclared and "varargs" not in skip_special_params: + args.append(frame.symbols.declare_parameter("varargs")) + macro_ref.accesses_varargs = True + + # macros are delayed, they never require output checks + frame.require_output_check = False + frame.symbols.analyze_node(node) + self.writeline(f"{self.func('macro')}({', '.join(args)}):", node) + self.indent() + + self.buffer(frame) + self.enter_frame(frame) + + self.push_parameter_definitions(frame) + for idx, arg in enumerate(node.args): + ref = frame.symbols.ref(arg.name) + self.writeline(f"if {ref} is missing:") + self.indent() + try: + default = node.defaults[idx - len(node.args)] + except IndexError: + self.writeline( + f'{ref} = undefined("parameter {arg.name!r} was not provided",' + f" name={arg.name!r})" + ) + else: + self.writeline(f"{ref} = ") + self.visit(default, frame) + self.mark_parameter_stored(ref) + self.outdent() + self.pop_parameter_definitions() + + self.blockvisit(node.body, frame) + self.return_buffer_contents(frame, force_unescaped=True) + self.leave_frame(frame, with_python_scope=True) + self.outdent() + + return frame, macro_ref + + def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None: + """Dump the macro definition for the def created by macro_body.""" + arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) + name = getattr(macro_ref.node, "name", None) + if len(macro_ref.node.args) == 1: + arg_tuple += "," + self.write( + f"Macro(environment, macro, {name!r}, ({arg_tuple})," + f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r}," + f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)" + ) + + def position(self, node: nodes.Node) -> str: + """Return a human readable position for the node.""" + rv = f"line {node.lineno}" + if self.name is not None: + rv = f"{rv} in {self.name!r}" + return rv + + def dump_local_context(self, frame: Frame) -> str: + items_kv = ", ".join( + f"{name!r}: {target}" + for name, target in frame.symbols.dump_stores().items() + ) + return f"{{{items_kv}}}" + + def write_commons(self) -> None: + """Writes a common preamble that is used by root and block functions. + Primarily this sets up common local helpers and enforces a generator + through a dead branch. + """ + self.writeline("resolve = context.resolve_or_missing") + self.writeline("undefined = environment.undefined") + self.writeline("concat = environment.concat") + # always use the standard Undefined class for the implicit else of + # conditional expressions + self.writeline("cond_expr_undefined = Undefined") + self.writeline("if 0: yield None") + + def push_parameter_definitions(self, frame: Frame) -> None: + """Pushes all parameter targets from the given frame into a local + stack that permits tracking of yet to be assigned parameters. In + particular this enables the optimization from `visit_Name` to skip + undefined expressions for parameters in macros as macros can reference + otherwise unbound parameters. 
+ """ + self._param_def_block.append(frame.symbols.dump_param_targets()) + + def pop_parameter_definitions(self) -> None: + """Pops the current parameter definitions set.""" + self._param_def_block.pop() + + def mark_parameter_stored(self, target: str) -> None: + """Marks a parameter in the current parameter definitions as stored. + This will skip the enforced undefined checks. + """ + if self._param_def_block: + self._param_def_block[-1].discard(target) + + def push_context_reference(self, target: str) -> None: + self._context_reference_stack.append(target) + + def pop_context_reference(self) -> None: + self._context_reference_stack.pop() + + def get_context_ref(self) -> str: + return self._context_reference_stack[-1] + + def get_resolve_func(self) -> str: + target = self._context_reference_stack[-1] + if target == "context": + return "resolve" + return f"{target}.resolve" + + def derive_context(self, frame: Frame) -> str: + return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})" + + def parameter_is_undeclared(self, target: str) -> bool: + """Checks if a given target is an undeclared parameter.""" + if not self._param_def_block: + return False + return target in self._param_def_block[-1] + + def push_assign_tracking(self) -> None: + """Pushes a new layer for assignment tracking.""" + self._assign_stack.append(set()) + + def pop_assign_tracking(self, frame: Frame) -> None: + """Pops the topmost level for assignment tracking and updates the + context variables if necessary. + """ + vars = self._assign_stack.pop() + if ( + not frame.block_frame + and not frame.loop_frame + and not frame.toplevel + or not vars + ): + return + public_names = [x for x in vars if x[:1] != "_"] + if len(vars) == 1: + name = next(iter(vars)) + ref = frame.symbols.ref(name) + if frame.loop_frame: + self.writeline(f"_loop_vars[{name!r}] = {ref}") + return + if frame.block_frame: + self.writeline(f"_block_vars[{name!r}] = {ref}") + return + self.writeline(f"context.vars[{name!r}] = {ref}") + else: + if frame.loop_frame: + self.writeline("_loop_vars.update({") + elif frame.block_frame: + self.writeline("_block_vars.update({") + else: + self.writeline("context.vars.update({") + for idx, name in enumerate(vars): + if idx: + self.write(", ") + ref = frame.symbols.ref(name) + self.write(f"{name!r}: {ref}") + self.write("})") + if not frame.block_frame and not frame.loop_frame and public_names: + if len(public_names) == 1: + self.writeline(f"context.exported_vars.add({public_names[0]!r})") + else: + names_str = ", ".join(map(repr, public_names)) + self.writeline(f"context.exported_vars.update(({names_str}))") + + # -- Statement Visitors + + def visit_Template( + self, node: nodes.Template, frame: t.Optional[Frame] = None + ) -> None: + assert frame is None, "no root frame allowed" + eval_ctx = EvalContext(self.environment, self.name) + + from .runtime import exported, async_exported + + if self.environment.is_async: + exported_names = sorted(exported + async_exported) + else: + exported_names = sorted(exported) + + self.writeline("from jinja2.runtime import " + ", ".join(exported_names)) + + # if we want a deferred initialization we cannot move the + # environment into a local name + envenv = "" if self.defer_init else ", environment=environment" + + # do we have an extends tag at all? If not, we can save some + # overhead by just not processing any inheritance code. 
+ have_extends = node.find(nodes.Extends) is not None + + # find all blocks + for block in node.find_all(nodes.Block): + if block.name in self.blocks: + self.fail(f"block {block.name!r} defined twice", block.lineno) + self.blocks[block.name] = block + + # find all imports and import them + for import_ in node.find_all(nodes.ImportedName): + if import_.importname not in self.import_aliases: + imp = import_.importname + self.import_aliases[imp] = alias = self.temporary_identifier() + if "." in imp: + module, obj = imp.rsplit(".", 1) + self.writeline(f"from {module} import {obj} as {alias}") + else: + self.writeline(f"import {imp} as {alias}") + + # add the load name + self.writeline(f"name = {self.name!r}") + + # generate the root render function. + self.writeline( + f"{self.func('root')}(context, missing=missing{envenv}):", extra=1 + ) + self.indent() + self.write_commons() + + # process the root + frame = Frame(eval_ctx) + if "self" in find_undeclared(node.body, ("self",)): + ref = frame.symbols.declare_parameter("self") + self.writeline(f"{ref} = TemplateReference(context)") + frame.symbols.analyze_node(node) + frame.toplevel = frame.rootlevel = True + frame.require_output_check = have_extends and not self.has_known_extends + if have_extends: + self.writeline("parent_template = None") + self.enter_frame(frame) + self.pull_dependencies(node.body) + self.blockvisit(node.body, frame) + self.leave_frame(frame, with_python_scope=True) + self.outdent() + + # make sure that the parent root is called. + if have_extends: + if not self.has_known_extends: + self.indent() + self.writeline("if parent_template is not None:") + self.indent() + if not self.environment.is_async: + self.writeline("yield from parent_template.root_render_func(context)") + else: + self.writeline( + "async for event in parent_template.root_render_func(context):" + ) + self.indent() + self.writeline("yield event") + self.outdent() + self.outdent(1 + (not self.has_known_extends)) + + # at this point we now have the blocks collected and can visit them too. + for name, block in self.blocks.items(): + self.writeline( + f"{self.func('block_' + name)}(context, missing=missing{envenv}):", + block, + 1, + ) + self.indent() + self.write_commons() + # It's important that we do not make this frame a child of the + # toplevel template. This would cause a variety of + # interesting issues with identifier tracking. 
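+ # For example (illustrative): a {% block body %} tag compiles to its
+ # own module-level generator, roughly
+ # def block_body(context, missing=missing): ...
+ # which is then registered in the blocks mapping emitted below.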
+ block_frame = Frame(eval_ctx) + block_frame.block_frame = True + undeclared = find_undeclared(block.body, ("self", "super")) + if "self" in undeclared: + ref = block_frame.symbols.declare_parameter("self") + self.writeline(f"{ref} = TemplateReference(context)") + if "super" in undeclared: + ref = block_frame.symbols.declare_parameter("super") + self.writeline(f"{ref} = context.super({name!r}, block_{name})") + block_frame.symbols.analyze_node(block) + block_frame.block = name + self.writeline("_block_vars = {}") + self.enter_frame(block_frame) + self.pull_dependencies(block.body) + self.blockvisit(block.body, block_frame) + self.leave_frame(block_frame, with_python_scope=True) + self.outdent() + + blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks) + self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1) + debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info) + self.writeline(f"debug_info = {debug_kv_str!r}") + + def visit_Block(self, node: nodes.Block, frame: Frame) -> None: + """Call a block and register it for the template.""" + level = 0 + if frame.toplevel: + # if we know that we are a child template, there is no need to + # check if we are one + if self.has_known_extends: + return + if self.extends_so_far > 0: + self.writeline("if parent_template is None:") + self.indent() + level += 1 + + if node.scoped: + context = self.derive_context(frame) + else: + context = self.get_context_ref() + + if node.required: + self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node) + self.indent() + self.writeline( + f'raise TemplateRuntimeError("Required block {node.name!r} not found")', + node, + ) + self.outdent() + + if not self.environment.is_async and frame.buffer is None: + self.writeline( + f"yield from context.blocks[{node.name!r}][0]({context})", node + ) + else: + self.writeline( + f"{self.choose_async()}for event in" + f" context.blocks[{node.name!r}][0]({context}):", + node, + ) + self.indent() + self.simple_write("event", frame) + self.outdent() + + self.outdent(level) + + def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None: + """Calls the extender.""" + if not frame.toplevel: + self.fail("cannot use extend from a non top-level scope", node.lineno) + + # if the number of extends statements in general is zero so + # far, we don't have to add a check if something extended + # the template before this one. + if self.extends_so_far > 0: + + # if we have a known extends we just add a template runtime + # error into the generated code. We could catch that at compile + # time too, but i welcome it not to confuse users by throwing the + # same error at different times just "because we can". + if not self.has_known_extends: + self.writeline("if parent_template is not None:") + self.indent() + self.writeline('raise TemplateRuntimeError("extended multiple times")') + + # if we have a known extends already we don't need that code here + # as we know that the template execution will end here. 
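+ # For example (illustrative, template names hypothetical): a template
+ # containing both {% extends "a.html" %} and {% extends "b.html" %}
+ # is the "extended multiple times" case guarded against above.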
+ if self.has_known_extends: + raise CompilerExit() + else: + self.outdent() + + self.writeline("parent_template = environment.get_template(", node) + self.visit(node.template, frame) + self.write(f", {self.name!r})") + self.writeline("for name, parent_block in parent_template.blocks.items():") + self.indent() + self.writeline("context.blocks.setdefault(name, []).append(parent_block)") + self.outdent() + + # if this extends statement was in the root level we can take + # advantage of that information and simplify the generated code + # in the top level from this point onwards + if frame.rootlevel: + self.has_known_extends = True + + # and now we have one more + self.extends_so_far += 1 + + def visit_Include(self, node: nodes.Include, frame: Frame) -> None: + """Handles includes.""" + if node.ignore_missing: + self.writeline("try:") + self.indent() + + func_name = "get_or_select_template" + if isinstance(node.template, nodes.Const): + if isinstance(node.template.value, str): + func_name = "get_template" + elif isinstance(node.template.value, (tuple, list)): + func_name = "select_template" + elif isinstance(node.template, (nodes.Tuple, nodes.List)): + func_name = "select_template" + + self.writeline(f"template = environment.{func_name}(", node) + self.visit(node.template, frame) + self.write(f", {self.name!r})") + if node.ignore_missing: + self.outdent() + self.writeline("except TemplateNotFound:") + self.indent() + self.writeline("pass") + self.outdent() + self.writeline("else:") + self.indent() + + skip_event_yield = False + if node.with_context: + self.writeline( + f"{self.choose_async()}for event in template.root_render_func(" + "template.new_context(context.get_all(), True," + f" {self.dump_local_context(frame)})):" + ) + elif self.environment.is_async: + self.writeline( + "for event in (await template._get_default_module_async())" + "._body_stream:" + ) + else: + self.writeline("yield from template._get_default_module()._body_stream") + skip_event_yield = True + + if not skip_event_yield: + self.indent() + self.simple_write("event", frame) + self.outdent() + + if node.ignore_missing: + self.outdent() + + def _import_common( + self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame + ) -> None: + self.write(f"{self.choose_async('await ')}environment.get_template(") + self.visit(node.template, frame) + self.write(f", {self.name!r}).") + + if node.with_context: + f_name = f"make_module{self.choose_async('_async')}" + self.write( + f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})" + ) + else: + self.write(f"_get_default_module{self.choose_async('_async')}(context)") + + def visit_Import(self, node: nodes.Import, frame: Frame) -> None: + """Visit regular imports.""" + self.writeline(f"{frame.symbols.ref(node.target)} = ", node) + if frame.toplevel: + self.write(f"context.vars[{node.target!r}] = ") + + self._import_common(node, frame) + + if frame.toplevel and not node.target.startswith("_"): + self.writeline(f"context.exported_vars.discard({node.target!r})") + + def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None: + """Visit named imports.""" + self.newline(node) + self.write("included_template = ") + self._import_common(node, frame) + var_names = [] + discarded_names = [] + for name in node.names: + if isinstance(name, tuple): + name, alias = name + else: + alias = name + self.writeline( + f"{frame.symbols.ref(alias)} =" + f" getattr(included_template, {name!r}, missing)" + ) + self.writeline(f"if {frame.symbols.ref(alias)} is missing:") 
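+ # For example (illustrative, names hypothetical): for
+ # {% from "forms.html" import input as field %}, the lines emitted
+ # here bind field from the included template module and fall back to
+ # an undefined object when the name is not exported.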
+ self.indent() + message = ( + "the template {included_template.__name__!r}" + f" (imported on {self.position(node)})" + f" does not export the requested name {name!r}" + ) + self.writeline( + f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})" + ) + self.outdent() + if frame.toplevel: + var_names.append(alias) + if not alias.startswith("_"): + discarded_names.append(alias) + + if var_names: + if len(var_names) == 1: + name = var_names[0] + self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}") + else: + names_kv = ", ".join( + f"{name!r}: {frame.symbols.ref(name)}" for name in var_names + ) + self.writeline(f"context.vars.update({{{names_kv}}})") + if discarded_names: + if len(discarded_names) == 1: + self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})") + else: + names_str = ", ".join(map(repr, discarded_names)) + self.writeline( + f"context.exported_vars.difference_update(({names_str}))" + ) + + def visit_For(self, node: nodes.For, frame: Frame) -> None: + loop_frame = frame.inner() + loop_frame.loop_frame = True + test_frame = frame.inner() + else_frame = frame.inner() + + # try to figure out if we have an extended loop. An extended loop + # is necessary if the loop is in recursive mode if the special loop + # variable is accessed in the body if the body is a scoped block. + extended_loop = ( + node.recursive + or "loop" + in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",)) + or any(block.scoped for block in node.find_all(nodes.Block)) + ) + + loop_ref = None + if extended_loop: + loop_ref = loop_frame.symbols.declare_parameter("loop") + + loop_frame.symbols.analyze_node(node, for_branch="body") + if node.else_: + else_frame.symbols.analyze_node(node, for_branch="else") + + if node.test: + loop_filter_func = self.temporary_identifier() + test_frame.symbols.analyze_node(node, for_branch="test") + self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test) + self.indent() + self.enter_frame(test_frame) + self.writeline(self.choose_async("async for ", "for ")) + self.visit(node.target, loop_frame) + self.write(" in ") + self.write(self.choose_async("auto_aiter(fiter)", "fiter")) + self.write(":") + self.indent() + self.writeline("if ", node.test) + self.visit(node.test, test_frame) + self.write(":") + self.indent() + self.writeline("yield ") + self.visit(node.target, loop_frame) + self.outdent(3) + self.leave_frame(test_frame, with_python_scope=True) + + # if we don't have an recursive loop we have to find the shadowed + # variables at that point. Because loops can be nested but the loop + # variable is a special one we have to enforce aliasing for it. 
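+ # For example (illustrative): {% for item in sitemap recursive %}
+ # compiles the loop body into a nested function, roughly
+ # def loop(reciter, loop_render_func, depth=0): ...
+ # so the template body can re-enter itself via loop(item.children).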
+ if node.recursive: + self.writeline( + f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node + ) + self.indent() + self.buffer(loop_frame) + + # Use the same buffer for the else frame + else_frame.buffer = loop_frame.buffer + + # make sure the loop variable is a special one and raise a template + # assertion error if a loop tries to write to loop + if extended_loop: + self.writeline(f"{loop_ref} = missing") + + for name in node.find_all(nodes.Name): + if name.ctx == "store" and name.name == "loop": + self.fail( + "Can't assign to special loop variable in for-loop target", + name.lineno, + ) + + if node.else_: + iteration_indicator = self.temporary_identifier() + self.writeline(f"{iteration_indicator} = 1") + + self.writeline(self.choose_async("async for ", "for "), node) + self.visit(node.target, loop_frame) + if extended_loop: + self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(") + else: + self.write(" in ") + + if node.test: + self.write(f"{loop_filter_func}(") + if node.recursive: + self.write("reciter") + else: + if self.environment.is_async and not extended_loop: + self.write("auto_aiter(") + self.visit(node.iter, frame) + if self.environment.is_async and not extended_loop: + self.write(")") + if node.test: + self.write(")") + + if node.recursive: + self.write(", undefined, loop_render_func, depth):") + else: + self.write(", undefined):" if extended_loop else ":") + + self.indent() + self.enter_frame(loop_frame) + + self.writeline("_loop_vars = {}") + self.blockvisit(node.body, loop_frame) + if node.else_: + self.writeline(f"{iteration_indicator} = 0") + self.outdent() + self.leave_frame( + loop_frame, with_python_scope=node.recursive and not node.else_ + ) + + if node.else_: + self.writeline(f"if {iteration_indicator}:") + self.indent() + self.enter_frame(else_frame) + self.blockvisit(node.else_, else_frame) + self.leave_frame(else_frame) + self.outdent() + + # if the node was recursive we have to return the buffer contents + # and start the iteration code + if node.recursive: + self.return_buffer_contents(loop_frame) + self.outdent() + self.start_write(frame, node) + self.write(f"{self.choose_async('await ')}loop(") + if self.environment.is_async: + self.write("auto_aiter(") + self.visit(node.iter, frame) + if self.environment.is_async: + self.write(")") + self.write(", loop)") + self.end_write(frame) + + # at the end of the iteration, clear any assignments made in the + # loop from the top level + if self._assign_stack: + self._assign_stack[-1].difference_update(loop_frame.symbols.stores) + + def visit_If(self, node: nodes.If, frame: Frame) -> None: + if_frame = frame.soft() + self.writeline("if ", node) + self.visit(node.test, if_frame) + self.write(":") + self.indent() + self.blockvisit(node.body, if_frame) + self.outdent() + for elif_ in node.elif_: + self.writeline("elif ", elif_) + self.visit(elif_.test, if_frame) + self.write(":") + self.indent() + self.blockvisit(elif_.body, if_frame) + self.outdent() + if node.else_: + self.writeline("else:") + self.indent() + self.blockvisit(node.else_, if_frame) + self.outdent() + + def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None: + macro_frame, macro_ref = self.macro_body(node, frame) + self.newline() + if frame.toplevel: + if not node.name.startswith("_"): + self.write(f"context.exported_vars.add({node.name!r})") + self.writeline(f"context.vars[{node.name!r}] = ") + self.write(f"{frame.symbols.ref(node.name)} = ") + self.macro_def(macro_ref, macro_frame) + + def visit_CallBlock(self, 
node: nodes.CallBlock, frame: Frame) -> None: + call_frame, macro_ref = self.macro_body(node, frame) + self.writeline("caller = ") + self.macro_def(macro_ref, call_frame) + self.start_write(frame, node) + self.visit_Call(node.call, frame, forward_caller=True) + self.end_write(frame) + + def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None: + filter_frame = frame.inner() + filter_frame.symbols.analyze_node(node) + self.enter_frame(filter_frame) + self.buffer(filter_frame) + self.blockvisit(node.body, filter_frame) + self.start_write(frame, node) + self.visit_Filter(node.filter, filter_frame) + self.end_write(frame) + self.leave_frame(filter_frame) + + def visit_With(self, node: nodes.With, frame: Frame) -> None: + with_frame = frame.inner() + with_frame.symbols.analyze_node(node) + self.enter_frame(with_frame) + for target, expr in zip(node.targets, node.values): + self.newline() + self.visit(target, with_frame) + self.write(" = ") + self.visit(expr, frame) + self.blockvisit(node.body, with_frame) + self.leave_frame(with_frame) + + def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None: + self.newline(node) + self.visit(node.node, frame) + + class _FinalizeInfo(t.NamedTuple): + const: t.Optional[t.Callable[..., str]] + src: t.Optional[str] + + @staticmethod + def _default_finalize(value: t.Any) -> t.Any: + """The default finalize function if the environment isn't + configured with one. Or, if the environment has one, this is + called on that function's output for constants. + """ + return str(value) + + _finalize: t.Optional[_FinalizeInfo] = None + + def _make_finalize(self) -> _FinalizeInfo: + """Build the finalize function to be used on constants and at + runtime. Cached so it's only created once for all output nodes. + + Returns a ``namedtuple`` with the following attributes: + + ``const`` + A function to finalize constant data at compile time. + + ``src`` + Source code to output around nodes to be evaluated at + runtime. + """ + if self._finalize is not None: + return self._finalize + + finalize: t.Optional[t.Callable[..., t.Any]] + finalize = default = self._default_finalize + src = None + + if self.environment.finalize: + src = "environment.finalize(" + env_finalize = self.environment.finalize + pass_arg = { + _PassArg.context: "context", + _PassArg.eval_context: "context.eval_ctx", + _PassArg.environment: "environment", + }.get( + _PassArg.from_obj(env_finalize) # type: ignore + ) + finalize = None + + if pass_arg is None: + + def finalize(value: t.Any) -> t.Any: + return default(env_finalize(value)) + + else: + src = f"{src}{pass_arg}, " + + if pass_arg == "environment": + + def finalize(value: t.Any) -> t.Any: + return default(env_finalize(self.environment, value)) + + self._finalize = self._FinalizeInfo(finalize, src) + return self._finalize + + def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: + """Given a group of constant values converted from ``Output`` + child nodes, produce a string to write to the template module + source. + """ + return repr(concat(group)) + + def _output_child_to_const( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> str: + """Try to optimize a child of an ``Output`` node by trying to + convert it to constant, finalized data at compile time. + + If :exc:`Impossible` is raised, the node is not constant and + will be evaluated at runtime. Any other exception will also be + evaluated at runtime for easier debugging. 
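+
+ For example (an illustrative sketch): with default settings the
+ child {{ 1 + 1 }} folds to the constant string "2" at compile time,
+ whereas {{ user.name }} raises Impossible and is left as a runtime
+ node.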
+ """ + const = node.as_const(frame.eval_ctx) + + if frame.eval_ctx.autoescape: + const = escape(const) + + # Template data doesn't go through finalize. + if isinstance(node, nodes.TemplateData): + return str(const) + + return finalize.const(const) # type: ignore + + def _output_child_pre( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> None: + """Output extra source code before visiting a child of an + ``Output`` node. + """ + if frame.eval_ctx.volatile: + self.write("(escape if context.eval_ctx.autoescape else str)(") + elif frame.eval_ctx.autoescape: + self.write("escape(") + else: + self.write("str(") + + if finalize.src is not None: + self.write(finalize.src) + + def _output_child_post( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> None: + """Output extra source code after visiting a child of an + ``Output`` node. + """ + self.write(")") + + if finalize.src is not None: + self.write(")") + + def visit_Output(self, node: nodes.Output, frame: Frame) -> None: + # If an extends is active, don't render outside a block. + if frame.require_output_check: + # A top-level extends is known to exist at compile time. + if self.has_known_extends: + return + + self.writeline("if parent_template is None:") + self.indent() + + finalize = self._make_finalize() + body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = [] + + # Evaluate constants at compile time if possible. Each item in + # body will be either a list of static data or a node to be + # evaluated at runtime. + for child in node.nodes: + try: + if not ( + # If the finalize function requires runtime context, + # constants can't be evaluated at compile time. + finalize.const + # Unless it's basic template data that won't be + # finalized anyway. + or isinstance(child, nodes.TemplateData) + ): + raise nodes.Impossible() + + const = self._output_child_to_const(child, frame, finalize) + except (nodes.Impossible, Exception): + # The node was not constant and needs to be evaluated at + # runtime. Or another error was raised, which is easier + # to debug at runtime. + body.append(child) + continue + + if body and isinstance(body[-1], list): + body[-1].append(const) + else: + body.append([const]) + + if frame.buffer is not None: + if len(body) == 1: + self.writeline(f"{frame.buffer}.append(") + else: + self.writeline(f"{frame.buffer}.extend((") + + self.indent() + + for item in body: + if isinstance(item, list): + # A group of constant data to join and output. + val = self._output_const_repr(item) + + if frame.buffer is None: + self.writeline("yield " + val) + else: + self.writeline(val + ",") + else: + if frame.buffer is None: + self.writeline("yield ", item) + else: + self.newline(item) + + # A node to be evaluated at runtime. + self._output_child_pre(item, frame, finalize) + self.visit(item, frame) + self._output_child_post(item, frame, finalize) + + if frame.buffer is not None: + self.write(",") + + if frame.buffer is not None: + self.outdent() + self.writeline(")" if len(body) == 1 else "))") + + if frame.require_output_check: + self.outdent() + + def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None: + self.push_assign_tracking() + self.newline(node) + self.visit(node.target, frame) + self.write(" = ") + self.visit(node.node, frame) + self.pop_assign_tracking(frame) + + def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None: + self.push_assign_tracking() + block_frame = frame.inner() + # This is a special case. 
Since a set block always captures we + # will disable output checks. This way one can use set blocks + # toplevel even in extended templates. + block_frame.require_output_check = False + block_frame.symbols.analyze_node(node) + self.enter_frame(block_frame) + self.buffer(block_frame) + self.blockvisit(node.body, block_frame) + self.newline(node) + self.visit(node.target, frame) + self.write(" = (Markup if context.eval_ctx.autoescape else identity)(") + if node.filter is not None: + self.visit_Filter(node.filter, block_frame) + else: + self.write(f"concat({block_frame.buffer})") + self.write(")") + self.pop_assign_tracking(frame) + self.leave_frame(block_frame) + + # -- Expression Visitors + + def visit_Name(self, node: nodes.Name, frame: Frame) -> None: + if node.ctx == "store" and ( + frame.toplevel or frame.loop_frame or frame.block_frame + ): + if self._assign_stack: + self._assign_stack[-1].add(node.name) + ref = frame.symbols.ref(node.name) + + # If we are looking up a variable we might have to deal with the + # case where it's undefined. We can skip that case if the load + # instruction indicates a parameter which are always defined. + if node.ctx == "load": + load = frame.symbols.find_load(ref) + if not ( + load is not None + and load[0] == VAR_LOAD_PARAMETER + and not self.parameter_is_undeclared(ref) + ): + self.write( + f"(undefined(name={node.name!r}) if {ref} is missing else {ref})" + ) + return + + self.write(ref) + + def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None: + # NSRefs can only be used to store values; since they use the normal + # `foo.bar` notation they will be parsed as a normal attribute access + # when used anywhere but in a `set` context + ref = frame.symbols.ref(node.name) + self.writeline(f"if not isinstance({ref}, Namespace):") + self.indent() + self.writeline( + "raise TemplateRuntimeError" + '("cannot assign attribute on non-namespace object")' + ) + self.outdent() + self.writeline(f"{ref}[{node.attr!r}]") + + def visit_Const(self, node: nodes.Const, frame: Frame) -> None: + val = node.as_const(frame.eval_ctx) + if isinstance(val, float): + self.write(str(val)) + else: + self.write(repr(val)) + + def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None: + try: + self.write(repr(node.as_const(frame.eval_ctx))) + except nodes.Impossible: + self.write( + f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})" + ) + + def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None: + self.write("(") + idx = -1 + for idx, item in enumerate(node.items): + if idx: + self.write(", ") + self.visit(item, frame) + self.write(",)" if idx == 0 else ")") + + def visit_List(self, node: nodes.List, frame: Frame) -> None: + self.write("[") + for idx, item in enumerate(node.items): + if idx: + self.write(", ") + self.visit(item, frame) + self.write("]") + + def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None: + self.write("{") + for idx, item in enumerate(node.items): + if idx: + self.write(", ") + self.visit(item.key, frame) + self.write(": ") + self.visit(item.value, frame) + self.write("}") + + visit_Add = _make_binop("+") + visit_Sub = _make_binop("-") + visit_Mul = _make_binop("*") + visit_Div = _make_binop("/") + visit_FloorDiv = _make_binop("//") + visit_Pow = _make_binop("**") + visit_Mod = _make_binop("%") + visit_And = _make_binop("and") + visit_Or = _make_binop("or") + visit_Pos = _make_unop("+") + visit_Neg = _make_unop("-") + visit_Not = _make_unop("not ") + + @optimizeconst + def visit_Concat(self, node: 
nodes.Concat, frame: Frame) -> None: + if frame.eval_ctx.volatile: + func_name = "(markup_join if context.eval_ctx.volatile else str_join)" + elif frame.eval_ctx.autoescape: + func_name = "markup_join" + else: + func_name = "str_join" + self.write(f"{func_name}((") + for arg in node.nodes: + self.visit(arg, frame) + self.write(", ") + self.write("))") + + @optimizeconst + def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None: + self.write("(") + self.visit(node.expr, frame) + for op in node.ops: + self.visit(op, frame) + self.write(")") + + def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None: + self.write(f" {operators[node.op]} ") + self.visit(node.expr, frame) + + @optimizeconst + def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None: + if self.environment.is_async: + self.write("(await auto_await(") + + self.write("environment.getattr(") + self.visit(node.node, frame) + self.write(f", {node.attr!r})") + + if self.environment.is_async: + self.write("))") + + @optimizeconst + def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None: + # slices bypass the environment getitem method. + if isinstance(node.arg, nodes.Slice): + self.visit(node.node, frame) + self.write("[") + self.visit(node.arg, frame) + self.write("]") + else: + if self.environment.is_async: + self.write("(await auto_await(") + + self.write("environment.getitem(") + self.visit(node.node, frame) + self.write(", ") + self.visit(node.arg, frame) + self.write(")") + + if self.environment.is_async: + self.write("))") + + def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None: + if node.start is not None: + self.visit(node.start, frame) + self.write(":") + if node.stop is not None: + self.visit(node.stop, frame) + if node.step is not None: + self.write(":") + self.visit(node.step, frame) + + @contextmanager + def _filter_test_common( + self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool + ) -> t.Iterator[None]: + if self.environment.is_async: + self.write("(await auto_await(") + + if is_filter: + self.write(f"{self.filters[node.name]}(") + func = self.environment.filters.get(node.name) + else: + self.write(f"{self.tests[node.name]}(") + func = self.environment.tests.get(node.name) + + # When inside an If or CondExpr frame, allow the filter to be + # undefined at compile time and only raise an error if it's + # actually called at runtime. See pull_dependencies. + if func is None and not frame.soft_frame: + type_name = "filter" if is_filter else "test" + self.fail(f"No {type_name} named {node.name!r}.", node.lineno) + + pass_arg = { + _PassArg.context: "context", + _PassArg.eval_context: "context.eval_ctx", + _PassArg.environment: "environment", + }.get( + _PassArg.from_obj(func) # type: ignore + ) + + if pass_arg is not None: + self.write(f"{pass_arg}, ") + + # Back to the visitor function to handle visiting the target of + # the filter or test. 
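+ # For example (illustrative): a filter decorated with @pass_context
+ # has "context" spliced in as its first argument above, so
+ # {{ x|myfilter }} compiles to roughly t_1(context, x), where t_1 is
+ # the compiler's temporary alias for the filter.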
+ yield + + self.signature(node, frame) + self.write(")") + + if self.environment.is_async: + self.write("))") + + @optimizeconst + def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None: + with self._filter_test_common(node, frame, True): + # if the filter node is None we are inside a filter block + # and want to write to the current buffer + if node.node is not None: + self.visit(node.node, frame) + elif frame.eval_ctx.volatile: + self.write( + f"(Markup(concat({frame.buffer}))" + f" if context.eval_ctx.autoescape else concat({frame.buffer}))" + ) + elif frame.eval_ctx.autoescape: + self.write(f"Markup(concat({frame.buffer}))") + else: + self.write(f"concat({frame.buffer})") + + @optimizeconst + def visit_Test(self, node: nodes.Test, frame: Frame) -> None: + with self._filter_test_common(node, frame, False): + self.visit(node.node, frame) + + @optimizeconst + def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None: + frame = frame.soft() + + def write_expr2() -> None: + if node.expr2 is not None: + self.visit(node.expr2, frame) + return + + self.write( + f'cond_expr_undefined("the inline if-expression on' + f" {self.position(node)} evaluated to false and no else" + f' section was defined.")' + ) + + self.write("(") + self.visit(node.expr1, frame) + self.write(" if ") + self.visit(node.test, frame) + self.write(" else ") + write_expr2() + self.write(")") + + @optimizeconst + def visit_Call( + self, node: nodes.Call, frame: Frame, forward_caller: bool = False + ) -> None: + if self.environment.is_async: + self.write("(await auto_await(") + if self.environment.sandboxed: + self.write("environment.call(context, ") + else: + self.write("context.call(") + self.visit(node.node, frame) + extra_kwargs = {"caller": "caller"} if forward_caller else None + loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {} + block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {} + if extra_kwargs: + extra_kwargs.update(loop_kwargs, **block_kwargs) + elif loop_kwargs or block_kwargs: + extra_kwargs = dict(loop_kwargs, **block_kwargs) + self.signature(node, frame, extra_kwargs) + self.write(")") + if self.environment.is_async: + self.write("))") + + def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None: + self.write(node.key + "=") + self.visit(node.value, frame) + + # -- Unused nodes for extensions + + def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None: + self.write("Markup(") + self.visit(node.expr, frame) + self.write(")") + + def visit_MarkSafeIfAutoescape( + self, node: nodes.MarkSafeIfAutoescape, frame: Frame + ) -> None: + self.write("(Markup if context.eval_ctx.autoescape else identity)(") + self.visit(node.expr, frame) + self.write(")") + + def visit_EnvironmentAttribute( + self, node: nodes.EnvironmentAttribute, frame: Frame + ) -> None: + self.write("environment." 
+ node.name) + + def visit_ExtensionAttribute( + self, node: nodes.ExtensionAttribute, frame: Frame + ) -> None: + self.write(f"environment.extensions[{node.identifier!r}].{node.name}") + + def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None: + self.write(self.import_aliases[node.importname]) + + def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None: + self.write(node.name) + + def visit_ContextReference( + self, node: nodes.ContextReference, frame: Frame + ) -> None: + self.write("context") + + def visit_DerivedContextReference( + self, node: nodes.DerivedContextReference, frame: Frame + ) -> None: + self.write(self.derive_context(frame)) + + def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None: + self.writeline("continue", node) + + def visit_Break(self, node: nodes.Break, frame: Frame) -> None: + self.writeline("break", node) + + def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None: + scope_frame = frame.inner() + scope_frame.symbols.analyze_node(node) + self.enter_frame(scope_frame) + self.blockvisit(node.body, scope_frame) + self.leave_frame(scope_frame) + + def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None: + ctx = self.temporary_identifier() + self.writeline(f"{ctx} = {self.derive_context(frame)}") + self.writeline(f"{ctx}.vars = ") + self.visit(node.context, frame) + self.push_context_reference(ctx) + + scope_frame = frame.inner(isolated=True) + scope_frame.symbols.analyze_node(node) + self.enter_frame(scope_frame) + self.blockvisit(node.body, scope_frame) + self.leave_frame(scope_frame) + self.pop_context_reference() + + def visit_EvalContextModifier( + self, node: nodes.EvalContextModifier, frame: Frame + ) -> None: + for keyword in node.options: + self.writeline(f"context.eval_ctx.{keyword.key} = ") + self.visit(keyword.value, frame) + try: + val = keyword.value.as_const(frame.eval_ctx) + except nodes.Impossible: + frame.eval_ctx.volatile = True + else: + setattr(frame.eval_ctx, keyword.key, val) + + def visit_ScopedEvalContextModifier( + self, node: nodes.ScopedEvalContextModifier, frame: Frame + ) -> None: + old_ctx_name = self.temporary_identifier() + saved_ctx = frame.eval_ctx.save() + self.writeline(f"{old_ctx_name} = context.eval_ctx.save()") + self.visit_EvalContextModifier(node, frame) + for child in node.body: + self.visit(child, frame) + frame.eval_ctx.revert(saved_ctx) + self.writeline(f"context.eval_ctx.revert({old_ctx_name})") diff --git a/backend/test/lib/python3.8/site-packages/jinja2/constants.py b/backend/test/lib/python3.8/site-packages/jinja2/constants.py new file mode 100644 index 0000000000000000000000000000000000000000..41a1c23b0a7fe134b1f662545876eb65b31b071e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/constants.py @@ -0,0 +1,20 @@ +#: list of lorem ipsum words used by the lipsum() helper function +LOREM_IPSUM_WORDS = """\ +a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at +auctor augue bibendum blandit class commodo condimentum congue consectetuer +consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus +diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend +elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames +faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac +hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum +justo lacinia lacus laoreet lectus leo libero ligula litora lobortis 
lorem +luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie +mollis montes morbi mus nam nascetur natoque nec neque netus nibh nisi nisl non +nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque +penatibus per pharetra phasellus placerat platea porta porttitor posuere +potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus +ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit +sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor +tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices +ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus +viverra volutpat vulputate""" diff --git a/backend/test/lib/python3.8/site-packages/jinja2/debug.py b/backend/test/lib/python3.8/site-packages/jinja2/debug.py new file mode 100644 index 0000000000000000000000000000000000000000..7ed7e9297e01b87c4e999d19d48a4265b38b574f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/debug.py @@ -0,0 +1,191 @@ +import sys +import typing as t +from types import CodeType +from types import TracebackType + +from .exceptions import TemplateSyntaxError +from .utils import internal_code +from .utils import missing + +if t.TYPE_CHECKING: + from .runtime import Context + + +def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException: + """Rewrite the current exception to replace any tracebacks from + within compiled template code with tracebacks that look like they + came from the template source. + + This must be called within an ``except`` block. + + :param source: For ``TemplateSyntaxError``, the original source if + known. + :return: The original exception with the rewritten traceback. + """ + _, exc_value, tb = sys.exc_info() + exc_value = t.cast(BaseException, exc_value) + tb = t.cast(TracebackType, tb) + + if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: + exc_value.translated = True + exc_value.source = source + # Remove the old traceback, otherwise the frames from the + # compiler still show up. + exc_value.with_traceback(None) + # Outside of runtime, so the frame isn't executing template + # code, but it still needs to point at the template. + tb = fake_traceback( + exc_value, None, exc_value.filename or "<unknown>", exc_value.lineno + ) + else: + # Skip the frame for the render function. + tb = tb.tb_next + + stack = [] + + # Build the stack of traceback object, replacing any in template + # code with the source file and line information. + while tb is not None: + # Skip frames decorated with @internalcode. These are internal + # calls that aren't useful in template debugging output. + if tb.tb_frame.f_code in internal_code: + tb = tb.tb_next + continue + + template = tb.tb_frame.f_globals.get("__jinja_template__") + + if template is not None: + lineno = template.get_corresponding_lineno(tb.tb_lineno) + fake_tb = fake_traceback(exc_value, tb, template.filename, lineno) + stack.append(fake_tb) + else: + stack.append(tb) + + tb = tb.tb_next + + tb_next = None + + # Assign tb_next in reverse to avoid circular references. + for tb in reversed(stack): + tb.tb_next = tb_next + tb_next = tb + + return exc_value.with_traceback(tb_next) + + +def fake_traceback( # type: ignore + exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int +) -> TracebackType: + """Produce a new traceback object that looks like it came from the + template source instead of the compiled code. 
The filename, line + number, and location name will point to the template, and the local + variables will be the current template context. + + :param exc_value: The original exception to be re-raised to create + the new traceback. + :param tb: The original traceback to get the local variables and + code info from. + :param filename: The template filename. + :param lineno: The line number in the template source. + """ + if tb is not None: + # Replace the real locals with the context that would be + # available at that point in the template. + locals = get_template_locals(tb.tb_frame.f_locals) + locals.pop("__jinja_exception__", None) + else: + locals = {} + + globals = { + "__name__": filename, + "__file__": filename, + "__jinja_exception__": exc_value, + } + # Raise an exception at the correct line number. + code: CodeType = compile( + "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec" + ) + + # Build a new code object that points to the template file and + # replaces the location with a block name. + location = "template" + + if tb is not None: + function = tb.tb_frame.f_code.co_name + + if function == "root": + location = "top-level template code" + elif function.startswith("block_"): + location = f"block {function[6:]!r}" + + if sys.version_info >= (3, 8): + code = code.replace(co_name=location) + else: + code = CodeType( + code.co_argcount, + code.co_kwonlyargcount, + code.co_nlocals, + code.co_stacksize, + code.co_flags, + code.co_code, + code.co_consts, + code.co_names, + code.co_varnames, + code.co_filename, + location, + code.co_firstlineno, + code.co_lnotab, + code.co_freevars, + code.co_cellvars, + ) + + # Execute the new code, which is guaranteed to raise, and return + # the new traceback without this frame. + try: + exec(code, globals, locals) + except BaseException: + return sys.exc_info()[2].tb_next # type: ignore + + +def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]: + """Based on the runtime locals, get the context that would be + available at that point in the template. + """ + # Start with the current template context. + ctx: "t.Optional[Context]" = real_locals.get("context") + + if ctx is not None: + data: t.Dict[str, t.Any] = ctx.get_all().copy() + else: + data = {} + + # Might be in a derived context that only sets local variables + # rather than pushing a context. Local variables follow the scheme + # l_depth_name. Find the highest-depth local that has a value for + # each name. + local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {} + + for name, value in real_locals.items(): + if not name.startswith("l_") or value is missing: + # Not a template variable, or no longer relevant. + continue + + try: + _, depth_str, name = name.split("_", 2) + depth = int(depth_str) + except ValueError: + continue + + cur_depth = local_overrides.get(name, (-1,))[0] + + if cur_depth < depth: + local_overrides[name] = (depth, value) + + # Modify the context with any derived context. 
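+ # For example (illustrative): a compiled-template local such as
+ # l_1_item holding "x" becomes data["item"] = "x" here; the numeric
+ # depth prefix ensures the innermost binding wins.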
+ for name, (_, value) in local_overrides.items(): + if value is missing: + data.pop(name, None) + else: + data[name] = value + + return data diff --git a/backend/test/lib/python3.8/site-packages/jinja2/defaults.py b/backend/test/lib/python3.8/site-packages/jinja2/defaults.py new file mode 100644 index 0000000000000000000000000000000000000000..638cad3d2d8907330bde56e2b76c9b185c523b45 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/defaults.py @@ -0,0 +1,48 @@ +import typing as t + +from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401 +from .tests import TESTS as DEFAULT_TESTS # noqa: F401 +from .utils import Cycler +from .utils import generate_lorem_ipsum +from .utils import Joiner +from .utils import Namespace + +if t.TYPE_CHECKING: + import typing_extensions as te + +# defaults for the parser / lexer +BLOCK_START_STRING = "{%" +BLOCK_END_STRING = "%}" +VARIABLE_START_STRING = "{{" +VARIABLE_END_STRING = "}}" +COMMENT_START_STRING = "{#" +COMMENT_END_STRING = "#}" +LINE_STATEMENT_PREFIX: t.Optional[str] = None +LINE_COMMENT_PREFIX: t.Optional[str] = None +TRIM_BLOCKS = False +LSTRIP_BLOCKS = False +NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n" +KEEP_TRAILING_NEWLINE = False + +# default filters, tests and namespace + +DEFAULT_NAMESPACE = { + "range": range, + "dict": dict, + "lipsum": generate_lorem_ipsum, + "cycler": Cycler, + "joiner": Joiner, + "namespace": Namespace, +} + +# default policies +DEFAULT_POLICIES: t.Dict[str, t.Any] = { + "compiler.ascii_str": True, + "urlize.rel": "noopener", + "urlize.target": None, + "urlize.extra_schemes": None, + "truncate.leeway": 5, + "json.dumps_function": None, + "json.dumps_kwargs": {"sort_keys": True}, + "ext.i18n.trimmed": False, +} diff --git a/backend/test/lib/python3.8/site-packages/jinja2/environment.py b/backend/test/lib/python3.8/site-packages/jinja2/environment.py new file mode 100644 index 0000000000000000000000000000000000000000..ea04e8b44330fe22909a2c875c6601e33bd1ffc2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/environment.py @@ -0,0 +1,1667 @@ +"""Classes for managing templates and their runtime and compile time +options. +""" +import os +import typing +import typing as t +import weakref +from collections import ChainMap +from functools import lru_cache +from functools import partial +from functools import reduce +from types import CodeType + +from markupsafe import Markup + +from . 
import nodes +from .compiler import CodeGenerator +from .compiler import generate +from .defaults import BLOCK_END_STRING +from .defaults import BLOCK_START_STRING +from .defaults import COMMENT_END_STRING +from .defaults import COMMENT_START_STRING +from .defaults import DEFAULT_FILTERS +from .defaults import DEFAULT_NAMESPACE +from .defaults import DEFAULT_POLICIES +from .defaults import DEFAULT_TESTS +from .defaults import KEEP_TRAILING_NEWLINE +from .defaults import LINE_COMMENT_PREFIX +from .defaults import LINE_STATEMENT_PREFIX +from .defaults import LSTRIP_BLOCKS +from .defaults import NEWLINE_SEQUENCE +from .defaults import TRIM_BLOCKS +from .defaults import VARIABLE_END_STRING +from .defaults import VARIABLE_START_STRING +from .exceptions import TemplateNotFound +from .exceptions import TemplateRuntimeError +from .exceptions import TemplatesNotFound +from .exceptions import TemplateSyntaxError +from .exceptions import UndefinedError +from .lexer import get_lexer +from .lexer import Lexer +from .lexer import TokenStream +from .nodes import EvalContext +from .parser import Parser +from .runtime import Context +from .runtime import new_context +from .runtime import Undefined +from .utils import _PassArg +from .utils import concat +from .utils import consume +from .utils import import_string +from .utils import internalcode +from .utils import LRUCache +from .utils import missing + +if t.TYPE_CHECKING: + import typing_extensions as te + from .bccache import BytecodeCache + from .ext import Extension + from .loaders import BaseLoader + +_env_bound = t.TypeVar("_env_bound", bound="Environment") + + +# for direct template usage we have up to ten living environments +@lru_cache(maxsize=10) +def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound: + """Return a new spontaneous environment. A spontaneous environment + is used for templates created directly rather than through an + existing environment. + + :param cls: Environment class to create. + :param args: Positional arguments passed to environment. + """ + env = cls(*args) + env.shared = True + return env + + +def create_cache( + size: int, +) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]: + """Return the cache class for the given size.""" + if size == 0: + return None + + if size < 0: + return {} + + return LRUCache(size) # type: ignore + + +def copy_cache( + cache: t.Optional[t.MutableMapping], +) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]: + """Create an empty copy of the given cache.""" + if cache is None: + return None + + if type(cache) is dict: + return {} + + return LRUCache(cache.capacity) # type: ignore + + +def load_extensions( + environment: "Environment", + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]], +) -> t.Dict[str, "Extension"]: + """Load the extensions from the list and bind it to the environment. + Returns a dict of instantiated extensions. + """ + result = {} + + for extension in extensions: + if isinstance(extension, str): + extension = t.cast(t.Type["Extension"], import_string(extension)) + + result[extension.identifier] = extension(environment) + + return result + + +def _environment_config_check(environment: "Environment") -> "Environment": + """Perform a sanity check on the environment.""" + assert issubclass( + environment.undefined, Undefined + ), "'undefined' must be a subclass of 'jinja2.Undefined'." 
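+ # For example (illustrative): Environment(block_start_string="{{")
+ # fails the assertion below, because block and variable start
+ # strings must differ.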
+ assert ( + environment.block_start_string + != environment.variable_start_string + != environment.comment_start_string + ), "block, variable and comment start strings must be different." + assert environment.newline_sequence in { + "\r", + "\r\n", + "\n", + }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'." + return environment + + +class Environment: + r"""The core component of Jinja is the `Environment`. It contains + important shared variables like configuration, filters, tests, + globals and others. Instances of this class may be modified if + they are not shared and if no template was loaded so far. + Modifications on environments after the first template was loaded + will lead to surprising effects and undefined behavior. + + Here are the possible initialization parameters: + + `block_start_string` + The string marking the beginning of a block. Defaults to ``'{%'``. + + `block_end_string` + The string marking the end of a block. Defaults to ``'%}'``. + + `variable_start_string` + The string marking the beginning of a print statement. + Defaults to ``'{{'``. + + `variable_end_string` + The string marking the end of a print statement. Defaults to + ``'}}'``. + + `comment_start_string` + The string marking the beginning of a comment. Defaults to ``'{#'``. + + `comment_end_string` + The string marking the end of a comment. Defaults to ``'#}'``. + + `line_statement_prefix` + If given and a string, this will be used as prefix for line based + statements. See also :ref:`line-statements`. + + `line_comment_prefix` + If given and a string, this will be used as prefix for line based + comments. See also :ref:`line-statements`. + + .. versionadded:: 2.2 + + `trim_blocks` + If this is set to ``True`` the first newline after a block is + removed (block, not variable tag!). Defaults to `False`. + + `lstrip_blocks` + If this is set to ``True`` leading spaces and tabs are stripped + from the start of a line to a block. Defaults to `False`. + + `newline_sequence` + The sequence that starts a newline. Must be one of ``'\r'``, + ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a + useful default for Linux and OS X systems as well as web + applications. + + `keep_trailing_newline` + Preserve the trailing newline when rendering templates. + The default is ``False``, which causes a single newline, + if present, to be stripped from the end of the template. + + .. versionadded:: 2.7 + + `extensions` + List of Jinja extensions to use. This can either be import paths + as strings or extension classes. For more information have a + look at :ref:`the extensions documentation <jinja-extensions>`. + + `optimized` + should the optimizer be enabled? Default is ``True``. + + `undefined` + :class:`Undefined` or a subclass of it that is used to represent + undefined values in the template. + + `finalize` + A callable that can be used to process the result of a variable + expression before it is output. For example one can convert + ``None`` implicitly into an empty string here. + + `autoescape` + If set to ``True`` the XML/HTML autoescaping feature is enabled by + default. For more details about autoescaping see + :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also + be a callable that is passed the template name and has to + return ``True`` or ``False`` depending on autoescape should be + enabled by default. + + .. versionchanged:: 2.4 + `autoescape` can now be a function + + `loader` + The template loader for this environment. + + `cache_size` + The size of the cache. 
Per default this is ``400`` which means + that if more than 400 templates are loaded the loader will clean + out the least recently used template. If the cache size is set to + ``0`` templates are recompiled all the time, if the cache size is + ``-1`` the cache will not be cleaned. + + .. versionchanged:: 2.8 + The cache size was increased to 400 from a low 50. + + `auto_reload` + Some loaders load templates from locations where the template + sources may change (ie: file system or database). If + ``auto_reload`` is set to ``True`` (default) every time a template is + requested the loader checks if the source changed and if yes, it + will reload the template. For higher performance it's possible to + disable that. + + `bytecode_cache` + If set to a bytecode cache object, this object will provide a + cache for the internal Jinja bytecode so that templates don't + have to be parsed if they were not changed. + + See :ref:`bytecode-cache` for more information. + + `enable_async` + If set to true this enables async template execution which + allows using async functions and generators. + """ + + #: if this environment is sandboxed. Modifying this variable won't make + #: the environment sandboxed though. For a real sandboxed environment + #: have a look at jinja2.sandbox. This flag alone controls the code + #: generation by the compiler. + sandboxed = False + + #: True if the environment is just an overlay + overlayed = False + + #: the environment this environment is linked to if it is an overlay + linked_to: t.Optional["Environment"] = None + + #: shared environments have this set to `True`. A shared environment + #: must not be modified + shared = False + + #: the class that is used for code generation. See + #: :class:`~jinja2.compiler.CodeGenerator` for more information. + code_generator_class: t.Type["CodeGenerator"] = CodeGenerator + + concat = "".join + + #: the context class that is used for templates. See + #: :class:`~jinja2.runtime.Context` for more information. + context_class: t.Type[Context] = Context + + template_class: t.Type["Template"] + + def __init__( + self, + block_start_string: str = BLOCK_START_STRING, + block_end_string: str = BLOCK_END_STRING, + variable_start_string: str = VARIABLE_START_STRING, + variable_end_string: str = VARIABLE_END_STRING, + comment_start_string: str = COMMENT_START_STRING, + comment_end_string: str = COMMENT_END_STRING, + line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, + line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, + trim_blocks: bool = TRIM_BLOCKS, + lstrip_blocks: bool = LSTRIP_BLOCKS, + newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, + keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), + optimized: bool = True, + undefined: t.Type[Undefined] = Undefined, + finalize: t.Optional[t.Callable[..., t.Any]] = None, + autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, + loader: t.Optional["BaseLoader"] = None, + cache_size: int = 400, + auto_reload: bool = True, + bytecode_cache: t.Optional["BytecodeCache"] = None, + enable_async: bool = False, + ): + # !!Important notice!! + # The constructor accepts quite a few arguments that should be + # passed by keyword rather than position. 
However it's important to + # not change the order of arguments because it's used at least + # internally in those cases: + # - spontaneous environments (i18n extension and Template) + # - unittests + # If parameter changes are required only add parameters at the end + # and don't change the arguments (or the defaults!) of the arguments + # existing already. + + # lexer / parser information + self.block_start_string = block_start_string + self.block_end_string = block_end_string + self.variable_start_string = variable_start_string + self.variable_end_string = variable_end_string + self.comment_start_string = comment_start_string + self.comment_end_string = comment_end_string + self.line_statement_prefix = line_statement_prefix + self.line_comment_prefix = line_comment_prefix + self.trim_blocks = trim_blocks + self.lstrip_blocks = lstrip_blocks + self.newline_sequence = newline_sequence + self.keep_trailing_newline = keep_trailing_newline + + # runtime information + self.undefined: t.Type[Undefined] = undefined + self.optimized = optimized + self.finalize = finalize + self.autoescape = autoescape + + # defaults + self.filters = DEFAULT_FILTERS.copy() + self.tests = DEFAULT_TESTS.copy() + self.globals = DEFAULT_NAMESPACE.copy() + + # set the loader provided + self.loader = loader + self.cache = create_cache(cache_size) + self.bytecode_cache = bytecode_cache + self.auto_reload = auto_reload + + # configurable policies + self.policies = DEFAULT_POLICIES.copy() + + # load extensions + self.extensions = load_extensions(self, extensions) + + self.is_async = enable_async + _environment_config_check(self) + + def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None: + """Adds an extension after the environment was created. + + .. versionadded:: 2.5 + """ + self.extensions.update(load_extensions(self, [extension])) + + def extend(self, **attributes: t.Any) -> None: + """Add the items to the instance of the environment if they do not exist + yet. This is used by :ref:`extensions <writing-extensions>` to register + callbacks and configuration values without breaking inheritance. + """ + for key, value in attributes.items(): + if not hasattr(self, key): + setattr(self, key, value) + + def overlay( + self, + block_start_string: str = missing, + block_end_string: str = missing, + variable_start_string: str = missing, + variable_end_string: str = missing, + comment_start_string: str = missing, + comment_end_string: str = missing, + line_statement_prefix: t.Optional[str] = missing, + line_comment_prefix: t.Optional[str] = missing, + trim_blocks: bool = missing, + lstrip_blocks: bool = missing, + newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing, + keep_trailing_newline: bool = missing, + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing, + optimized: bool = missing, + undefined: t.Type[Undefined] = missing, + finalize: t.Optional[t.Callable[..., t.Any]] = missing, + autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing, + loader: t.Optional["BaseLoader"] = missing, + cache_size: int = missing, + auto_reload: bool = missing, + bytecode_cache: t.Optional["BytecodeCache"] = missing, + enable_async: bool = False, + ) -> "Environment": + """Create a new overlay environment that shares all the data with the + current environment except for cache and the overridden attributes. + Extensions cannot be removed for an overlayed environment. 
An overlayed
+        environment automatically gets all the extensions of the environment it
+        is linked to plus optional extra extensions.
+
+        Creating overlays should happen after the initial environment was set
+        up completely.  Not all attributes are truly linked, some are just
+        copied over so modifications on the original environment may not shine
+        through.
+
+        .. versionchanged:: 3.1.2
+            Added the ``newline_sequence``, ``keep_trailing_newline``,
+            and ``enable_async`` parameters to match ``__init__``.
+        """
+        args = dict(locals())
+        del args["self"], args["cache_size"], args["extensions"], args["enable_async"]
+
+        rv = object.__new__(self.__class__)
+        rv.__dict__.update(self.__dict__)
+        rv.overlayed = True
+        rv.linked_to = self
+
+        for key, value in args.items():
+            if value is not missing:
+                setattr(rv, key, value)
+
+        if cache_size is not missing:
+            rv.cache = create_cache(cache_size)
+        else:
+            rv.cache = copy_cache(self.cache)
+
+        rv.extensions = {}
+        for key, value in self.extensions.items():
+            rv.extensions[key] = value.bind(rv)
+        if extensions is not missing:
+            rv.extensions.update(load_extensions(rv, extensions))
+
+        if enable_async is not missing:
+            rv.is_async = enable_async
+
+        return _environment_config_check(rv)
+
+    @property
+    def lexer(self) -> Lexer:
+        """The lexer for this environment."""
+        return get_lexer(self)
+
+    def iter_extensions(self) -> t.Iterator["Extension"]:
+        """Iterates over the extensions by priority."""
+        return iter(sorted(self.extensions.values(), key=lambda x: x.priority))
+
+    def getitem(
+        self, obj: t.Any, argument: t.Union[str, t.Any]
+    ) -> t.Union[t.Any, Undefined]:
+        """Get an item or attribute of an object but prefer the item."""
+        try:
+            return obj[argument]
+        except (AttributeError, TypeError, LookupError):
+            if isinstance(argument, str):
+                try:
+                    attr = str(argument)
+                except Exception:
+                    pass
+                else:
+                    try:
+                        return getattr(obj, attr)
+                    except AttributeError:
+                        pass
+            return self.undefined(obj=obj, name=argument)
+
+    def getattr(self, obj: t.Any, attribute: str) -> t.Any:
+        """Get an item or attribute of an object but prefer the attribute.
+        Unlike :meth:`getitem` the attribute *must* be a string.
+        """
+        try:
+            return getattr(obj, attribute)
+        except AttributeError:
+            pass
+        try:
+            return obj[attribute]
+        except (TypeError, LookupError, AttributeError):
+            return self.undefined(obj=obj, name=attribute)
+
+    def _filter_test_common(
+        self,
+        name: t.Union[str, Undefined],
+        value: t.Any,
+        args: t.Optional[t.Sequence[t.Any]],
+        kwargs: t.Optional[t.Mapping[str, t.Any]],
+        context: t.Optional[Context],
+        eval_ctx: t.Optional[EvalContext],
+        is_filter: bool,
+    ) -> t.Any:
+        if is_filter:
+            env_map = self.filters
+            type_name = "filter"
+        else:
+            env_map = self.tests
+            type_name = "test"
+
+        func = env_map.get(name)  # type: ignore
+
+        if func is None:
+            msg = f"No {type_name} named {name!r}."
+
+            if isinstance(name, Undefined):
+                try:
+                    name._fail_with_undefined_error()
+                except Exception as e:
+                    msg = f"{msg} ({e}; did you forget to quote the callable name?)"
+
+            raise TemplateRuntimeError(msg)
+
+        args = [value, *(args if args is not None else ())]
+        kwargs = kwargs if kwargs is not None else {}
+        pass_arg = _PassArg.from_obj(func)
+
+        if pass_arg is _PassArg.context:
+            if context is None:
+                raise TemplateRuntimeError(
+                    f"Attempted to invoke a context {type_name} without context."
+ ) + + args.insert(0, context) + elif pass_arg is _PassArg.eval_context: + if eval_ctx is None: + if context is not None: + eval_ctx = context.eval_ctx + else: + eval_ctx = EvalContext(self) + + args.insert(0, eval_ctx) + elif pass_arg is _PassArg.environment: + args.insert(0, self) + + return func(*args, **kwargs) + + def call_filter( + self, + name: str, + value: t.Any, + args: t.Optional[t.Sequence[t.Any]] = None, + kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + context: t.Optional[Context] = None, + eval_ctx: t.Optional[EvalContext] = None, + ) -> t.Any: + """Invoke a filter on a value the same way the compiler does. + + This might return a coroutine if the filter is running from an + environment in async mode and the filter supports async + execution. It's your responsibility to await this if needed. + + .. versionadded:: 2.7 + """ + return self._filter_test_common( + name, value, args, kwargs, context, eval_ctx, True + ) + + def call_test( + self, + name: str, + value: t.Any, + args: t.Optional[t.Sequence[t.Any]] = None, + kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + context: t.Optional[Context] = None, + eval_ctx: t.Optional[EvalContext] = None, + ) -> t.Any: + """Invoke a test on a value the same way the compiler does. + + This might return a coroutine if the test is running from an + environment in async mode and the test supports async execution. + It's your responsibility to await this if needed. + + .. versionchanged:: 3.0 + Tests support ``@pass_context``, etc. decorators. Added + the ``context`` and ``eval_ctx`` parameters. + + .. versionadded:: 2.7 + """ + return self._filter_test_common( + name, value, args, kwargs, context, eval_ctx, False + ) + + @internalcode + def parse( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> nodes.Template: + """Parse the sourcecode and return the abstract syntax tree. This + tree of nodes is used by the compiler to convert the template into + executable source- or bytecode. This is useful for debugging or to + extract information from templates. + + If you are :ref:`developing Jinja extensions <writing-extensions>` + this gives you a good overview of the node tree generated. + """ + try: + return self._parse(source, name, filename) + except TemplateSyntaxError: + self.handle_exception(source=source) + + def _parse( + self, source: str, name: t.Optional[str], filename: t.Optional[str] + ) -> nodes.Template: + """Internal parsing function used by `parse` and `compile`.""" + return Parser(self, source, name, filename).parse() + + def lex( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> t.Iterator[t.Tuple[int, str, str]]: + """Lex the given sourcecode and return a generator that yields + tokens as tuples in the form ``(lineno, token_type, value)``. + This can be useful for :ref:`extension development <writing-extensions>` + and debugging templates. + + This does not perform preprocessing. If you want the preprocessing + of the extensions to be applied you have to filter source through + the :meth:`preprocess` method. + """ + source = str(source) + try: + return self.lexer.tokeniter(source, name, filename) + except TemplateSyntaxError: + self.handle_exception(source=source) + + def preprocess( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> str: + """Preprocesses the source with all extensions. 
This is automatically
+        called for all parsing and compiling methods but *not* for :meth:`lex`
+        because there you usually only want the actual source tokenized.
+        """
+        return reduce(
+            lambda s, e: e.preprocess(s, name, filename),
+            self.iter_extensions(),
+            str(source),
+        )
+
+    def _tokenize(
+        self,
+        source: str,
+        name: t.Optional[str],
+        filename: t.Optional[str] = None,
+        state: t.Optional[str] = None,
+    ) -> TokenStream:
+        """Called by the parser to do the preprocessing and filtering
+        for all the extensions.  Returns a :class:`~jinja2.lexer.TokenStream`.
+        """
+        source = self.preprocess(source, name, filename)
+        stream = self.lexer.tokenize(source, name, filename, state)
+
+        for ext in self.iter_extensions():
+            stream = ext.filter_stream(stream)  # type: ignore
+
+        if not isinstance(stream, TokenStream):
+            stream = TokenStream(stream, name, filename)  # type: ignore
+
+        return stream
+
+    def _generate(
+        self,
+        source: nodes.Template,
+        name: t.Optional[str],
+        filename: t.Optional[str],
+        defer_init: bool = False,
+    ) -> str:
+        """Internal hook that can be overridden to hook a different generate
+        method in.
+
+        .. versionadded:: 2.5
+        """
+        return generate(  # type: ignore
+            source,
+            self,
+            name,
+            filename,
+            defer_init=defer_init,
+            optimized=self.optimized,
+        )
+
+    def _compile(self, source: str, filename: str) -> CodeType:
+        """Internal hook that can be overridden to hook a different compile
+        method in.
+
+        .. versionadded:: 2.5
+        """
+        return compile(source, filename, "exec")  # type: ignore
+
+    @typing.overload
+    def compile(  # type: ignore
+        self,
+        source: t.Union[str, nodes.Template],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        raw: "te.Literal[False]" = False,
+        defer_init: bool = False,
+    ) -> CodeType:
+        ...
+
+    @typing.overload
+    def compile(
+        self,
+        source: t.Union[str, nodes.Template],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        raw: "te.Literal[True]" = ...,
+        defer_init: bool = False,
+    ) -> str:
+        ...
+
+    @internalcode
+    def compile(
+        self,
+        source: t.Union[str, nodes.Template],
+        name: t.Optional[str] = None,
+        filename: t.Optional[str] = None,
+        raw: bool = False,
+        defer_init: bool = False,
+    ) -> t.Union[str, CodeType]:
+        """Compile a node or template source code. The `name` parameter is
+        the load name of the template after it was joined using
+        :meth:`join_path` if necessary, not the filename on the file system.
+        The `filename` parameter is the estimated filename of the template on
+        the file system. If the template came from a database or memory this
+        can be omitted.
+
+        The return value of this method is a python code object. If the `raw`
+        parameter is `True` the return value will be a string with python
+        code equivalent to the bytecode returned otherwise. This method is
+        mainly used internally.
+
+        `defer_init` is used internally to aid the module code generator. This
+        causes the generated code to be able to import without the global
+        environment variable being set.
+
+        .. versionadded:: 2.4
+           `defer_init` parameter added.
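+
+        A short example of ``raw`` mode (illustrative)::
+
+            env = Environment()
+            code_str = env.compile("Hello {{ name }}!", raw=True)
+            # code_str now holds the generated Python source as a string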
+ """ + source_hint = None + try: + if isinstance(source, str): + source_hint = source + source = self._parse(source, name, filename) + source = self._generate(source, name, filename, defer_init=defer_init) + if raw: + return source + if filename is None: + filename = "<template>" + return self._compile(source, filename) + except TemplateSyntaxError: + self.handle_exception(source=source_hint) + + def compile_expression( + self, source: str, undefined_to_none: bool = True + ) -> "TemplateExpression": + """A handy helper method that returns a callable that accepts keyword + arguments that appear as variables in the expression. If called it + returns the result of the expression. + + This is useful if applications want to use the same rules as Jinja + in template "configuration files" or similar situations. + + Example usage: + + >>> env = Environment() + >>> expr = env.compile_expression('foo == 42') + >>> expr(foo=23) + False + >>> expr(foo=42) + True + + Per default the return value is converted to `None` if the + expression returns an undefined value. This can be changed + by setting `undefined_to_none` to `False`. + + >>> env.compile_expression('var')() is None + True + >>> env.compile_expression('var', undefined_to_none=False)() + Undefined + + .. versionadded:: 2.1 + """ + parser = Parser(self, source, state="variable") + try: + expr = parser.parse_expression() + if not parser.stream.eos: + raise TemplateSyntaxError( + "chunk after expression", parser.stream.current.lineno, None, None + ) + expr.set_environment(self) + except TemplateSyntaxError: + self.handle_exception(source=source) + + body = [nodes.Assign(nodes.Name("result", "store"), expr, lineno=1)] + template = self.from_string(nodes.Template(body, lineno=1)) + return TemplateExpression(template, undefined_to_none) + + def compile_templates( + self, + target: t.Union[str, os.PathLike], + extensions: t.Optional[t.Collection[str]] = None, + filter_func: t.Optional[t.Callable[[str], bool]] = None, + zip: t.Optional[str] = "deflated", + log_function: t.Optional[t.Callable[[str], None]] = None, + ignore_errors: bool = True, + ) -> None: + """Finds all the templates the loader can find, compiles them + and stores them in `target`. If `zip` is `None`, instead of in a + zipfile, the templates will be stored in a directory. + By default a deflate zip algorithm is used. To switch to + the stored algorithm, `zip` can be set to ``'stored'``. + + `extensions` and `filter_func` are passed to :meth:`list_templates`. + Each template returned will be compiled to the target folder or + zipfile. + + By default template compilation errors are ignored. In case a + log function is provided, errors are logged. If you want template + syntax errors to abort the compilation you can set `ignore_errors` + to `False` and you will get an exception on syntax errors. + + .. versionadded:: 2.4 + """ + from .loaders import ModuleLoader + + if log_function is None: + + def log_function(x: str) -> None: + pass + + assert log_function is not None + assert self.loader is not None, "No loader configured." 
+ + def write_file(filename: str, data: str) -> None: + if zip: + info = ZipInfo(filename) + info.external_attr = 0o755 << 16 + zip_file.writestr(info, data) + else: + with open(os.path.join(target, filename), "wb") as f: + f.write(data.encode("utf8")) + + if zip is not None: + from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED, ZIP_STORED + + zip_file = ZipFile( + target, "w", dict(deflated=ZIP_DEFLATED, stored=ZIP_STORED)[zip] + ) + log_function(f"Compiling into Zip archive {target!r}") + else: + if not os.path.isdir(target): + os.makedirs(target) + log_function(f"Compiling into folder {target!r}") + + try: + for name in self.list_templates(extensions, filter_func): + source, filename, _ = self.loader.get_source(self, name) + try: + code = self.compile(source, name, filename, True, True) + except TemplateSyntaxError as e: + if not ignore_errors: + raise + log_function(f'Could not compile "{name}": {e}') + continue + + filename = ModuleLoader.get_module_filename(name) + + write_file(filename, code) + log_function(f'Compiled "{name}" as {filename}') + finally: + if zip: + zip_file.close() + + log_function("Finished compiling templates") + + def list_templates( + self, + extensions: t.Optional[t.Collection[str]] = None, + filter_func: t.Optional[t.Callable[[str], bool]] = None, + ) -> t.List[str]: + """Returns a list of templates for this environment. This requires + that the loader supports the loader's + :meth:`~BaseLoader.list_templates` method. + + If there are other files in the template folder besides the + actual templates, the returned list can be filtered. There are two + ways: either `extensions` is set to a list of file extensions for + templates, or a `filter_func` can be provided which is a callable that + is passed a template name and should return `True` if it should end up + in the result list. + + If the loader does not support that, a :exc:`TypeError` is raised. + + .. versionadded:: 2.4 + """ + assert self.loader is not None, "No loader configured." + names = self.loader.list_templates() + + if extensions is not None: + if filter_func is not None: + raise TypeError( + "either extensions or filter_func can be passed, but not both" + ) + + def filter_func(x: str) -> bool: + return "." in x and x.rsplit(".", 1)[1] in extensions # type: ignore + + if filter_func is not None: + names = [name for name in names if filter_func(name)] + + return names + + def handle_exception(self, source: t.Optional[str] = None) -> "te.NoReturn": + """Exception handling helper. This is used internally to either raise + rewritten exceptions or return a rendered traceback for the template. + """ + from .debug import rewrite_traceback_stack + + raise rewrite_traceback_stack(source=source) + + def join_path(self, template: str, parent: str) -> str: + """Join a template with the parent. By default all the lookups are + relative to the loader root so this method returns the `template` + parameter unchanged, but if the paths should be relative to the + parent template, this function can be used to calculate the real + template name. + + Subclasses may override this method and implement template path + joining here. 
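+
+        A sketch of parent-relative lookups (illustrative, not the default
+        behavior)::
+
+            class RelativeEnvironment(Environment):
+                def join_path(self, template, parent):
+                    return posixpath.join(posixpath.dirname(parent), template)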
+ """ + return template + + @internalcode + def _load_template( + self, name: str, globals: t.Optional[t.MutableMapping[str, t.Any]] + ) -> "Template": + if self.loader is None: + raise TypeError("no loader for this environment specified") + cache_key = (weakref.ref(self.loader), name) + if self.cache is not None: + template = self.cache.get(cache_key) + if template is not None and ( + not self.auto_reload or template.is_up_to_date + ): + # template.globals is a ChainMap, modifying it will only + # affect the template, not the environment globals. + if globals: + template.globals.update(globals) + + return template + + template = self.loader.load(self, name, self.make_globals(globals)) + + if self.cache is not None: + self.cache[cache_key] = template + return template + + @internalcode + def get_template( + self, + name: t.Union[str, "Template"], + parent: t.Optional[str] = None, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> "Template": + """Load a template by name with :attr:`loader` and return a + :class:`Template`. If the template does not exist a + :exc:`TemplateNotFound` exception is raised. + + :param name: Name of the template to load. When loading + templates from the filesystem, "/" is used as the path + separator, even on Windows. + :param parent: The name of the parent template importing this + template. :meth:`join_path` can be used to implement name + transformations with this. + :param globals: Extend the environment :attr:`globals` with + these extra variables available for all renders of this + template. If the template has already been loaded and + cached, its globals are updated with any new items. + + .. versionchanged:: 3.0 + If a template is loaded from cache, ``globals`` will update + the template's globals instead of ignoring the new values. + + .. versionchanged:: 2.4 + If ``name`` is a :class:`Template` object it is returned + unchanged. + """ + if isinstance(name, Template): + return name + if parent is not None: + name = self.join_path(name, parent) + + return self._load_template(name, globals) + + @internalcode + def select_template( + self, + names: t.Iterable[t.Union[str, "Template"]], + parent: t.Optional[str] = None, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> "Template": + """Like :meth:`get_template`, but tries loading multiple names. + If none of the names can be loaded a :exc:`TemplatesNotFound` + exception is raised. + + :param names: List of template names to try loading in order. + :param parent: The name of the parent template importing this + template. :meth:`join_path` can be used to implement name + transformations with this. + :param globals: Extend the environment :attr:`globals` with + these extra variables available for all renders of this + template. If the template has already been loaded and + cached, its globals are updated with any new items. + + .. versionchanged:: 3.0 + If a template is loaded from cache, ``globals`` will update + the template's globals instead of ignoring the new values. + + .. versionchanged:: 2.11 + If ``names`` is :class:`Undefined`, an :exc:`UndefinedError` + is raised instead. If no templates were found and ``names`` + contains :class:`Undefined`, the message is more helpful. + + .. versionchanged:: 2.4 + If ``names`` contains a :class:`Template` object it is + returned unchanged. + + .. 
versionadded:: 2.3 + """ + if isinstance(names, Undefined): + names._fail_with_undefined_error() + + if not names: + raise TemplatesNotFound( + message="Tried to select from an empty list of templates." + ) + + for name in names: + if isinstance(name, Template): + return name + if parent is not None: + name = self.join_path(name, parent) + try: + return self._load_template(name, globals) + except (TemplateNotFound, UndefinedError): + pass + raise TemplatesNotFound(names) # type: ignore + + @internalcode + def get_or_select_template( + self, + template_name_or_list: t.Union[ + str, "Template", t.List[t.Union[str, "Template"]] + ], + parent: t.Optional[str] = None, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> "Template": + """Use :meth:`select_template` if an iterable of template names + is given, or :meth:`get_template` if one name is given. + + .. versionadded:: 2.3 + """ + if isinstance(template_name_or_list, (str, Undefined)): + return self.get_template(template_name_or_list, parent, globals) + elif isinstance(template_name_or_list, Template): + return template_name_or_list + return self.select_template(template_name_or_list, parent, globals) + + def from_string( + self, + source: t.Union[str, nodes.Template], + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + template_class: t.Optional[t.Type["Template"]] = None, + ) -> "Template": + """Load a template from a source string without using + :attr:`loader`. + + :param source: Jinja source to compile into a template. + :param globals: Extend the environment :attr:`globals` with + these extra variables available for all renders of this + template. If the template has already been loaded and + cached, its globals are updated with any new items. + :param template_class: Return an instance of this + :class:`Template` class. + """ + gs = self.make_globals(globals) + cls = template_class or self.template_class + return cls.from_code(self, self.compile(source), gs, None) + + def make_globals( + self, d: t.Optional[t.MutableMapping[str, t.Any]] + ) -> t.MutableMapping[str, t.Any]: + """Make the globals map for a template. Any given template + globals overlay the environment :attr:`globals`. + + Returns a :class:`collections.ChainMap`. This allows any changes + to a template's globals to only affect that template, while + changes to the environment's globals are still reflected. + However, avoid modifying any globals after a template is loaded. + + :param d: Dict of template-specific globals. + + .. versionchanged:: 3.0 + Use :class:`collections.ChainMap` to always prevent mutating + environment globals. + """ + if d is None: + d = {} + + return ChainMap(d, self.globals) + + +class Template: + """A compiled template that can be rendered. + + Use the methods on :class:`Environment` to create or load templates. + The environment is used to configure how templates are compiled and + behave. + + It is also possible to create a template object directly. This is + not usually recommended. The constructor takes most of the same + arguments as :class:`Environment`. All templates created with the + same environment arguments share the same ephemeral ``Environment`` + instance behind the scenes. + + A template object should be considered immutable. Modifications on + the object are not supported. + """ + + #: Type of environment to create when creating a template directly + #: rather than through an existing environment. 
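+    #: For example, ``jinja2.nativetypes.NativeTemplate`` points this at
+    #: ``NativeEnvironment`` so that directly constructed templates render
+    #: native Python values.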
+ environment_class: t.Type[Environment] = Environment + + environment: Environment + globals: t.MutableMapping[str, t.Any] + name: t.Optional[str] + filename: t.Optional[str] + blocks: t.Dict[str, t.Callable[[Context], t.Iterator[str]]] + root_render_func: t.Callable[[Context], t.Iterator[str]] + _module: t.Optional["TemplateModule"] + _debug_info: str + _uptodate: t.Optional[t.Callable[[], bool]] + + def __new__( + cls, + source: t.Union[str, nodes.Template], + block_start_string: str = BLOCK_START_STRING, + block_end_string: str = BLOCK_END_STRING, + variable_start_string: str = VARIABLE_START_STRING, + variable_end_string: str = VARIABLE_END_STRING, + comment_start_string: str = COMMENT_START_STRING, + comment_end_string: str = COMMENT_END_STRING, + line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, + line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, + trim_blocks: bool = TRIM_BLOCKS, + lstrip_blocks: bool = LSTRIP_BLOCKS, + newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, + keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), + optimized: bool = True, + undefined: t.Type[Undefined] = Undefined, + finalize: t.Optional[t.Callable[..., t.Any]] = None, + autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, + enable_async: bool = False, + ) -> t.Any: # it returns a `Template`, but this breaks the sphinx build... + env = get_spontaneous_environment( + cls.environment_class, # type: ignore + block_start_string, + block_end_string, + variable_start_string, + variable_end_string, + comment_start_string, + comment_end_string, + line_statement_prefix, + line_comment_prefix, + trim_blocks, + lstrip_blocks, + newline_sequence, + keep_trailing_newline, + frozenset(extensions), + optimized, + undefined, # type: ignore + finalize, + autoescape, + None, + 0, + False, + None, + enable_async, + ) + return env.from_string(source, template_class=cls) + + @classmethod + def from_code( + cls, + environment: Environment, + code: CodeType, + globals: t.MutableMapping[str, t.Any], + uptodate: t.Optional[t.Callable[[], bool]] = None, + ) -> "Template": + """Creates a template object from compiled code and the globals. This + is used by the loaders and environment to create a template object. + """ + namespace = {"environment": environment, "__file__": code.co_filename} + exec(code, namespace) + rv = cls._from_namespace(environment, namespace, globals) + rv._uptodate = uptodate + return rv + + @classmethod + def from_module_dict( + cls, + environment: Environment, + module_dict: t.MutableMapping[str, t.Any], + globals: t.MutableMapping[str, t.Any], + ) -> "Template": + """Creates a template object from a module. This is used by the + module loader to create a template object. + + .. 
versionadded:: 2.4 + """ + return cls._from_namespace(environment, module_dict, globals) + + @classmethod + def _from_namespace( + cls, + environment: Environment, + namespace: t.MutableMapping[str, t.Any], + globals: t.MutableMapping[str, t.Any], + ) -> "Template": + t: "Template" = object.__new__(cls) + t.environment = environment + t.globals = globals + t.name = namespace["name"] + t.filename = namespace["__file__"] + t.blocks = namespace["blocks"] + + # render function and module + t.root_render_func = namespace["root"] # type: ignore + t._module = None + + # debug and loader helpers + t._debug_info = namespace["debug_info"] + t._uptodate = None + + # store the reference + namespace["environment"] = environment + namespace["__jinja_template__"] = t + + return t + + def render(self, *args: t.Any, **kwargs: t.Any) -> str: + """This method accepts the same arguments as the `dict` constructor: + A dict, a dict subclass or some keyword arguments. If no arguments + are given the context will be empty. These two calls do the same:: + + template.render(knights='that say nih') + template.render({'knights': 'that say nih'}) + + This will return the rendered template as a string. + """ + if self.environment.is_async: + import asyncio + + close = False + + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = asyncio.new_event_loop() + close = True + + try: + return loop.run_until_complete(self.render_async(*args, **kwargs)) + finally: + if close: + loop.close() + + ctx = self.new_context(dict(*args, **kwargs)) + + try: + return self.environment.concat(self.root_render_func(ctx)) # type: ignore + except Exception: + self.environment.handle_exception() + + async def render_async(self, *args: t.Any, **kwargs: t.Any) -> str: + """This works similar to :meth:`render` but returns a coroutine + that when awaited returns the entire rendered template string. This + requires the async feature to be enabled. + + Example usage:: + + await template.render_async(knights='that say nih; asynchronously') + """ + if not self.environment.is_async: + raise RuntimeError( + "The environment was not created with async mode enabled." + ) + + ctx = self.new_context(dict(*args, **kwargs)) + + try: + return self.environment.concat( # type: ignore + [n async for n in self.root_render_func(ctx)] # type: ignore + ) + except Exception: + return self.environment.handle_exception() + + def stream(self, *args: t.Any, **kwargs: t.Any) -> "TemplateStream": + """Works exactly like :meth:`generate` but returns a + :class:`TemplateStream`. + """ + return TemplateStream(self.generate(*args, **kwargs)) + + def generate(self, *args: t.Any, **kwargs: t.Any) -> t.Iterator[str]: + """For very large templates it can be useful to not render the whole + template at once but evaluate each statement after another and yield + piece for piece. This method basically does exactly that and returns + a generator that yields one item after another as strings. + + It accepts the same arguments as :meth:`render`. + """ + if self.environment.is_async: + import asyncio + + async def to_list() -> t.List[str]: + return [x async for x in self.generate_async(*args, **kwargs)] + + yield from asyncio.run(to_list()) + return + + ctx = self.new_context(dict(*args, **kwargs)) + + try: + yield from self.root_render_func(ctx) # type: ignore + except Exception: + yield self.environment.handle_exception() + + async def generate_async( + self, *args: t.Any, **kwargs: t.Any + ) -> t.AsyncIterator[str]: + """An async version of :meth:`generate`. 
Works very similarly but + returns an async iterator instead. + """ + if not self.environment.is_async: + raise RuntimeError( + "The environment was not created with async mode enabled." + ) + + ctx = self.new_context(dict(*args, **kwargs)) + + try: + async for event in self.root_render_func(ctx): # type: ignore + yield event + except Exception: + yield self.environment.handle_exception() + + def new_context( + self, + vars: t.Optional[t.Dict[str, t.Any]] = None, + shared: bool = False, + locals: t.Optional[t.Mapping[str, t.Any]] = None, + ) -> Context: + """Create a new :class:`Context` for this template. The vars + provided will be passed to the template. Per default the globals + are added to the context. If shared is set to `True` the data + is passed as is to the context without adding the globals. + + `locals` can be a dict of local variables for internal usage. + """ + return new_context( + self.environment, self.name, self.blocks, vars, shared, self.globals, locals + ) + + def make_module( + self, + vars: t.Optional[t.Dict[str, t.Any]] = None, + shared: bool = False, + locals: t.Optional[t.Mapping[str, t.Any]] = None, + ) -> "TemplateModule": + """This method works like the :attr:`module` attribute when called + without arguments but it will evaluate the template on every call + rather than caching it. It's also possible to provide + a dict which is then used as context. The arguments are the same + as for the :meth:`new_context` method. + """ + ctx = self.new_context(vars, shared, locals) + return TemplateModule(self, ctx) + + async def make_module_async( + self, + vars: t.Optional[t.Dict[str, t.Any]] = None, + shared: bool = False, + locals: t.Optional[t.Mapping[str, t.Any]] = None, + ) -> "TemplateModule": + """As template module creation can invoke template code for + asynchronous executions this method must be used instead of the + normal :meth:`make_module` one. Likewise the module attribute + becomes unavailable in async mode. + """ + ctx = self.new_context(vars, shared, locals) + return TemplateModule( + self, ctx, [x async for x in self.root_render_func(ctx)] # type: ignore + ) + + @internalcode + def _get_default_module(self, ctx: t.Optional[Context] = None) -> "TemplateModule": + """If a context is passed in, this means that the template was + imported. Imported templates have access to the current + template's globals by default, but they can only be accessed via + the context during runtime. + + If there are new globals, we need to create a new module because + the cached module is already rendered and will not have access + to globals from the current context. This new module is not + cached because the template can be imported elsewhere, and it + should have access to only the current template's globals. 
+ """ + if self.environment.is_async: + raise RuntimeError("Module is not available in async mode.") + + if ctx is not None: + keys = ctx.globals_keys - self.globals.keys() + + if keys: + return self.make_module({k: ctx.parent[k] for k in keys}) + + if self._module is None: + self._module = self.make_module() + + return self._module + + async def _get_default_module_async( + self, ctx: t.Optional[Context] = None + ) -> "TemplateModule": + if ctx is not None: + keys = ctx.globals_keys - self.globals.keys() + + if keys: + return await self.make_module_async({k: ctx.parent[k] for k in keys}) + + if self._module is None: + self._module = await self.make_module_async() + + return self._module + + @property + def module(self) -> "TemplateModule": + """The template as module. This is used for imports in the + template runtime but is also useful if one wants to access + exported template variables from the Python layer: + + >>> t = Template('{% macro foo() %}42{% endmacro %}23') + >>> str(t.module) + '23' + >>> t.module.foo() == u'42' + True + + This attribute is not available if async mode is enabled. + """ + return self._get_default_module() + + def get_corresponding_lineno(self, lineno: int) -> int: + """Return the source line number of a line number in the + generated bytecode as they are not in sync. + """ + for template_line, code_line in reversed(self.debug_info): + if code_line <= lineno: + return template_line + return 1 + + @property + def is_up_to_date(self) -> bool: + """If this variable is `False` there is a newer version available.""" + if self._uptodate is None: + return True + return self._uptodate() + + @property + def debug_info(self) -> t.List[t.Tuple[int, int]]: + """The debug info mapping.""" + if self._debug_info: + return [ + tuple(map(int, x.split("="))) # type: ignore + for x in self._debug_info.split("&") + ] + + return [] + + def __repr__(self) -> str: + if self.name is None: + name = f"memory:{id(self):x}" + else: + name = repr(self.name) + return f"<{type(self).__name__} {name}>" + + +class TemplateModule: + """Represents an imported template. All the exported names of the + template are available as attributes on this object. Additionally + converting it into a string renders the contents. + """ + + def __init__( + self, + template: Template, + context: Context, + body_stream: t.Optional[t.Iterable[str]] = None, + ) -> None: + if body_stream is None: + if context.environment.is_async: + raise RuntimeError( + "Async mode requires a body stream to be passed to" + " a template module. Use the async methods of the" + " API you are using." + ) + + body_stream = list(template.root_render_func(context)) # type: ignore + + self._body_stream = body_stream + self.__dict__.update(context.get_exported()) + self.__name__ = template.name + + def __html__(self) -> Markup: + return Markup(concat(self._body_stream)) + + def __str__(self) -> str: + return concat(self._body_stream) + + def __repr__(self) -> str: + if self.__name__ is None: + name = f"memory:{id(self):x}" + else: + name = repr(self.__name__) + return f"<{type(self).__name__} {name}>" + + +class TemplateExpression: + """The :meth:`jinja2.Environment.compile_expression` method returns an + instance of this object. It encapsulates the expression-like access + to the template with an expression it wraps. 
+ """ + + def __init__(self, template: Template, undefined_to_none: bool) -> None: + self._template = template + self._undefined_to_none = undefined_to_none + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Optional[t.Any]: + context = self._template.new_context(dict(*args, **kwargs)) + consume(self._template.root_render_func(context)) # type: ignore + rv = context.vars["result"] + if self._undefined_to_none and isinstance(rv, Undefined): + rv = None + return rv + + +class TemplateStream: + """A template stream works pretty much like an ordinary python generator + but it can buffer multiple items to reduce the number of total iterations. + Per default the output is unbuffered which means that for every unbuffered + instruction in the template one string is yielded. + + If buffering is enabled with a buffer size of 5, five items are combined + into a new string. This is mainly useful if you are streaming + big templates to a client via WSGI which flushes after each iteration. + """ + + def __init__(self, gen: t.Iterator[str]) -> None: + self._gen = gen + self.disable_buffering() + + def dump( + self, + fp: t.Union[str, t.IO], + encoding: t.Optional[str] = None, + errors: t.Optional[str] = "strict", + ) -> None: + """Dump the complete stream into a file or file-like object. + Per default strings are written, if you want to encode + before writing specify an `encoding`. + + Example usage:: + + Template('Hello {{ name }}!').stream(name='foo').dump('hello.html') + """ + close = False + + if isinstance(fp, str): + if encoding is None: + encoding = "utf-8" + + fp = open(fp, "wb") + close = True + try: + if encoding is not None: + iterable = (x.encode(encoding, errors) for x in self) # type: ignore + else: + iterable = self # type: ignore + + if hasattr(fp, "writelines"): + fp.writelines(iterable) + else: + for item in iterable: + fp.write(item) + finally: + if close: + fp.close() + + def disable_buffering(self) -> None: + """Disable the output buffering.""" + self._next = partial(next, self._gen) + self.buffered = False + + def _buffered_generator(self, size: int) -> t.Iterator[str]: + buf: t.List[str] = [] + c_size = 0 + push = buf.append + + while True: + try: + while c_size < size: + c = next(self._gen) + push(c) + if c: + c_size += 1 + except StopIteration: + if not c_size: + return + yield concat(buf) + del buf[:] + c_size = 0 + + def enable_buffering(self, size: int = 5) -> None: + """Enable buffering. Buffer `size` items before yielding them.""" + if size <= 1: + raise ValueError("buffer size too small") + + self.buffered = True + self._next = partial(next, self._buffered_generator(size)) + + def __iter__(self) -> "TemplateStream": + return self + + def __next__(self) -> str: + return self._next() # type: ignore + + +# hook in default template class. 
if anyone reads this comment: ignore that +# it's possible to use custom templates ;-) +Environment.template_class = Template diff --git a/backend/test/lib/python3.8/site-packages/jinja2/exceptions.py b/backend/test/lib/python3.8/site-packages/jinja2/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..082ebe8f221d4e7e980e4d321c0a0c5da033b124 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/exceptions.py @@ -0,0 +1,166 @@ +import typing as t + +if t.TYPE_CHECKING: + from .runtime import Undefined + + +class TemplateError(Exception): + """Baseclass for all template errors.""" + + def __init__(self, message: t.Optional[str] = None) -> None: + super().__init__(message) + + @property + def message(self) -> t.Optional[str]: + return self.args[0] if self.args else None + + +class TemplateNotFound(IOError, LookupError, TemplateError): + """Raised if a template does not exist. + + .. versionchanged:: 2.11 + If the given name is :class:`Undefined` and no message was + provided, an :exc:`UndefinedError` is raised. + """ + + # Silence the Python warning about message being deprecated since + # it's not valid here. + message: t.Optional[str] = None + + def __init__( + self, + name: t.Optional[t.Union[str, "Undefined"]], + message: t.Optional[str] = None, + ) -> None: + IOError.__init__(self, name) + + if message is None: + from .runtime import Undefined + + if isinstance(name, Undefined): + name._fail_with_undefined_error() + + message = name + + self.message = message + self.name = name + self.templates = [name] + + def __str__(self) -> str: + return str(self.message) + + +class TemplatesNotFound(TemplateNotFound): + """Like :class:`TemplateNotFound` but raised if multiple templates + are selected. This is a subclass of :class:`TemplateNotFound` + exception, so just catching the base exception will catch both. + + .. versionchanged:: 2.11 + If a name in the list of names is :class:`Undefined`, a message + about it being undefined is shown rather than the empty string. + + .. 
versionadded:: 2.2 + """ + + def __init__( + self, + names: t.Sequence[t.Union[str, "Undefined"]] = (), + message: t.Optional[str] = None, + ) -> None: + if message is None: + from .runtime import Undefined + + parts = [] + + for name in names: + if isinstance(name, Undefined): + parts.append(name._undefined_message) + else: + parts.append(name) + + parts_str = ", ".join(map(str, parts)) + message = f"none of the templates given were found: {parts_str}" + + super().__init__(names[-1] if names else None, message) + self.templates = list(names) + + +class TemplateSyntaxError(TemplateError): + """Raised to tell the user that there is a problem with the template.""" + + def __init__( + self, + message: str, + lineno: int, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> None: + super().__init__(message) + self.lineno = lineno + self.name = name + self.filename = filename + self.source: t.Optional[str] = None + + # this is set to True if the debug.translate_syntax_error + # function translated the syntax error into a new traceback + self.translated = False + + def __str__(self) -> str: + # for translated errors we only return the message + if self.translated: + return t.cast(str, self.message) + + # otherwise attach some stuff + location = f"line {self.lineno}" + name = self.filename or self.name + if name: + location = f'File "{name}", {location}' + lines = [t.cast(str, self.message), " " + location] + + # if the source is set, add the line to the output + if self.source is not None: + try: + line = self.source.splitlines()[self.lineno - 1] + except IndexError: + pass + else: + lines.append(" " + line.strip()) + + return "\n".join(lines) + + def __reduce__(self): # type: ignore + # https://bugs.python.org/issue1692335 Exceptions that take + # multiple required arguments have problems with pickling. + # Without this, raises TypeError: __init__() missing 1 required + # positional argument: 'lineno' + return self.__class__, (self.message, self.lineno, self.name, self.filename) + + +class TemplateAssertionError(TemplateSyntaxError): + """Like a template syntax error, but covers cases where something in the + template caused an error at compile time that wasn't necessarily caused + by a syntax error. However it's a direct subclass of + :exc:`TemplateSyntaxError` and has the same attributes. + """ + + +class TemplateRuntimeError(TemplateError): + """A generic runtime error in the template engine. Under some situations + Jinja may raise this exception. + """ + + +class UndefinedError(TemplateRuntimeError): + """Raised if a template tries to operate on :class:`Undefined`.""" + + +class SecurityError(TemplateRuntimeError): + """Raised if a template tries to do something insecure if the + sandbox is enabled. + """ + + +class FilterArgumentError(TemplateRuntimeError): + """This error is raised if a filter was called with inappropriate + arguments + """ diff --git a/backend/test/lib/python3.8/site-packages/jinja2/ext.py b/backend/test/lib/python3.8/site-packages/jinja2/ext.py new file mode 100644 index 0000000000000000000000000000000000000000..d5550540cda01ea9da32747754d34603a7bbac0a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/ext.py @@ -0,0 +1,859 @@ +"""Extension API for adding custom tags and behavior.""" +import pprint +import re +import typing as t + +from markupsafe import Markup + +from . import defaults +from . 
import nodes +from .environment import Environment +from .exceptions import TemplateAssertionError +from .exceptions import TemplateSyntaxError +from .runtime import concat # type: ignore +from .runtime import Context +from .runtime import Undefined +from .utils import import_string +from .utils import pass_context + +if t.TYPE_CHECKING: + import typing_extensions as te + from .lexer import Token + from .lexer import TokenStream + from .parser import Parser + + class _TranslationsBasic(te.Protocol): + def gettext(self, message: str) -> str: + ... + + def ngettext(self, singular: str, plural: str, n: int) -> str: + pass + + class _TranslationsContext(_TranslationsBasic): + def pgettext(self, context: str, message: str) -> str: + ... + + def npgettext(self, context: str, singular: str, plural: str, n: int) -> str: + ... + + _SupportedTranslations = t.Union[_TranslationsBasic, _TranslationsContext] + + +# I18N functions available in Jinja templates. If the I18N library +# provides ugettext, it will be assigned to gettext. +GETTEXT_FUNCTIONS: t.Tuple[str, ...] = ( + "_", + "gettext", + "ngettext", + "pgettext", + "npgettext", +) +_ws_re = re.compile(r"\s*\n\s*") + + +class Extension: + """Extensions can be used to add extra functionality to the Jinja template + system at the parser level. Custom extensions are bound to an environment + but may not store environment specific data on `self`. The reason for + this is that an extension can be bound to another environment (for + overlays) by creating a copy and reassigning the `environment` attribute. + + As extensions are created by the environment they cannot accept any + arguments for configuration. One may want to work around that by using + a factory function, but that is not possible as extensions are identified + by their import name. The correct way to configure the extension is + storing the configuration values on the environment. Because this way the + environment ends up acting as central configuration storage the + attributes may clash which is why extensions have to ensure that the names + they choose for configuration are not too generic. ``prefix`` for example + is a terrible name, ``fragment_cache_prefix`` on the other hand is a good + name as includes the name of the extension (fragment cache). + """ + + identifier: t.ClassVar[str] + + def __init_subclass__(cls) -> None: + cls.identifier = f"{cls.__module__}.{cls.__name__}" + + #: if this extension parses this is the list of tags it's listening to. + tags: t.Set[str] = set() + + #: the priority of that extension. This is especially useful for + #: extensions that preprocess values. A lower value means higher + #: priority. + #: + #: .. versionadded:: 2.4 + priority = 100 + + def __init__(self, environment: Environment) -> None: + self.environment = environment + + def bind(self, environment: Environment) -> "Extension": + """Create a copy of this extension bound to another environment.""" + rv = object.__new__(self.__class__) + rv.__dict__.update(self.__dict__) + rv.environment = environment + return rv + + def preprocess( + self, source: str, name: t.Optional[str], filename: t.Optional[str] = None + ) -> str: + """This method is called before the actual lexing and can be used to + preprocess the source. The `filename` is optional. The return value + must be the preprocessed source. 
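+
+        A minimal sketch (illustrative; ``MarkerStripper`` is a made-up
+        example, not part of Jinja)::
+
+            class MarkerStripper(Extension):
+                def preprocess(self, source, name, filename=None):
+                    return source.replace("<!-- dev-only -->", "")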
+ """ + return source + + def filter_stream( + self, stream: "TokenStream" + ) -> t.Union["TokenStream", t.Iterable["Token"]]: + """It's passed a :class:`~jinja2.lexer.TokenStream` that can be used + to filter tokens returned. This method has to return an iterable of + :class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a + :class:`~jinja2.lexer.TokenStream`. + """ + return stream + + def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]: + """If any of the :attr:`tags` matched this method is called with the + parser as first argument. The token the parser stream is pointing at + is the name token that matched. This method has to return one or a + list of multiple nodes. + """ + raise NotImplementedError() + + def attr( + self, name: str, lineno: t.Optional[int] = None + ) -> nodes.ExtensionAttribute: + """Return an attribute node for the current extension. This is useful + to pass constants on extensions to generated template code. + + :: + + self.attr('_my_attribute', lineno=lineno) + """ + return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno) + + def call_method( + self, + name: str, + args: t.Optional[t.List[nodes.Expr]] = None, + kwargs: t.Optional[t.List[nodes.Keyword]] = None, + dyn_args: t.Optional[nodes.Expr] = None, + dyn_kwargs: t.Optional[nodes.Expr] = None, + lineno: t.Optional[int] = None, + ) -> nodes.Call: + """Call a method of the extension. This is a shortcut for + :meth:`attr` + :class:`jinja2.nodes.Call`. + """ + if args is None: + args = [] + if kwargs is None: + kwargs = [] + return nodes.Call( + self.attr(name, lineno=lineno), + args, + kwargs, + dyn_args, + dyn_kwargs, + lineno=lineno, + ) + + +@pass_context +def _gettext_alias( + __context: Context, *args: t.Any, **kwargs: t.Any +) -> t.Union[t.Any, Undefined]: + return __context.call(__context.resolve("gettext"), *args, **kwargs) + + +def _make_new_gettext(func: t.Callable[[str], str]) -> t.Callable[..., str]: + @pass_context + def gettext(__context: Context, __string: str, **variables: t.Any) -> str: + rv = __context.call(func, __string) + if __context.eval_ctx.autoescape: + rv = Markup(rv) + # Always treat as a format string, even if there are no + # variables. This makes translation strings more consistent + # and predictable. This requires escaping + return rv % variables # type: ignore + + return gettext + + +def _make_new_ngettext(func: t.Callable[[str, str, int], str]) -> t.Callable[..., str]: + @pass_context + def ngettext( + __context: Context, + __singular: str, + __plural: str, + __num: int, + **variables: t.Any, + ) -> str: + variables.setdefault("num", __num) + rv = __context.call(func, __singular, __plural, __num) + if __context.eval_ctx.autoescape: + rv = Markup(rv) + # Always treat as a format string, see gettext comment above. + return rv % variables # type: ignore + + return ngettext + + +def _make_new_pgettext(func: t.Callable[[str, str], str]) -> t.Callable[..., str]: + @pass_context + def pgettext( + __context: Context, __string_ctx: str, __string: str, **variables: t.Any + ) -> str: + variables.setdefault("context", __string_ctx) + rv = __context.call(func, __string_ctx, __string) + + if __context.eval_ctx.autoescape: + rv = Markup(rv) + + # Always treat as a format string, see gettext comment above. 
+ return rv % variables # type: ignore + + return pgettext + + +def _make_new_npgettext( + func: t.Callable[[str, str, str, int], str] +) -> t.Callable[..., str]: + @pass_context + def npgettext( + __context: Context, + __string_ctx: str, + __singular: str, + __plural: str, + __num: int, + **variables: t.Any, + ) -> str: + variables.setdefault("context", __string_ctx) + variables.setdefault("num", __num) + rv = __context.call(func, __string_ctx, __singular, __plural, __num) + + if __context.eval_ctx.autoescape: + rv = Markup(rv) + + # Always treat as a format string, see gettext comment above. + return rv % variables # type: ignore + + return npgettext + + +class InternationalizationExtension(Extension): + """This extension adds gettext support to Jinja.""" + + tags = {"trans"} + + # TODO: the i18n extension is currently reevaluating values in a few + # situations. Take this example: + # {% trans count=something() %}{{ count }} foo{% pluralize + # %}{{ count }} fooss{% endtrans %} + # something is called twice here. One time for the gettext value and + # the other time for the n-parameter of the ngettext function. + + def __init__(self, environment: Environment) -> None: + super().__init__(environment) + environment.globals["_"] = _gettext_alias + environment.extend( + install_gettext_translations=self._install, + install_null_translations=self._install_null, + install_gettext_callables=self._install_callables, + uninstall_gettext_translations=self._uninstall, + extract_translations=self._extract, + newstyle_gettext=False, + ) + + def _install( + self, translations: "_SupportedTranslations", newstyle: t.Optional[bool] = None + ) -> None: + # ugettext and ungettext are preferred in case the I18N library + # is providing compatibility with older Python versions. 
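+        # (That is: Python 2's ugettext/ungettext returned unicode, while
+        # Python 3 translations expose only gettext/ngettext, so the
+        # getattr calls below fall back to those.)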
+ gettext = getattr(translations, "ugettext", None) + if gettext is None: + gettext = translations.gettext + ngettext = getattr(translations, "ungettext", None) + if ngettext is None: + ngettext = translations.ngettext + + pgettext = getattr(translations, "pgettext", None) + npgettext = getattr(translations, "npgettext", None) + self._install_callables( + gettext, ngettext, newstyle=newstyle, pgettext=pgettext, npgettext=npgettext + ) + + def _install_null(self, newstyle: t.Optional[bool] = None) -> None: + import gettext + + translations = gettext.NullTranslations() + + if hasattr(translations, "pgettext"): + # Python < 3.8 + pgettext = translations.pgettext # type: ignore + else: + + def pgettext(c: str, s: str) -> str: + return s + + if hasattr(translations, "npgettext"): + npgettext = translations.npgettext # type: ignore + else: + + def npgettext(c: str, s: str, p: str, n: int) -> str: + return s if n == 1 else p + + self._install_callables( + gettext=translations.gettext, + ngettext=translations.ngettext, + newstyle=newstyle, + pgettext=pgettext, + npgettext=npgettext, + ) + + def _install_callables( + self, + gettext: t.Callable[[str], str], + ngettext: t.Callable[[str, str, int], str], + newstyle: t.Optional[bool] = None, + pgettext: t.Optional[t.Callable[[str, str], str]] = None, + npgettext: t.Optional[t.Callable[[str, str, str, int], str]] = None, + ) -> None: + if newstyle is not None: + self.environment.newstyle_gettext = newstyle # type: ignore + if self.environment.newstyle_gettext: # type: ignore + gettext = _make_new_gettext(gettext) + ngettext = _make_new_ngettext(ngettext) + + if pgettext is not None: + pgettext = _make_new_pgettext(pgettext) + + if npgettext is not None: + npgettext = _make_new_npgettext(npgettext) + + self.environment.globals.update( + gettext=gettext, ngettext=ngettext, pgettext=pgettext, npgettext=npgettext + ) + + def _uninstall(self, translations: "_SupportedTranslations") -> None: + for key in ("gettext", "ngettext", "pgettext", "npgettext"): + self.environment.globals.pop(key, None) + + def _extract( + self, + source: t.Union[str, nodes.Template], + gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS, + ) -> t.Iterator[ + t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]] + ]: + if isinstance(source, str): + source = self.environment.parse(source) + return extract_from_ast(source, gettext_functions) + + def parse(self, parser: "Parser") -> t.Union[nodes.Node, t.List[nodes.Node]]: + """Parse a translatable tag.""" + lineno = next(parser.stream).lineno + + context = None + context_token = parser.stream.next_if("string") + + if context_token is not None: + context = context_token.value + + # find all the variables referenced. Additionally a variable can be + # defined in the body of the trans block too, but this is checked at + # a later state. 
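+        # (Illustrative: in {% trans user=user.name %}Hi {{ user }}!{% endtrans %}
+        # "user" is assigned up front, while a bare {{ name }} in the body is
+        # collected later as a plain referenced name.)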
+ plural_expr: t.Optional[nodes.Expr] = None + plural_expr_assignment: t.Optional[nodes.Assign] = None + num_called_num = False + variables: t.Dict[str, nodes.Expr] = {} + trimmed = None + while parser.stream.current.type != "block_end": + if variables: + parser.stream.expect("comma") + + # skip colon for python compatibility + if parser.stream.skip_if("colon"): + break + + token = parser.stream.expect("name") + if token.value in variables: + parser.fail( + f"translatable variable {token.value!r} defined twice.", + token.lineno, + exc=TemplateAssertionError, + ) + + # expressions + if parser.stream.current.type == "assign": + next(parser.stream) + variables[token.value] = var = parser.parse_expression() + elif trimmed is None and token.value in ("trimmed", "notrimmed"): + trimmed = token.value == "trimmed" + continue + else: + variables[token.value] = var = nodes.Name(token.value, "load") + + if plural_expr is None: + if isinstance(var, nodes.Call): + plural_expr = nodes.Name("_trans", "load") + variables[token.value] = plural_expr + plural_expr_assignment = nodes.Assign( + nodes.Name("_trans", "store"), var + ) + else: + plural_expr = var + num_called_num = token.value == "num" + + parser.stream.expect("block_end") + + plural = None + have_plural = False + referenced = set() + + # now parse until endtrans or pluralize + singular_names, singular = self._parse_block(parser, True) + if singular_names: + referenced.update(singular_names) + if plural_expr is None: + plural_expr = nodes.Name(singular_names[0], "load") + num_called_num = singular_names[0] == "num" + + # if we have a pluralize block, we parse that too + if parser.stream.current.test("name:pluralize"): + have_plural = True + next(parser.stream) + if parser.stream.current.type != "block_end": + token = parser.stream.expect("name") + if token.value not in variables: + parser.fail( + f"unknown variable {token.value!r} for pluralization", + token.lineno, + exc=TemplateAssertionError, + ) + plural_expr = variables[token.value] + num_called_num = token.value == "num" + parser.stream.expect("block_end") + plural_names, plural = self._parse_block(parser, False) + next(parser.stream) + referenced.update(plural_names) + else: + next(parser.stream) + + # register free names as simple name expressions + for name in referenced: + if name not in variables: + variables[name] = nodes.Name(name, "load") + + if not have_plural: + plural_expr = None + elif plural_expr is None: + parser.fail("pluralize without variables", lineno) + + if trimmed is None: + trimmed = self.environment.policies["ext.i18n.trimmed"] + if trimmed: + singular = self._trim_whitespace(singular) + if plural: + plural = self._trim_whitespace(plural) + + node = self._make_node( + singular, + plural, + context, + variables, + plural_expr, + bool(referenced), + num_called_num and have_plural, + ) + node.set_lineno(lineno) + if plural_expr_assignment is not None: + return [plural_expr_assignment, node] + else: + return node + + def _trim_whitespace(self, string: str, _ws_re: t.Pattern[str] = _ws_re) -> str: + return _ws_re.sub(" ", string.strip()) + + def _parse_block( + self, parser: "Parser", allow_pluralize: bool + ) -> t.Tuple[t.List[str], str]: + """Parse until the next block tag with a given name.""" + referenced = [] + buf = [] + + while True: + if parser.stream.current.type == "data": + buf.append(parser.stream.current.value.replace("%", "%%")) + next(parser.stream) + elif parser.stream.current.type == "variable_begin": + next(parser.stream) + name = 
parser.stream.expect("name").value + referenced.append(name) + buf.append(f"%({name})s") + parser.stream.expect("variable_end") + elif parser.stream.current.type == "block_begin": + next(parser.stream) + if parser.stream.current.test("name:endtrans"): + break + elif parser.stream.current.test("name:pluralize"): + if allow_pluralize: + break + parser.fail( + "a translatable section can have only one pluralize section" + ) + parser.fail( + "control structures in translatable sections are not allowed" + ) + elif parser.stream.eos: + parser.fail("unclosed translation block") + else: + raise RuntimeError("internal parser error") + + return referenced, concat(buf) + + def _make_node( + self, + singular: str, + plural: t.Optional[str], + context: t.Optional[str], + variables: t.Dict[str, nodes.Expr], + plural_expr: t.Optional[nodes.Expr], + vars_referenced: bool, + num_called_num: bool, + ) -> nodes.Output: + """Generates a useful node from the data provided.""" + newstyle = self.environment.newstyle_gettext # type: ignore + node: nodes.Expr + + # no variables referenced? no need to escape for old style + # gettext invocations only if there are vars. + if not vars_referenced and not newstyle: + singular = singular.replace("%%", "%") + if plural: + plural = plural.replace("%%", "%") + + func_name = "gettext" + func_args: t.List[nodes.Expr] = [nodes.Const(singular)] + + if context is not None: + func_args.insert(0, nodes.Const(context)) + func_name = f"p{func_name}" + + if plural_expr is not None: + func_name = f"n{func_name}" + func_args.extend((nodes.Const(plural), plural_expr)) + + node = nodes.Call(nodes.Name(func_name, "load"), func_args, [], None, None) + + # in case newstyle gettext is used, the method is powerful + # enough to handle the variable expansion and autoescape + # handling itself + if newstyle: + for key, value in variables.items(): + # the function adds that later anyways in case num was + # called num, so just skip it. + if num_called_num and key == "num": + continue + node.kwargs.append(nodes.Keyword(key, value)) + + # otherwise do that here + else: + # mark the return value as safe if we are in an + # environment with autoescaping turned on + node = nodes.MarkSafeIfAutoescape(node) + if variables: + node = nodes.Mod( + node, + nodes.Dict( + [ + nodes.Pair(nodes.Const(key), value) + for key, value in variables.items() + ] + ), + ) + return nodes.Output([node]) + + +class ExprStmtExtension(Extension): + """Adds a `do` tag to Jinja that works like the print statement just + that it doesn't print the return value. + """ + + tags = {"do"} + + def parse(self, parser: "Parser") -> nodes.ExprStmt: + node = nodes.ExprStmt(lineno=next(parser.stream).lineno) + node.node = parser.parse_tuple() + return node + + +class LoopControlExtension(Extension): + """Adds break and continue to the template engine.""" + + tags = {"break", "continue"} + + def parse(self, parser: "Parser") -> t.Union[nodes.Break, nodes.Continue]: + token = next(parser.stream) + if token.value == "break": + return nodes.Break(lineno=token.lineno) + return nodes.Continue(lineno=token.lineno) + + +class DebugExtension(Extension): + """A ``{% debug %}`` tag that dumps the available variables, + filters, and tests. + + .. code-block:: html+jinja + + <pre>{% debug %}</pre> + + .. 
code-block:: text + + {'context': {'cycler': <class 'jinja2.utils.Cycler'>, + ..., + 'namespace': <class 'jinja2.utils.Namespace'>}, + 'filters': ['abs', 'attr', 'batch', 'capitalize', 'center', 'count', 'd', + ..., 'urlencode', 'urlize', 'wordcount', 'wordwrap', 'xmlattr'], + 'tests': ['!=', '<', '<=', '==', '>', '>=', 'callable', 'defined', + ..., 'odd', 'sameas', 'sequence', 'string', 'undefined', 'upper']} + + .. versionadded:: 2.11.0 + """ + + tags = {"debug"} + + def parse(self, parser: "Parser") -> nodes.Output: + lineno = parser.stream.expect("name:debug").lineno + context = nodes.ContextReference() + result = self.call_method("_render", [context], lineno=lineno) + return nodes.Output([result], lineno=lineno) + + def _render(self, context: Context) -> str: + result = { + "context": context.get_all(), + "filters": sorted(self.environment.filters.keys()), + "tests": sorted(self.environment.tests.keys()), + } + + # Set the depth since the intent is to show the top few names. + return pprint.pformat(result, depth=3, compact=True) + + +def extract_from_ast( + ast: nodes.Template, + gettext_functions: t.Sequence[str] = GETTEXT_FUNCTIONS, + babel_style: bool = True, +) -> t.Iterator[ + t.Tuple[int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]]] +]: + """Extract localizable strings from the given template node. Per + default this function returns matches in babel style that means non string + parameters as well as keyword arguments are returned as `None`. This + allows Babel to figure out what you really meant if you are using + gettext functions that allow keyword arguments for placeholder expansion. + If you don't want that behavior set the `babel_style` parameter to `False` + which causes only strings to be returned and parameters are always stored + in tuples. As a consequence invalid gettext calls (calls without a single + string parameter or string parameters after non-string parameters) are + skipped. + + This example explains the behavior: + + >>> from jinja2 import Environment + >>> env = Environment() + >>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}') + >>> list(extract_from_ast(node)) + [(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))] + >>> list(extract_from_ast(node, babel_style=False)) + [(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))] + + For every string found this function yields a ``(lineno, function, + message)`` tuple, where: + + * ``lineno`` is the number of the line on which the string was found, + * ``function`` is the name of the ``gettext`` function used (if the + string was extracted from embedded Python code), and + * ``message`` is the string, or a tuple of strings for functions + with multiple string arguments. + + This extraction function operates on the AST and is because of that unable + to extract any comments. For comment support you have to use the babel + extraction interface or extract comments yourself. 
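# ----------------------------------------------------------------------
# Illustrative sketch (not from the upstream docstring): restricting the
# extraction to a custom marker function; "gettext_noop" is a hypothetical
# name used only for this example.
from jinja2 import Environment as _Env
from jinja2.ext import extract_from_ast as _extract

_ast = _Env().parse('{{ gettext_noop("Hello") }} {{ _("skipped") }}')
# only calls whose name appears in gettext_functions are reported
assert list(_extract(_ast, ("gettext_noop",))) == [(1, "gettext_noop", "Hello")]
# ----------------------------------------------------------------------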
+ """ + out: t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]] + + for node in ast.find_all(nodes.Call): + if ( + not isinstance(node.node, nodes.Name) + or node.node.name not in gettext_functions + ): + continue + + strings: t.List[t.Optional[str]] = [] + + for arg in node.args: + if isinstance(arg, nodes.Const) and isinstance(arg.value, str): + strings.append(arg.value) + else: + strings.append(None) + + for _ in node.kwargs: + strings.append(None) + if node.dyn_args is not None: + strings.append(None) + if node.dyn_kwargs is not None: + strings.append(None) + + if not babel_style: + out = tuple(x for x in strings if x is not None) + + if not out: + continue + else: + if len(strings) == 1: + out = strings[0] + else: + out = tuple(strings) + + yield node.lineno, node.node.name, out + + +class _CommentFinder: + """Helper class to find comments in a token stream. Can only + find comments for gettext calls forwards. Once the comment + from line 4 is found, a comment for line 1 will not return a + usable value. + """ + + def __init__( + self, tokens: t.Sequence[t.Tuple[int, str, str]], comment_tags: t.Sequence[str] + ) -> None: + self.tokens = tokens + self.comment_tags = comment_tags + self.offset = 0 + self.last_lineno = 0 + + def find_backwards(self, offset: int) -> t.List[str]: + try: + for _, token_type, token_value in reversed( + self.tokens[self.offset : offset] + ): + if token_type in ("comment", "linecomment"): + try: + prefix, comment = token_value.split(None, 1) + except ValueError: + continue + if prefix in self.comment_tags: + return [comment.rstrip()] + return [] + finally: + self.offset = offset + + def find_comments(self, lineno: int) -> t.List[str]: + if not self.comment_tags or self.last_lineno > lineno: + return [] + for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset :]): + if token_lineno > lineno: + return self.find_backwards(self.offset + idx) + return self.find_backwards(len(self.tokens)) + + +def babel_extract( + fileobj: t.BinaryIO, + keywords: t.Sequence[str], + comment_tags: t.Sequence[str], + options: t.Dict[str, t.Any], +) -> t.Iterator[ + t.Tuple[ + int, str, t.Union[t.Optional[str], t.Tuple[t.Optional[str], ...]], t.List[str] + ] +]: + """Babel extraction method for Jinja templates. + + .. versionchanged:: 2.3 + Basic support for translation comments was added. If `comment_tags` + is now set to a list of keywords for extraction, the extractor will + try to find the best preceding comment that begins with one of the + keywords. For best results, make sure to not have more than one + gettext call in one line of code and the matching comment in the + same line or the line before. + + .. versionchanged:: 2.5.1 + The `newstyle_gettext` flag can be set to `True` to enable newstyle + gettext calls. + + .. versionchanged:: 2.7 + A `silent` option can now be provided. If set to `False` template + syntax errors are propagated instead of being ignored. + + :param fileobj: the file-like object the messages should be extracted from + :param keywords: a list of keywords (i.e. function names) that should be + recognized as translation functions + :param comment_tags: a list of translator tags to search for and include + in the results. + :param options: a dictionary of additional options (optional) + :return: an iterator over ``(lineno, funcname, message, comments)`` tuples. 
+ (comments will be empty currently) + """ + extensions: t.Dict[t.Type[Extension], None] = {} + + for extension_name in options.get("extensions", "").split(","): + extension_name = extension_name.strip() + + if not extension_name: + continue + + extensions[import_string(extension_name)] = None + + if InternationalizationExtension not in extensions: + extensions[InternationalizationExtension] = None + + def getbool(options: t.Mapping[str, str], key: str, default: bool = False) -> bool: + return options.get(key, str(default)).lower() in {"1", "on", "yes", "true"} + + silent = getbool(options, "silent", True) + environment = Environment( + options.get("block_start_string", defaults.BLOCK_START_STRING), + options.get("block_end_string", defaults.BLOCK_END_STRING), + options.get("variable_start_string", defaults.VARIABLE_START_STRING), + options.get("variable_end_string", defaults.VARIABLE_END_STRING), + options.get("comment_start_string", defaults.COMMENT_START_STRING), + options.get("comment_end_string", defaults.COMMENT_END_STRING), + options.get("line_statement_prefix") or defaults.LINE_STATEMENT_PREFIX, + options.get("line_comment_prefix") or defaults.LINE_COMMENT_PREFIX, + getbool(options, "trim_blocks", defaults.TRIM_BLOCKS), + getbool(options, "lstrip_blocks", defaults.LSTRIP_BLOCKS), + defaults.NEWLINE_SEQUENCE, + getbool(options, "keep_trailing_newline", defaults.KEEP_TRAILING_NEWLINE), + tuple(extensions), + cache_size=0, + auto_reload=False, + ) + + if getbool(options, "trimmed"): + environment.policies["ext.i18n.trimmed"] = True + if getbool(options, "newstyle_gettext"): + environment.newstyle_gettext = True # type: ignore + + source = fileobj.read().decode(options.get("encoding", "utf-8")) + try: + node = environment.parse(source) + tokens = list(environment.lex(environment.preprocess(source))) + except TemplateSyntaxError: + if not silent: + raise + # skip templates with syntax errors + return + + finder = _CommentFinder(tokens, comment_tags) + for lineno, func, message in extract_from_ast(node, keywords): + yield lineno, func, message, finder.find_comments(lineno) + + +#: nicer import names +i18n = InternationalizationExtension +do = ExprStmtExtension +loopcontrols = LoopControlExtension +debug = DebugExtension diff --git a/backend/test/lib/python3.8/site-packages/jinja2/filters.py b/backend/test/lib/python3.8/site-packages/jinja2/filters.py new file mode 100644 index 0000000000000000000000000000000000000000..ed07c4c0e2ae1b6203b3468cda8a303ecf3d7832 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/filters.py @@ -0,0 +1,1840 @@ +"""Built-in template filters used with the ``|`` operator.""" +import math +import random +import re +import typing +import typing as t +from collections import abc +from itertools import chain +from itertools import groupby + +from markupsafe import escape +from markupsafe import Markup +from markupsafe import soft_str + +from .async_utils import async_variant +from .async_utils import auto_aiter +from .async_utils import auto_await +from .async_utils import auto_to_list +from .exceptions import FilterArgumentError +from .runtime import Undefined +from .utils import htmlsafe_json_dumps +from .utils import pass_context +from .utils import pass_environment +from .utils import pass_eval_context +from .utils import pformat +from .utils import url_quote +from .utils import urlize + +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment + from .nodes import EvalContext + from .runtime import Context + 
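# ----------------------------------------------------------------------
# Illustrative sketch (not part of the vendored files): the module-level
# aliases at the foot of ext.py above are what make the dotted extension
# names "jinja2.ext.do" and "jinja2.ext.loopcontrols" resolvable.
from jinja2 import Environment as _Env

_env = _Env(extensions=["jinja2.ext.do", "jinja2.ext.loopcontrols"])
_tmpl = _env.from_string(
    "{% set seen = [] %}{% for x in [1, 2, 3, 4] %}"
    "{% if x == 3 %}{% break %}{% endif %}"
    "{% do seen.append(x) %}{% endfor %}{{ seen }}"
)
assert _tmpl.render() == "[1, 2]"  # loop stopped by {% break %} before 3
# ----------------------------------------------------------------------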
from .sandbox import SandboxedEnvironment # noqa: F401 + + class HasHTML(te.Protocol): + def __html__(self) -> str: + pass + + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) +K = t.TypeVar("K") +V = t.TypeVar("V") + + +def ignore_case(value: V) -> V: + """For use as a postprocessor for :func:`make_attrgetter`. Converts strings + to lowercase and returns other types as-is.""" + if isinstance(value, str): + return t.cast(V, value.lower()) + + return value + + +def make_attrgetter( + environment: "Environment", + attribute: t.Optional[t.Union[str, int]], + postprocess: t.Optional[t.Callable[[t.Any], t.Any]] = None, + default: t.Optional[t.Any] = None, +) -> t.Callable[[t.Any], t.Any]: + """Returns a callable that looks up the given attribute from a + passed object with the rules of the environment. Dots are allowed + to access attributes of attributes. Integer parts in paths are + looked up as integers. + """ + parts = _prepare_attribute_parts(attribute) + + def attrgetter(item: t.Any) -> t.Any: + for part in parts: + item = environment.getitem(item, part) + + if default is not None and isinstance(item, Undefined): + item = default + + if postprocess is not None: + item = postprocess(item) + + return item + + return attrgetter + + +def make_multi_attrgetter( + environment: "Environment", + attribute: t.Optional[t.Union[str, int]], + postprocess: t.Optional[t.Callable[[t.Any], t.Any]] = None, +) -> t.Callable[[t.Any], t.List[t.Any]]: + """Returns a callable that looks up the given comma separated + attributes from a passed object with the rules of the environment. + Dots are allowed to access attributes of each attribute. Integer + parts in paths are looked up as integers. + + The value returned by the returned callable is a list of extracted + attribute values. + + Examples of attribute: "attr1,attr2", "attr1.inner1.0,attr2.inner2.0", etc. + """ + if isinstance(attribute, str): + split: t.Sequence[t.Union[str, int, None]] = attribute.split(",") + else: + split = [attribute] + + parts = [_prepare_attribute_parts(item) for item in split] + + def attrgetter(item: t.Any) -> t.List[t.Any]: + items = [None] * len(parts) + + for i, attribute_part in enumerate(parts): + item_i = item + + for part in attribute_part: + item_i = environment.getitem(item_i, part) + + if postprocess is not None: + item_i = postprocess(item_i) + + items[i] = item_i + + return items + + return attrgetter + + +def _prepare_attribute_parts( + attr: t.Optional[t.Union[str, int]] +) -> t.List[t.Union[str, int]]: + if attr is None: + return [] + + if isinstance(attr, str): + return [int(x) if x.isdigit() else x for x in attr.split(".")] + + return [attr] + + +def do_forceescape(value: "t.Union[str, HasHTML]") -> Markup: + """Enforce HTML escaping. This will probably double escape variables.""" + if hasattr(value, "__html__"): + value = t.cast("HasHTML", value).__html__() + + return escape(str(value)) + + +def do_urlencode( + value: t.Union[str, t.Mapping[str, t.Any], t.Iterable[t.Tuple[str, t.Any]]] +) -> str: + """Quote data for use in a URL path or query using UTF-8. + + Basic wrapper around :func:`urllib.parse.quote` when given a + string, or :func:`urllib.parse.urlencode` for a dict or iterable. + + :param value: Data to quote. A string will be quoted directly. A + dict or iterable of ``(key, value)`` pairs will be joined as a + query string. + + When given a string, "/" is not quoted. HTTP servers treat "/" and + "%2F" equivalently in paths. If you need quoted slashes, use the + ``|replace("/", "%2F")`` filter. + + .. 
versionadded:: 2.7 + """ + if isinstance(value, str) or not isinstance(value, abc.Iterable): + return url_quote(value) + + if isinstance(value, dict): + items: t.Iterable[t.Tuple[str, t.Any]] = value.items() + else: + items = value # type: ignore + + return "&".join( + f"{url_quote(k, for_qs=True)}={url_quote(v, for_qs=True)}" for k, v in items + ) + + +@pass_eval_context +def do_replace( + eval_ctx: "EvalContext", s: str, old: str, new: str, count: t.Optional[int] = None +) -> str: + """Return a copy of the value with all occurrences of a substring + replaced with a new one. The first argument is the substring + that should be replaced, the second is the replacement string. + If the optional third argument ``count`` is given, only the first + ``count`` occurrences are replaced: + + .. sourcecode:: jinja + + {{ "Hello World"|replace("Hello", "Goodbye") }} + -> Goodbye World + + {{ "aaaaargh"|replace("a", "d'oh, ", 2) }} + -> d'oh, d'oh, aaargh + """ + if count is None: + count = -1 + + if not eval_ctx.autoescape: + return str(s).replace(str(old), str(new), count) + + if ( + hasattr(old, "__html__") + or hasattr(new, "__html__") + and not hasattr(s, "__html__") + ): + s = escape(s) + else: + s = soft_str(s) + + return s.replace(soft_str(old), soft_str(new), count) + + +def do_upper(s: str) -> str: + """Convert a value to uppercase.""" + return soft_str(s).upper() + + +def do_lower(s: str) -> str: + """Convert a value to lowercase.""" + return soft_str(s).lower() + + +def do_items(value: t.Union[t.Mapping[K, V], Undefined]) -> t.Iterator[t.Tuple[K, V]]: + """Return an iterator over the ``(key, value)`` items of a mapping. + + ``x|items`` is the same as ``x.items()``, except if ``x`` is + undefined an empty iterator is returned. + + This filter is useful if you expect the template to be rendered with + an implementation of Jinja in another programming language that does + not have a ``.items()`` method on its mapping type. + + .. code-block:: html+jinja + + <dl> + {% for key, value in my_dict|items %} + <dt>{{ key }} + <dd>{{ value }} + {% endfor %} + </dl> + + .. versionadded:: 3.1 + """ + if isinstance(value, Undefined): + return + + if not isinstance(value, abc.Mapping): + raise TypeError("Can only get item pairs from a mapping.") + + yield from value.items() + + +@pass_eval_context +def do_xmlattr( + eval_ctx: "EvalContext", d: t.Mapping[str, t.Any], autospace: bool = True +) -> str: + """Create an SGML/XML attribute string based on the items in a dict. + All values that are neither `none` nor `undefined` are automatically + escaped: + + .. sourcecode:: html+jinja + + <ul{{ {'class': 'my_list', 'missing': none, + 'id': 'list-%d'|format(variable)}|xmlattr }}> + ... + </ul> + + Results in something like this: + + .. sourcecode:: html + + <ul class="my_list" id="list-42"> + ... + </ul> + + As you can see it automatically prepends a space in front of the item + if the filter returned something unless the second parameter is false. + """ + rv = " ".join( + f'{escape(key)}="{escape(value)}"' + for key, value in d.items() + if value is not None and not isinstance(value, Undefined) + ) + + if autospace and rv: + rv = " " + rv + + if eval_ctx.autoescape: + rv = Markup(rv) + + return rv + + +def do_capitalize(s: str) -> str: + """Capitalize a value. The first character will be uppercase, all others + lowercase. + """ + return soft_str(s).capitalize() + + +_word_beginning_split_re = re.compile(r"([-\s({\[<]+)") + + +def do_title(s: str) -> str: + """Return a titlecased version of the value. I.e. 
words will start with + uppercase letters, all remaining characters are lowercase. + """ + return "".join( + [ + item[0].upper() + item[1:].lower() + for item in _word_beginning_split_re.split(soft_str(s)) + if item + ] + ) + + +def do_dictsort( + value: t.Mapping[K, V], + case_sensitive: bool = False, + by: 'te.Literal["key", "value"]' = "key", + reverse: bool = False, +) -> t.List[t.Tuple[K, V]]: + """Sort a dict and yield (key, value) pairs. Python dicts may not + be in the order you want to display them in, so sort them first. + + .. sourcecode:: jinja + + {% for key, value in mydict|dictsort %} + sort the dict by key, case insensitive + + {% for key, value in mydict|dictsort(reverse=true) %} + sort the dict by key, case insensitive, reverse order + + {% for key, value in mydict|dictsort(true) %} + sort the dict by key, case sensitive + + {% for key, value in mydict|dictsort(false, 'value') %} + sort the dict by value, case insensitive + """ + if by == "key": + pos = 0 + elif by == "value": + pos = 1 + else: + raise FilterArgumentError('You can only sort by either "key" or "value"') + + def sort_func(item: t.Tuple[t.Any, t.Any]) -> t.Any: + value = item[pos] + + if not case_sensitive: + value = ignore_case(value) + + return value + + return sorted(value.items(), key=sort_func, reverse=reverse) + + +@pass_environment +def do_sort( + environment: "Environment", + value: "t.Iterable[V]", + reverse: bool = False, + case_sensitive: bool = False, + attribute: t.Optional[t.Union[str, int]] = None, +) -> "t.List[V]": + """Sort an iterable using Python's :func:`sorted`. + + .. sourcecode:: jinja + + {% for city in cities|sort %} + ... + {% endfor %} + + :param reverse: Sort descending instead of ascending. + :param case_sensitive: When sorting strings, sort upper and lower + case separately. + :param attribute: When sorting objects or dicts, an attribute or + key to sort by. Can use dot notation like ``"address.city"``. + Can be a list of attributes like ``"age,name"``. + + The sort is stable, it does not change the relative order of + elements that compare equal. This makes it is possible to chain + sorts on different attributes and ordering. + + .. sourcecode:: jinja + + {% for user in users|sort(attribute="name") + |sort(reverse=true, attribute="age") %} + ... + {% endfor %} + + As a shortcut to chaining when the direction is the same for all + attributes, pass a comma separate list of attributes. + + .. sourcecode:: jinja + + {% for user in users|sort(attribute="age,name") %} + ... + {% endfor %} + + .. versionchanged:: 2.11.0 + The ``attribute`` parameter can be a comma separated list of + attributes, e.g. ``"age,name"``. + + .. versionchanged:: 2.6 + The ``attribute`` parameter was added. + """ + key_func = make_multi_attrgetter( + environment, attribute, postprocess=ignore_case if not case_sensitive else None + ) + return sorted(value, key=key_func, reverse=reverse) + + +@pass_environment +def do_unique( + environment: "Environment", + value: "t.Iterable[V]", + case_sensitive: bool = False, + attribute: t.Optional[t.Union[str, int]] = None, +) -> "t.Iterator[V]": + """Returns a list of unique items from the given iterable. + + .. sourcecode:: jinja + + {{ ['foo', 'bar', 'foobar', 'FooBar']|unique|list }} + -> ['foo', 'bar', 'foobar'] + + The unique items are yielded in the same order as their first occurrence in + the iterable passed to the filter. + + :param case_sensitive: Treat upper and lower case strings as distinct. 
+ :param attribute: Filter objects with unique values for this attribute. + """ + getter = make_attrgetter( + environment, attribute, postprocess=ignore_case if not case_sensitive else None + ) + seen = set() + + for item in value: + key = getter(item) + + if key not in seen: + seen.add(key) + yield item + + +def _min_or_max( + environment: "Environment", + value: "t.Iterable[V]", + func: "t.Callable[..., V]", + case_sensitive: bool, + attribute: t.Optional[t.Union[str, int]], +) -> "t.Union[V, Undefined]": + it = iter(value) + + try: + first = next(it) + except StopIteration: + return environment.undefined("No aggregated item, sequence was empty.") + + key_func = make_attrgetter( + environment, attribute, postprocess=ignore_case if not case_sensitive else None + ) + return func(chain([first], it), key=key_func) + + +@pass_environment +def do_min( + environment: "Environment", + value: "t.Iterable[V]", + case_sensitive: bool = False, + attribute: t.Optional[t.Union[str, int]] = None, +) -> "t.Union[V, Undefined]": + """Return the smallest item from the sequence. + + .. sourcecode:: jinja + + {{ [1, 2, 3]|min }} + -> 1 + + :param case_sensitive: Treat upper and lower case strings as distinct. + :param attribute: Get the object with the min value of this attribute. + """ + return _min_or_max(environment, value, min, case_sensitive, attribute) + + +@pass_environment +def do_max( + environment: "Environment", + value: "t.Iterable[V]", + case_sensitive: bool = False, + attribute: t.Optional[t.Union[str, int]] = None, +) -> "t.Union[V, Undefined]": + """Return the largest item from the sequence. + + .. sourcecode:: jinja + + {{ [1, 2, 3]|max }} + -> 3 + + :param case_sensitive: Treat upper and lower case strings as distinct. + :param attribute: Get the object with the max value of this attribute. + """ + return _min_or_max(environment, value, max, case_sensitive, attribute) + + +def do_default( + value: V, + default_value: V = "", # type: ignore + boolean: bool = False, +) -> V: + """If the value is undefined it will return the passed default value, + otherwise the value of the variable: + + .. sourcecode:: jinja + + {{ my_variable|default('my_variable is not defined') }} + + This will output the value of ``my_variable`` if the variable was + defined, otherwise ``'my_variable is not defined'``. If you want + to use default with variables that evaluate to false you have to + set the second parameter to `true`: + + .. sourcecode:: jinja + + {{ ''|default('the string was empty', true) }} + + .. versionchanged:: 2.11 + It's now possible to configure the :class:`~jinja2.Environment` with + :class:`~jinja2.ChainableUndefined` to make the `default` filter work + on nested elements and attributes that may contain undefined values + in the chain without getting an :exc:`~jinja2.UndefinedError`. + """ + if isinstance(value, Undefined) or (boolean and not value): + return default_value + + return value + + +@pass_eval_context +def sync_do_join( + eval_ctx: "EvalContext", + value: t.Iterable, + d: str = "", + attribute: t.Optional[t.Union[str, int]] = None, +) -> str: + """Return a string which is the concatenation of the strings in the + sequence. The separator between elements is an empty string per + default, you can define it with the optional parameter: + + .. sourcecode:: jinja + + {{ [1, 2, 3]|join('|') }} + -> 1|2|3 + + {{ [1, 2, 3]|join }} + -> 123 + + It is also possible to join certain attributes of an object: + + .. sourcecode:: jinja + + {{ users|join(', ', attribute='username') }} + + .. 
versionadded:: 2.6 + The `attribute` parameter was added. + """ + if attribute is not None: + value = map(make_attrgetter(eval_ctx.environment, attribute), value) + + # no automatic escaping? joining is a lot easier then + if not eval_ctx.autoescape: + return str(d).join(map(str, value)) + + # if the delimiter doesn't have an html representation we check + # if any of the items has. If yes we do a coercion to Markup + if not hasattr(d, "__html__"): + value = list(value) + do_escape = False + + for idx, item in enumerate(value): + if hasattr(item, "__html__"): + do_escape = True + else: + value[idx] = str(item) + + if do_escape: + d = escape(d) + else: + d = str(d) + + return d.join(value) + + # no html involved, to normal joining + return soft_str(d).join(map(soft_str, value)) + + +@async_variant(sync_do_join) # type: ignore +async def do_join( + eval_ctx: "EvalContext", + value: t.Union[t.AsyncIterable, t.Iterable], + d: str = "", + attribute: t.Optional[t.Union[str, int]] = None, +) -> str: + return sync_do_join(eval_ctx, await auto_to_list(value), d, attribute) + + +def do_center(value: str, width: int = 80) -> str: + """Centers the value in a field of a given width.""" + return soft_str(value).center(width) + + +@pass_environment +def sync_do_first( + environment: "Environment", seq: "t.Iterable[V]" +) -> "t.Union[V, Undefined]": + """Return the first item of a sequence.""" + try: + return next(iter(seq)) + except StopIteration: + return environment.undefined("No first item, sequence was empty.") + + +@async_variant(sync_do_first) # type: ignore +async def do_first( + environment: "Environment", seq: "t.Union[t.AsyncIterable[V], t.Iterable[V]]" +) -> "t.Union[V, Undefined]": + try: + return await auto_aiter(seq).__anext__() + except StopAsyncIteration: + return environment.undefined("No first item, sequence was empty.") + + +@pass_environment +def do_last( + environment: "Environment", seq: "t.Reversible[V]" +) -> "t.Union[V, Undefined]": + """Return the last item of a sequence. + + Note: Does not work with generators. You may want to explicitly + convert it to a list: + + .. sourcecode:: jinja + + {{ data | selectattr('name', '==', 'Jinja') | list | last }} + """ + try: + return next(iter(reversed(seq))) + except StopIteration: + return environment.undefined("No last item, sequence was empty.") + + +# No async do_last, it may not be safe in async mode. + + +@pass_context +def do_random(context: "Context", seq: "t.Sequence[V]") -> "t.Union[V, Undefined]": + """Return a random item from the sequence.""" + try: + return random.choice(seq) + except IndexError: + return context.environment.undefined("No random item, sequence was empty.") + + +def do_filesizeformat(value: t.Union[str, float, int], binary: bool = False) -> str: + """Format the value like a 'human-readable' file size (i.e. 13 kB, + 4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega, + Giga, etc.), if the second parameter is set to `True` the binary + prefixes are used (Mebi, Gibi). 
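# ----------------------------------------------------------------------
# Illustrative checks (not from the upstream docstring), calling the
# filter function directly to show the decimal/binary prefix split:
from jinja2.filters import do_filesizeformat as _fsf

assert _fsf(500) == "500 Bytes"
assert _fsf(1000000) == "1.0 MB"                # decimal prefixes by default
assert _fsf(1048576, binary=True) == "1.0 MiB"  # binary prefixes on request
# ----------------------------------------------------------------------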
+ """ + bytes = float(value) + base = 1024 if binary else 1000 + prefixes = [ + ("KiB" if binary else "kB"), + ("MiB" if binary else "MB"), + ("GiB" if binary else "GB"), + ("TiB" if binary else "TB"), + ("PiB" if binary else "PB"), + ("EiB" if binary else "EB"), + ("ZiB" if binary else "ZB"), + ("YiB" if binary else "YB"), + ] + + if bytes == 1: + return "1 Byte" + elif bytes < base: + return f"{int(bytes)} Bytes" + else: + for i, prefix in enumerate(prefixes): + unit = base ** (i + 2) + + if bytes < unit: + return f"{base * bytes / unit:.1f} {prefix}" + + return f"{base * bytes / unit:.1f} {prefix}" + + +def do_pprint(value: t.Any) -> str: + """Pretty print a variable. Useful for debugging.""" + return pformat(value) + + +_uri_scheme_re = re.compile(r"^([\w.+-]{2,}:(/){0,2})$") + + +@pass_eval_context +def do_urlize( + eval_ctx: "EvalContext", + value: str, + trim_url_limit: t.Optional[int] = None, + nofollow: bool = False, + target: t.Optional[str] = None, + rel: t.Optional[str] = None, + extra_schemes: t.Optional[t.Iterable[str]] = None, +) -> str: + """Convert URLs in text into clickable links. + + This may not recognize links in some situations. Usually, a more + comprehensive formatter, such as a Markdown library, is a better + choice. + + Works on ``http://``, ``https://``, ``www.``, ``mailto:``, and email + addresses. Links with trailing punctuation (periods, commas, closing + parentheses) and leading punctuation (opening parentheses) are + recognized excluding the punctuation. Email addresses that include + header fields are not recognized (for example, + ``mailto:address@example.com?cc=copy@example.com``). + + :param value: Original text containing URLs to link. + :param trim_url_limit: Shorten displayed URL values to this length. + :param nofollow: Add the ``rel=nofollow`` attribute to links. + :param target: Add the ``target`` attribute to links. + :param rel: Add the ``rel`` attribute to links. + :param extra_schemes: Recognize URLs that start with these schemes + in addition to the default behavior. Defaults to + ``env.policies["urlize.extra_schemes"]``, which defaults to no + extra schemes. + + .. versionchanged:: 3.0 + The ``extra_schemes`` parameter was added. + + .. versionchanged:: 3.0 + Generate ``https://`` links for URLs without a scheme. + + .. versionchanged:: 3.0 + The parsing rules were updated. Recognize email addresses with + or without the ``mailto:`` scheme. Validate IP addresses. Ignore + parentheses and brackets in more cases. + + .. versionchanged:: 2.8 + The ``target`` parameter was added. + """ + policies = eval_ctx.environment.policies + rel_parts = set((rel or "").split()) + + if nofollow: + rel_parts.add("nofollow") + + rel_parts.update((policies["urlize.rel"] or "").split()) + rel = " ".join(sorted(rel_parts)) or None + + if target is None: + target = policies["urlize.target"] + + if extra_schemes is None: + extra_schemes = policies["urlize.extra_schemes"] or () + + for scheme in extra_schemes: + if _uri_scheme_re.fullmatch(scheme) is None: + raise FilterArgumentError(f"{scheme!r} is not a valid URI scheme prefix.") + + rv = urlize( + value, + trim_url_limit=trim_url_limit, + rel=rel, + target=target, + extra_schemes=extra_schemes, + ) + + if eval_ctx.autoescape: + rv = Markup(rv) + + return rv + + +def do_indent( + s: str, width: t.Union[int, str] = 4, first: bool = False, blank: bool = False +) -> str: + """Return a copy of the string with each line indented by 4 spaces. The + first line and blank lines are not indented by default. 
+ + :param width: Number of spaces, or a string, to indent by. + :param first: Don't skip indenting the first line. + :param blank: Don't skip indenting empty lines. + + .. versionchanged:: 3.0 + ``width`` can be a string. + + .. versionchanged:: 2.10 + Blank lines are not indented by default. + + Rename the ``indentfirst`` argument to ``first``. + """ + if isinstance(width, str): + indention = width + else: + indention = " " * width + + newline = "\n" + + if isinstance(s, Markup): + indention = Markup(indention) + newline = Markup(newline) + + s += newline # this quirk is necessary for splitlines method + + if blank: + rv = (newline + indention).join(s.splitlines()) + else: + lines = s.splitlines() + rv = lines.pop(0) + + if lines: + rv += newline + newline.join( + indention + line if line else line for line in lines + ) + + if first: + rv = indention + rv + + return rv + + +@pass_environment +def do_truncate( + env: "Environment", + s: str, + length: int = 255, + killwords: bool = False, + end: str = "...", + leeway: t.Optional[int] = None, +) -> str: + """Return a truncated copy of the string. The length is specified + with the first parameter which defaults to ``255``. If the second + parameter is ``true`` the filter will cut the text at length. Otherwise + it will discard the last word. If the text was in fact + truncated it will append an ellipsis sign (``"..."``). If you want a + different ellipsis sign than ``"..."`` you can specify it using the + third parameter. Strings that only exceed the length by the tolerance + margin given in the fourth parameter will not be truncated. + + .. sourcecode:: jinja + + {{ "foo bar baz qux"|truncate(9) }} + -> "foo..." + {{ "foo bar baz qux"|truncate(9, True) }} + -> "foo ba..." + {{ "foo bar baz qux"|truncate(11) }} + -> "foo bar baz qux" + {{ "foo bar baz qux"|truncate(11, False, '...', 0) }} + -> "foo bar..." + + The default leeway on newer Jinja versions is 5 and was 0 before but + can be reconfigured globally. + """ + if leeway is None: + leeway = env.policies["truncate.leeway"] + + assert length >= len(end), f"expected length >= {len(end)}, got {length}" + assert leeway >= 0, f"expected leeway >= 0, got {leeway}" + + if len(s) <= length + leeway: + return s + + if killwords: + return s[: length - len(end)] + end + + result = s[: length - len(end)].rsplit(" ", 1)[0] + return result + end + + +@pass_environment +def do_wordwrap( + environment: "Environment", + s: str, + width: int = 79, + break_long_words: bool = True, + wrapstring: t.Optional[str] = None, + break_on_hyphens: bool = True, +) -> str: + """Wrap a string to the given width. Existing newlines are treated + as paragraphs to be wrapped separately. + + :param s: Original text to wrap. + :param width: Maximum length of wrapped lines. + :param break_long_words: If a word is longer than ``width``, break + it across lines. + :param break_on_hyphens: If a word contains hyphens, it may be split + across lines. + :param wrapstring: String to join each wrapped line. Defaults to + :attr:`Environment.newline_sequence`. + + .. versionchanged:: 2.11 + Existing newlines are treated as paragraphs wrapped separately. + + .. versionchanged:: 2.11 + Added the ``break_on_hyphens`` parameter. + + .. versionchanged:: 2.7 + Added the ``wrapstring`` parameter. + """ + import textwrap + + if wrapstring is None: + wrapstring = environment.newline_sequence + + # textwrap.wrap doesn't consider existing newlines when wrapping. 
+ # If the string has a newline before width, wrap will still insert + # a newline at width, resulting in a short line. Instead, split and + # wrap each paragraph individually. + return wrapstring.join( + [ + wrapstring.join( + textwrap.wrap( + line, + width=width, + expand_tabs=False, + replace_whitespace=False, + break_long_words=break_long_words, + break_on_hyphens=break_on_hyphens, + ) + ) + for line in s.splitlines() + ] + ) + + +_word_re = re.compile(r"\w+") + + +def do_wordcount(s: str) -> int: + """Count the words in that string.""" + return len(_word_re.findall(soft_str(s))) + + +def do_int(value: t.Any, default: int = 0, base: int = 10) -> int: + """Convert the value into an integer. If the + conversion doesn't work it will return ``0``. You can + override this default using the first parameter. You + can also override the default base (10) in the second + parameter, which handles input with prefixes such as + 0b, 0o and 0x for bases 2, 8 and 16 respectively. + The base is ignored for decimal numbers and non-string values. + """ + try: + if isinstance(value, str): + return int(value, base) + + return int(value) + except (TypeError, ValueError): + # this quirk is necessary so that "42.23"|int gives 42. + try: + return int(float(value)) + except (TypeError, ValueError): + return default + + +def do_float(value: t.Any, default: float = 0.0) -> float: + """Convert the value into a floating point number. If the + conversion doesn't work it will return ``0.0``. You can + override this default using the first parameter. + """ + try: + return float(value) + except (TypeError, ValueError): + return default + + +def do_format(value: str, *args: t.Any, **kwargs: t.Any) -> str: + """Apply the given values to a `printf-style`_ format string, like + ``string % values``. + + .. sourcecode:: jinja + + {{ "%s, %s!"|format(greeting, name) }} + Hello, World! + + In most cases it should be more convenient and efficient to use the + ``%`` operator or :meth:`str.format`. + + .. code-block:: text + + {{ "%s, %s!" % (greeting, name) }} + {{ "{}, {}!".format(greeting, name) }} + + .. _printf-style: https://docs.python.org/library/stdtypes.html + #printf-style-string-formatting + """ + if args and kwargs: + raise FilterArgumentError( + "can't handle positional and keyword arguments at the same time" + ) + + return soft_str(value) % (kwargs or args) + + +def do_trim(value: str, chars: t.Optional[str] = None) -> str: + """Strip leading and trailing characters, by default whitespace.""" + return soft_str(value).strip(chars) + + +def do_striptags(value: "t.Union[str, HasHTML]") -> str: + """Strip SGML/XML tags and replace adjacent whitespace by one space.""" + if hasattr(value, "__html__"): + value = t.cast("HasHTML", value).__html__() + + return Markup(str(value)).striptags() + + +def sync_do_slice( + value: "t.Collection[V]", slices: int, fill_with: "t.Optional[V]" = None +) -> "t.Iterator[t.List[V]]": + """Slice an iterator and return a list of lists containing + those items. Useful if you want to create a div containing + three ul tags that represent columns: + + .. sourcecode:: html+jinja + + <div class="columnwrapper"> + {%- for column in items|slice(3) %} + <ul class="column-{{ loop.index }}"> + {%- for item in column %} + <li>{{ item }}</li> + {%- endfor %} + </ul> + {%- endfor %} + </div> + + If you pass it a second argument it's used to fill missing + values on the last iteration. 
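# ----------------------------------------------------------------------
# Illustrative checks (not from the upstream docstring): slice yields a
# fixed number of columns, while batch (defined below) yields rows of a
# fixed length.
from jinja2.filters import do_batch as _batch
from jinja2.filters import sync_do_slice as _slice

assert list(_slice([1, 2, 3, 4, 5], 2)) == [[1, 2, 3], [4, 5]]
assert list(_batch([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]
# ----------------------------------------------------------------------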
+ """ + seq = list(value) + length = len(seq) + items_per_slice = length // slices + slices_with_extra = length % slices + offset = 0 + + for slice_number in range(slices): + start = offset + slice_number * items_per_slice + + if slice_number < slices_with_extra: + offset += 1 + + end = offset + (slice_number + 1) * items_per_slice + tmp = seq[start:end] + + if fill_with is not None and slice_number >= slices_with_extra: + tmp.append(fill_with) + + yield tmp + + +@async_variant(sync_do_slice) # type: ignore +async def do_slice( + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + slices: int, + fill_with: t.Optional[t.Any] = None, +) -> "t.Iterator[t.List[V]]": + return sync_do_slice(await auto_to_list(value), slices, fill_with) + + +def do_batch( + value: "t.Iterable[V]", linecount: int, fill_with: "t.Optional[V]" = None +) -> "t.Iterator[t.List[V]]": + """ + A filter that batches items. It works pretty much like `slice` + just the other way round. It returns a list of lists with the + given number of items. If you provide a second parameter this + is used to fill up missing items. See this example: + + .. sourcecode:: html+jinja + + <table> + {%- for row in items|batch(3, ' ') %} + <tr> + {%- for column in row %} + <td>{{ column }}</td> + {%- endfor %} + </tr> + {%- endfor %} + </table> + """ + tmp: "t.List[V]" = [] + + for item in value: + if len(tmp) == linecount: + yield tmp + tmp = [] + + tmp.append(item) + + if tmp: + if fill_with is not None and len(tmp) < linecount: + tmp += [fill_with] * (linecount - len(tmp)) + + yield tmp + + +def do_round( + value: float, + precision: int = 0, + method: 'te.Literal["common", "ceil", "floor"]' = "common", +) -> float: + """Round the number to a given precision. The first + parameter specifies the precision (default is ``0``), the + second the rounding method: + + - ``'common'`` rounds either up or down + - ``'ceil'`` always rounds up + - ``'floor'`` always rounds down + + If you don't specify a method ``'common'`` is used. + + .. sourcecode:: jinja + + {{ 42.55|round }} + -> 43.0 + {{ 42.55|round(1, 'floor') }} + -> 42.5 + + Note that even if rounded to 0 precision, a float is returned. If + you need a real integer, pipe it through `int`: + + .. sourcecode:: jinja + + {{ 42.55|round|int }} + -> 43 + """ + if method not in {"common", "ceil", "floor"}: + raise FilterArgumentError("method must be common, ceil or floor") + + if method == "common": + return round(value, precision) + + func = getattr(math, method) + return t.cast(float, func(value * (10**precision)) / (10**precision)) + + +class _GroupTuple(t.NamedTuple): + grouper: t.Any + list: t.List + + # Use the regular tuple repr to hide this subclass if users print + # out the value during debugging. + def __repr__(self) -> str: + return tuple.__repr__(self) + + def __str__(self) -> str: + return tuple.__str__(self) + + +@pass_environment +def sync_do_groupby( + environment: "Environment", + value: "t.Iterable[V]", + attribute: t.Union[str, int], + default: t.Optional[t.Any] = None, + case_sensitive: bool = False, +) -> "t.List[_GroupTuple]": + """Group a sequence of objects by an attribute using Python's + :func:`itertools.groupby`. The attribute can use dot notation for + nested access, like ``"address.city"``. Unlike Python's ``groupby``, + the values are sorted first so only one group is returned for each + unique value. + + For example, a list of ``User`` objects with a ``city`` attribute + can be rendered in groups. 
In this example, ``grouper`` refers to + the ``city`` value of the group. + + .. sourcecode:: html+jinja + + <ul>{% for city, items in users|groupby("city") %} + <li>{{ city }} + <ul>{% for user in items %} + <li>{{ user.name }} + {% endfor %}</ul> + </li> + {% endfor %}</ul> + + ``groupby`` yields namedtuples of ``(grouper, list)``, which + can be used instead of the tuple unpacking above. ``grouper`` is the + value of the attribute, and ``list`` is the items with that value. + + .. sourcecode:: html+jinja + + <ul>{% for group in users|groupby("city") %} + <li>{{ group.grouper }}: {{ group.list|join(", ") }} + {% endfor %}</ul> + + You can specify a ``default`` value to use if an object in the list + does not have the given attribute. + + .. sourcecode:: jinja + + <ul>{% for city, items in users|groupby("city", default="NY") %} + <li>{{ city }}: {{ items|map(attribute="name")|join(", ") }}</li> + {% endfor %}</ul> + + Like the :func:`~jinja-filters.sort` filter, sorting and grouping is + case-insensitive by default. The ``key`` for each group will have + the case of the first item in that group of values. For example, if + a list of users has cities ``["CA", "NY", "ca"]``, the "CA" group + will have two values. This can be disabled by passing + ``case_sensitive=True``. + + .. versionchanged:: 3.1 + Added the ``case_sensitive`` parameter. Sorting and grouping is + case-insensitive by default, matching other filters that do + comparisons. + + .. versionchanged:: 3.0 + Added the ``default`` parameter. + + .. versionchanged:: 2.6 + The attribute supports dot notation for nested access. + """ + expr = make_attrgetter( + environment, + attribute, + postprocess=ignore_case if not case_sensitive else None, + default=default, + ) + out = [ + _GroupTuple(key, list(values)) + for key, values in groupby(sorted(value, key=expr), expr) + ] + + if not case_sensitive: + # Return the real key from the first value instead of the lowercase key. + output_expr = make_attrgetter(environment, attribute, default=default) + out = [_GroupTuple(output_expr(values[0]), values) for _, values in out] + + return out + + +@async_variant(sync_do_groupby) # type: ignore +async def do_groupby( + environment: "Environment", + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + attribute: t.Union[str, int], + default: t.Optional[t.Any] = None, + case_sensitive: bool = False, +) -> "t.List[_GroupTuple]": + expr = make_attrgetter( + environment, + attribute, + postprocess=ignore_case if not case_sensitive else None, + default=default, + ) + out = [ + _GroupTuple(key, await auto_to_list(values)) + for key, values in groupby(sorted(await auto_to_list(value), key=expr), expr) + ] + + if not case_sensitive: + # Return the real key from the first value instead of the lowercase key. + output_expr = make_attrgetter(environment, attribute, default=default) + out = [_GroupTuple(output_expr(values[0]), values) for _, values in out] + + return out + + +@pass_environment +def sync_do_sum( + environment: "Environment", + iterable: "t.Iterable[V]", + attribute: t.Optional[t.Union[str, int]] = None, + start: V = 0, # type: ignore +) -> V: + """Returns the sum of a sequence of numbers plus the value of parameter + 'start' (which defaults to 0). When the sequence is empty it returns + start. + + It is also possible to sum up only certain attributes: + + .. sourcecode:: jinja + + Total: {{ items|sum(attribute='price') }} + + .. versionchanged:: 2.6 + The ``attribute`` parameter was added to allow summing up over + attributes. 
Also the ``start`` parameter was moved on to the right. + """ + if attribute is not None: + iterable = map(make_attrgetter(environment, attribute), iterable) + + return sum(iterable, start) # type: ignore[no-any-return, call-overload] + + +@async_variant(sync_do_sum) # type: ignore +async def do_sum( + environment: "Environment", + iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + attribute: t.Optional[t.Union[str, int]] = None, + start: V = 0, # type: ignore +) -> V: + rv = start + + if attribute is not None: + func = make_attrgetter(environment, attribute) + else: + + def func(x: V) -> V: + return x + + async for item in auto_aiter(iterable): + rv += func(item) + + return rv + + +def sync_do_list(value: "t.Iterable[V]") -> "t.List[V]": + """Convert the value into a list. If it was a string the returned list + will be a list of characters. + """ + return list(value) + + +@async_variant(sync_do_list) # type: ignore +async def do_list(value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]") -> "t.List[V]": + return await auto_to_list(value) + + +def do_mark_safe(value: str) -> Markup: + """Mark the value as safe which means that in an environment with automatic + escaping enabled this variable will not be escaped. + """ + return Markup(value) + + +def do_mark_unsafe(value: str) -> str: + """Mark a value as unsafe. This is the reverse operation for :func:`safe`.""" + return str(value) + + +@typing.overload +def do_reverse(value: str) -> str: + ... + + +@typing.overload +def do_reverse(value: "t.Iterable[V]") -> "t.Iterable[V]": + ... + + +def do_reverse(value: t.Union[str, t.Iterable[V]]) -> t.Union[str, t.Iterable[V]]: + """Reverse the object or return an iterator that iterates over it the other + way round. + """ + if isinstance(value, str): + return value[::-1] + + try: + return reversed(value) # type: ignore + except TypeError: + try: + rv = list(value) + rv.reverse() + return rv + except TypeError as e: + raise FilterArgumentError("argument must be iterable") from e + + +@pass_environment +def do_attr( + environment: "Environment", obj: t.Any, name: str +) -> t.Union[Undefined, t.Any]: + """Get an attribute of an object. ``foo|attr("bar")`` works like + ``foo.bar`` just that always an attribute is returned and items are not + looked up. + + See :ref:`Notes on subscriptions <notes-on-subscriptions>` for more details. + """ + try: + name = str(name) + except UnicodeError: + pass + else: + try: + value = getattr(obj, name) + except AttributeError: + pass + else: + if environment.sandboxed: + environment = t.cast("SandboxedEnvironment", environment) + + if not environment.is_safe_attribute(obj, name, value): + return environment.unsafe_undefined(obj, name) + + return value + + return environment.undefined(obj=obj, name=name) + + +@typing.overload +def sync_do_map( + context: "Context", value: t.Iterable, name: str, *args: t.Any, **kwargs: t.Any +) -> t.Iterable: + ... + + +@typing.overload +def sync_do_map( + context: "Context", + value: t.Iterable, + *, + attribute: str = ..., + default: t.Optional[t.Any] = None, +) -> t.Iterable: + ... + + +@pass_context +def sync_do_map( + context: "Context", value: t.Iterable, *args: t.Any, **kwargs: t.Any +) -> t.Iterable: + """Applies a filter on a sequence of objects or looks up an attribute. + This is useful when dealing with lists of objects but you are really + only interested in a certain value of it. + + The basic usage is mapping on an attribute. Imagine you have a list + of users but you are only interested in a list of usernames: + + .. 
sourcecode:: jinja + + Users on this page: {{ users|map(attribute='username')|join(', ') }} + + You can specify a ``default`` value to use if an object in the list + does not have the given attribute. + + .. sourcecode:: jinja + + {{ users|map(attribute="username", default="Anonymous")|join(", ") }} + + Alternatively you can let it invoke a filter by passing the name of the + filter and the arguments afterwards. A good example would be applying a + text conversion filter on a sequence: + + .. sourcecode:: jinja + + Users on this page: {{ titles|map('lower')|join(', ') }} + + Similar to a generator comprehension such as: + + .. code-block:: python + + (u.username for u in users) + (getattr(u, "username", "Anonymous") for u in users) + (do_lower(x) for x in titles) + + .. versionchanged:: 2.11.0 + Added the ``default`` parameter. + + .. versionadded:: 2.7 + """ + if value: + func = prepare_map(context, args, kwargs) + + for item in value: + yield func(item) + + +@typing.overload +def do_map( + context: "Context", + value: t.Union[t.AsyncIterable, t.Iterable], + name: str, + *args: t.Any, + **kwargs: t.Any, +) -> t.Iterable: + ... + + +@typing.overload +def do_map( + context: "Context", + value: t.Union[t.AsyncIterable, t.Iterable], + *, + attribute: str = ..., + default: t.Optional[t.Any] = None, +) -> t.Iterable: + ... + + +@async_variant(sync_do_map) # type: ignore +async def do_map( + context: "Context", + value: t.Union[t.AsyncIterable, t.Iterable], + *args: t.Any, + **kwargs: t.Any, +) -> t.AsyncIterable: + if value: + func = prepare_map(context, args, kwargs) + + async for item in auto_aiter(value): + yield await auto_await(func(item)) + + +@pass_context +def sync_do_select( + context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any +) -> "t.Iterator[V]": + """Filters a sequence of objects by applying a test to each object, + and only selecting the objects with the test succeeding. + + If no test is specified, each object will be evaluated as a boolean. + + Example usage: + + .. sourcecode:: jinja + + {{ numbers|select("odd") }} + {{ numbers|select("odd") }} + {{ numbers|select("divisibleby", 3) }} + {{ numbers|select("lessthan", 42) }} + {{ strings|select("equalto", "mystring") }} + + Similar to a generator comprehension such as: + + .. code-block:: python + + (n for n in numbers if test_odd(n)) + (n for n in numbers if test_divisibleby(n, 3)) + + .. versionadded:: 2.7 + """ + return select_or_reject(context, value, args, kwargs, lambda x: x, False) + + +@async_variant(sync_do_select) # type: ignore +async def do_select( + context: "Context", + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + *args: t.Any, + **kwargs: t.Any, +) -> "t.AsyncIterator[V]": + return async_select_or_reject(context, value, args, kwargs, lambda x: x, False) + + +@pass_context +def sync_do_reject( + context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any +) -> "t.Iterator[V]": + """Filters a sequence of objects by applying a test to each object, + and rejecting the objects with the test succeeding. + + If no test is specified, each object will be evaluated as a boolean. + + Example usage: + + .. sourcecode:: jinja + + {{ numbers|reject("odd") }} + + Similar to a generator comprehension such as: + + .. code-block:: python + + (n for n in numbers if not test_odd(n)) + + .. 
versionadded:: 2.7 + """ + return select_or_reject(context, value, args, kwargs, lambda x: not x, False) + + +@async_variant(sync_do_reject) # type: ignore +async def do_reject( + context: "Context", + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + *args: t.Any, + **kwargs: t.Any, +) -> "t.AsyncIterator[V]": + return async_select_or_reject(context, value, args, kwargs, lambda x: not x, False) + + +@pass_context +def sync_do_selectattr( + context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any +) -> "t.Iterator[V]": + """Filters a sequence of objects by applying a test to the specified + attribute of each object, and only selecting the objects with the + test succeeding. + + If no test is specified, the attribute's value will be evaluated as + a boolean. + + Example usage: + + .. sourcecode:: jinja + + {{ users|selectattr("is_active") }} + {{ users|selectattr("email", "none") }} + + Similar to a generator comprehension such as: + + .. code-block:: python + + (u for user in users if user.is_active) + (u for user in users if test_none(user.email)) + + .. versionadded:: 2.7 + """ + return select_or_reject(context, value, args, kwargs, lambda x: x, True) + + +@async_variant(sync_do_selectattr) # type: ignore +async def do_selectattr( + context: "Context", + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + *args: t.Any, + **kwargs: t.Any, +) -> "t.AsyncIterator[V]": + return async_select_or_reject(context, value, args, kwargs, lambda x: x, True) + + +@pass_context +def sync_do_rejectattr( + context: "Context", value: "t.Iterable[V]", *args: t.Any, **kwargs: t.Any +) -> "t.Iterator[V]": + """Filters a sequence of objects by applying a test to the specified + attribute of each object, and rejecting the objects with the test + succeeding. + + If no test is specified, the attribute's value will be evaluated as + a boolean. + + .. sourcecode:: jinja + + {{ users|rejectattr("is_active") }} + {{ users|rejectattr("email", "none") }} + + Similar to a generator comprehension such as: + + .. code-block:: python + + (u for user in users if not user.is_active) + (u for user in users if not test_none(user.email)) + + .. versionadded:: 2.7 + """ + return select_or_reject(context, value, args, kwargs, lambda x: not x, True) + + +@async_variant(sync_do_rejectattr) # type: ignore +async def do_rejectattr( + context: "Context", + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + *args: t.Any, + **kwargs: t.Any, +) -> "t.AsyncIterator[V]": + return async_select_or_reject(context, value, args, kwargs, lambda x: not x, True) + + +@pass_eval_context +def do_tojson( + eval_ctx: "EvalContext", value: t.Any, indent: t.Optional[int] = None +) -> Markup: + """Serialize an object to a string of JSON, and mark it safe to + render in HTML. This filter is only for use in HTML documents. + + The returned string is safe to render in HTML documents and + ``<script>`` tags. The exception is in HTML attributes that are + double quoted; either use single quotes or the ``|forceescape`` + filter. + + :param value: The object to serialize to JSON. + :param indent: The ``indent`` parameter passed to ``dumps``, for + pretty-printing the value. + + .. 
versionadded:: 2.9 + """ + policies = eval_ctx.environment.policies + dumps = policies["json.dumps_function"] + kwargs = policies["json.dumps_kwargs"] + + if indent is not None: + kwargs = kwargs.copy() + kwargs["indent"] = indent + + return htmlsafe_json_dumps(value, dumps=dumps, **kwargs) + + +def prepare_map( + context: "Context", args: t.Tuple, kwargs: t.Dict[str, t.Any] +) -> t.Callable[[t.Any], t.Any]: + if not args and "attribute" in kwargs: + attribute = kwargs.pop("attribute") + default = kwargs.pop("default", None) + + if kwargs: + raise FilterArgumentError( + f"Unexpected keyword argument {next(iter(kwargs))!r}" + ) + + func = make_attrgetter(context.environment, attribute, default=default) + else: + try: + name = args[0] + args = args[1:] + except LookupError: + raise FilterArgumentError("map requires a filter argument") from None + + def func(item: t.Any) -> t.Any: + return context.environment.call_filter( + name, item, args, kwargs, context=context + ) + + return func + + +def prepare_select_or_reject( + context: "Context", + args: t.Tuple, + kwargs: t.Dict[str, t.Any], + modfunc: t.Callable[[t.Any], t.Any], + lookup_attr: bool, +) -> t.Callable[[t.Any], t.Any]: + if lookup_attr: + try: + attr = args[0] + except LookupError: + raise FilterArgumentError("Missing parameter for attribute name") from None + + transfunc = make_attrgetter(context.environment, attr) + off = 1 + else: + off = 0 + + def transfunc(x: V) -> V: + return x + + try: + name = args[off] + args = args[1 + off :] + + def func(item: t.Any) -> t.Any: + return context.environment.call_test(name, item, args, kwargs) + + except LookupError: + func = bool # type: ignore + + return lambda item: modfunc(func(transfunc(item))) + + +def select_or_reject( + context: "Context", + value: "t.Iterable[V]", + args: t.Tuple, + kwargs: t.Dict[str, t.Any], + modfunc: t.Callable[[t.Any], t.Any], + lookup_attr: bool, +) -> "t.Iterator[V]": + if value: + func = prepare_select_or_reject(context, args, kwargs, modfunc, lookup_attr) + + for item in value: + if func(item): + yield item + + +async def async_select_or_reject( + context: "Context", + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", + args: t.Tuple, + kwargs: t.Dict[str, t.Any], + modfunc: t.Callable[[t.Any], t.Any], + lookup_attr: bool, +) -> "t.AsyncIterator[V]": + if value: + func = prepare_select_or_reject(context, args, kwargs, modfunc, lookup_attr) + + async for item in auto_aiter(value): + if func(item): + yield item + + +FILTERS = { + "abs": abs, + "attr": do_attr, + "batch": do_batch, + "capitalize": do_capitalize, + "center": do_center, + "count": len, + "d": do_default, + "default": do_default, + "dictsort": do_dictsort, + "e": escape, + "escape": escape, + "filesizeformat": do_filesizeformat, + "first": do_first, + "float": do_float, + "forceescape": do_forceescape, + "format": do_format, + "groupby": do_groupby, + "indent": do_indent, + "int": do_int, + "join": do_join, + "last": do_last, + "length": len, + "list": do_list, + "lower": do_lower, + "items": do_items, + "map": do_map, + "min": do_min, + "max": do_max, + "pprint": do_pprint, + "random": do_random, + "reject": do_reject, + "rejectattr": do_rejectattr, + "replace": do_replace, + "reverse": do_reverse, + "round": do_round, + "safe": do_mark_safe, + "select": do_select, + "selectattr": do_selectattr, + "slice": do_slice, + "sort": do_sort, + "string": soft_str, + "striptags": do_striptags, + "sum": do_sum, + "title": do_title, + "trim": do_trim, + "truncate": do_truncate, + "unique": do_unique, + 
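+    # Note that several keys in this mapping are aliases for the same
+    # callable: "d"/"default", "e"/"escape", and "count"/"length" above
+    # all resolve identically.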
"upper": do_upper, + "urlencode": do_urlencode, + "urlize": do_urlize, + "wordcount": do_wordcount, + "wordwrap": do_wordwrap, + "xmlattr": do_xmlattr, + "tojson": do_tojson, +} diff --git a/backend/test/lib/python3.8/site-packages/jinja2/idtracking.py b/backend/test/lib/python3.8/site-packages/jinja2/idtracking.py new file mode 100644 index 0000000000000000000000000000000000000000..995ebaa0c8178ddb9e0479e0e9f6d30ed863a785 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/idtracking.py @@ -0,0 +1,318 @@ +import typing as t + +from . import nodes +from .visitor import NodeVisitor + +VAR_LOAD_PARAMETER = "param" +VAR_LOAD_RESOLVE = "resolve" +VAR_LOAD_ALIAS = "alias" +VAR_LOAD_UNDEFINED = "undefined" + + +def find_symbols( + nodes: t.Iterable[nodes.Node], parent_symbols: t.Optional["Symbols"] = None +) -> "Symbols": + sym = Symbols(parent=parent_symbols) + visitor = FrameSymbolVisitor(sym) + for node in nodes: + visitor.visit(node) + return sym + + +def symbols_for_node( + node: nodes.Node, parent_symbols: t.Optional["Symbols"] = None +) -> "Symbols": + sym = Symbols(parent=parent_symbols) + sym.analyze_node(node) + return sym + + +class Symbols: + def __init__( + self, parent: t.Optional["Symbols"] = None, level: t.Optional[int] = None + ) -> None: + if level is None: + if parent is None: + level = 0 + else: + level = parent.level + 1 + + self.level: int = level + self.parent = parent + self.refs: t.Dict[str, str] = {} + self.loads: t.Dict[str, t.Any] = {} + self.stores: t.Set[str] = set() + + def analyze_node(self, node: nodes.Node, **kwargs: t.Any) -> None: + visitor = RootVisitor(self) + visitor.visit(node, **kwargs) + + def _define_ref( + self, name: str, load: t.Optional[t.Tuple[str, t.Optional[str]]] = None + ) -> str: + ident = f"l_{self.level}_{name}" + self.refs[name] = ident + if load is not None: + self.loads[ident] = load + return ident + + def find_load(self, target: str) -> t.Optional[t.Any]: + if target in self.loads: + return self.loads[target] + + if self.parent is not None: + return self.parent.find_load(target) + + return None + + def find_ref(self, name: str) -> t.Optional[str]: + if name in self.refs: + return self.refs[name] + + if self.parent is not None: + return self.parent.find_ref(name) + + return None + + def ref(self, name: str) -> str: + rv = self.find_ref(name) + if rv is None: + raise AssertionError( + "Tried to resolve a name to a reference that was" + f" unknown to the frame ({name!r})" + ) + return rv + + def copy(self) -> "Symbols": + rv = object.__new__(self.__class__) + rv.__dict__.update(self.__dict__) + rv.refs = self.refs.copy() + rv.loads = self.loads.copy() + rv.stores = self.stores.copy() + return rv + + def store(self, name: str) -> None: + self.stores.add(name) + + # If we have not see the name referenced yet, we need to figure + # out what to set it to. + if name not in self.refs: + # If there is a parent scope we check if the name has a + # reference there. If it does it means we might have to alias + # to a variable there. + if self.parent is not None: + outer_ref = self.parent.find_ref(name) + if outer_ref is not None: + self._define_ref(name, load=(VAR_LOAD_ALIAS, outer_ref)) + return + + # Otherwise we can just set it to undefined. 
+ self._define_ref(name, load=(VAR_LOAD_UNDEFINED, None)) + + def declare_parameter(self, name: str) -> str: + self.stores.add(name) + return self._define_ref(name, load=(VAR_LOAD_PARAMETER, None)) + + def load(self, name: str) -> None: + if self.find_ref(name) is None: + self._define_ref(name, load=(VAR_LOAD_RESOLVE, name)) + + def branch_update(self, branch_symbols: t.Sequence["Symbols"]) -> None: + stores: t.Dict[str, int] = {} + for branch in branch_symbols: + for target in branch.stores: + if target in self.stores: + continue + stores[target] = stores.get(target, 0) + 1 + + for sym in branch_symbols: + self.refs.update(sym.refs) + self.loads.update(sym.loads) + self.stores.update(sym.stores) + + for name, branch_count in stores.items(): + if branch_count == len(branch_symbols): + continue + + target = self.find_ref(name) # type: ignore + assert target is not None, "should not happen" + + if self.parent is not None: + outer_target = self.parent.find_ref(name) + if outer_target is not None: + self.loads[target] = (VAR_LOAD_ALIAS, outer_target) + continue + self.loads[target] = (VAR_LOAD_RESOLVE, name) + + def dump_stores(self) -> t.Dict[str, str]: + rv: t.Dict[str, str] = {} + node: t.Optional["Symbols"] = self + + while node is not None: + for name in sorted(node.stores): + if name not in rv: + rv[name] = self.find_ref(name) # type: ignore + + node = node.parent + + return rv + + def dump_param_targets(self) -> t.Set[str]: + rv = set() + node: t.Optional["Symbols"] = self + + while node is not None: + for target, (instr, _) in self.loads.items(): + if instr == VAR_LOAD_PARAMETER: + rv.add(target) + + node = node.parent + + return rv + + +class RootVisitor(NodeVisitor): + def __init__(self, symbols: "Symbols") -> None: + self.sym_visitor = FrameSymbolVisitor(symbols) + + def _simple_visit(self, node: nodes.Node, **kwargs: t.Any) -> None: + for child in node.iter_child_nodes(): + self.sym_visitor.visit(child) + + visit_Template = _simple_visit + visit_Block = _simple_visit + visit_Macro = _simple_visit + visit_FilterBlock = _simple_visit + visit_Scope = _simple_visit + visit_If = _simple_visit + visit_ScopedEvalContextModifier = _simple_visit + + def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None: + for child in node.body: + self.sym_visitor.visit(child) + + def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None: + for child in node.iter_child_nodes(exclude=("call",)): + self.sym_visitor.visit(child) + + def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None: + for child in node.body: + self.sym_visitor.visit(child) + + def visit_For( + self, node: nodes.For, for_branch: str = "body", **kwargs: t.Any + ) -> None: + if for_branch == "body": + self.sym_visitor.visit(node.target, store_as_param=True) + branch = node.body + elif for_branch == "else": + branch = node.else_ + elif for_branch == "test": + self.sym_visitor.visit(node.target, store_as_param=True) + if node.test is not None: + self.sym_visitor.visit(node.test) + return + else: + raise RuntimeError("Unknown for branch") + + if branch: + for item in branch: + self.sym_visitor.visit(item) + + def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None: + for target in node.targets: + self.sym_visitor.visit(target) + for child in node.body: + self.sym_visitor.visit(child) + + def generic_visit(self, node: nodes.Node, *args: t.Any, **kwargs: t.Any) -> None: + raise NotImplementedError(f"Cannot find symbols for {type(node).__name__!r}") + + +class 
FrameSymbolVisitor(NodeVisitor): + """A visitor for `Frame.inspect`.""" + + def __init__(self, symbols: "Symbols") -> None: + self.symbols = symbols + + def visit_Name( + self, node: nodes.Name, store_as_param: bool = False, **kwargs: t.Any + ) -> None: + """All assignments to names go through this function.""" + if store_as_param or node.ctx == "param": + self.symbols.declare_parameter(node.name) + elif node.ctx == "store": + self.symbols.store(node.name) + elif node.ctx == "load": + self.symbols.load(node.name) + + def visit_NSRef(self, node: nodes.NSRef, **kwargs: t.Any) -> None: + self.symbols.load(node.name) + + def visit_If(self, node: nodes.If, **kwargs: t.Any) -> None: + self.visit(node.test, **kwargs) + original_symbols = self.symbols + + def inner_visit(nodes: t.Iterable[nodes.Node]) -> "Symbols": + self.symbols = rv = original_symbols.copy() + + for subnode in nodes: + self.visit(subnode, **kwargs) + + self.symbols = original_symbols + return rv + + body_symbols = inner_visit(node.body) + elif_symbols = inner_visit(node.elif_) + else_symbols = inner_visit(node.else_ or ()) + self.symbols.branch_update([body_symbols, elif_symbols, else_symbols]) + + def visit_Macro(self, node: nodes.Macro, **kwargs: t.Any) -> None: + self.symbols.store(node.name) + + def visit_Import(self, node: nodes.Import, **kwargs: t.Any) -> None: + self.generic_visit(node, **kwargs) + self.symbols.store(node.target) + + def visit_FromImport(self, node: nodes.FromImport, **kwargs: t.Any) -> None: + self.generic_visit(node, **kwargs) + + for name in node.names: + if isinstance(name, tuple): + self.symbols.store(name[1]) + else: + self.symbols.store(name) + + def visit_Assign(self, node: nodes.Assign, **kwargs: t.Any) -> None: + """Visit assignments in the correct order.""" + self.visit(node.node, **kwargs) + self.visit(node.target, **kwargs) + + def visit_For(self, node: nodes.For, **kwargs: t.Any) -> None: + """Visiting stops at for blocks. However the block sequence + is visited as part of the outer scope. + """ + self.visit(node.iter, **kwargs) + + def visit_CallBlock(self, node: nodes.CallBlock, **kwargs: t.Any) -> None: + self.visit(node.call, **kwargs) + + def visit_FilterBlock(self, node: nodes.FilterBlock, **kwargs: t.Any) -> None: + self.visit(node.filter, **kwargs) + + def visit_With(self, node: nodes.With, **kwargs: t.Any) -> None: + for target in node.values: + self.visit(target) + + def visit_AssignBlock(self, node: nodes.AssignBlock, **kwargs: t.Any) -> None: + """Stop visiting at block assigns.""" + self.visit(node.target, **kwargs) + + def visit_Scope(self, node: nodes.Scope, **kwargs: t.Any) -> None: + """Stop visiting at scopes.""" + + def visit_Block(self, node: nodes.Block, **kwargs: t.Any) -> None: + """Stop visiting at blocks.""" + + def visit_OverlayScope(self, node: nodes.OverlayScope, **kwargs: t.Any) -> None: + """Do not visit into overlay scopes.""" diff --git a/backend/test/lib/python3.8/site-packages/jinja2/lexer.py b/backend/test/lib/python3.8/site-packages/jinja2/lexer.py new file mode 100644 index 0000000000000000000000000000000000000000..aff7e9f993792e1ced39c93fc0d39dcb5bdd5fde --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/lexer.py @@ -0,0 +1,866 @@ +"""Implements a Jinja / Python combination lexer. The ``Lexer`` class +is used to do some preprocessing. It filters out invalid operators like +the bitshift operators we don't allow in templates. It separates +template code and python code in expressions. 
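+In short, ``Lexer.tokenize`` yields a ``TokenStream`` of ``Token``
+namedtuples, which the parser consumes one token at a time.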
+""" +import re +import typing as t +from ast import literal_eval +from collections import deque +from sys import intern + +from ._identifier import pattern as name_re +from .exceptions import TemplateSyntaxError +from .utils import LRUCache + +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment + +# cache for the lexers. Exists in order to be able to have multiple +# environments with the same lexer +_lexer_cache: t.MutableMapping[t.Tuple, "Lexer"] = LRUCache(50) # type: ignore + +# static regular expressions +whitespace_re = re.compile(r"\s+") +newline_re = re.compile(r"(\r\n|\r|\n)") +string_re = re.compile( + r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S +) +integer_re = re.compile( + r""" + ( + 0b(_?[0-1])+ # binary + | + 0o(_?[0-7])+ # octal + | + 0x(_?[\da-f])+ # hex + | + [1-9](_?\d)* # decimal + | + 0(_?0)* # decimal zero + ) + """, + re.IGNORECASE | re.VERBOSE, +) +float_re = re.compile( + r""" + (?<!\.) # doesn't start with a . + (\d+_)*\d+ # digits, possibly _ separated + ( + (\.(\d+_)*\d+)? # optional fractional part + e[+\-]?(\d+_)*\d+ # exponent part + | + \.(\d+_)*\d+ # required fractional part + ) + """, + re.IGNORECASE | re.VERBOSE, +) + +# internal the tokens and keep references to them +TOKEN_ADD = intern("add") +TOKEN_ASSIGN = intern("assign") +TOKEN_COLON = intern("colon") +TOKEN_COMMA = intern("comma") +TOKEN_DIV = intern("div") +TOKEN_DOT = intern("dot") +TOKEN_EQ = intern("eq") +TOKEN_FLOORDIV = intern("floordiv") +TOKEN_GT = intern("gt") +TOKEN_GTEQ = intern("gteq") +TOKEN_LBRACE = intern("lbrace") +TOKEN_LBRACKET = intern("lbracket") +TOKEN_LPAREN = intern("lparen") +TOKEN_LT = intern("lt") +TOKEN_LTEQ = intern("lteq") +TOKEN_MOD = intern("mod") +TOKEN_MUL = intern("mul") +TOKEN_NE = intern("ne") +TOKEN_PIPE = intern("pipe") +TOKEN_POW = intern("pow") +TOKEN_RBRACE = intern("rbrace") +TOKEN_RBRACKET = intern("rbracket") +TOKEN_RPAREN = intern("rparen") +TOKEN_SEMICOLON = intern("semicolon") +TOKEN_SUB = intern("sub") +TOKEN_TILDE = intern("tilde") +TOKEN_WHITESPACE = intern("whitespace") +TOKEN_FLOAT = intern("float") +TOKEN_INTEGER = intern("integer") +TOKEN_NAME = intern("name") +TOKEN_STRING = intern("string") +TOKEN_OPERATOR = intern("operator") +TOKEN_BLOCK_BEGIN = intern("block_begin") +TOKEN_BLOCK_END = intern("block_end") +TOKEN_VARIABLE_BEGIN = intern("variable_begin") +TOKEN_VARIABLE_END = intern("variable_end") +TOKEN_RAW_BEGIN = intern("raw_begin") +TOKEN_RAW_END = intern("raw_end") +TOKEN_COMMENT_BEGIN = intern("comment_begin") +TOKEN_COMMENT_END = intern("comment_end") +TOKEN_COMMENT = intern("comment") +TOKEN_LINESTATEMENT_BEGIN = intern("linestatement_begin") +TOKEN_LINESTATEMENT_END = intern("linestatement_end") +TOKEN_LINECOMMENT_BEGIN = intern("linecomment_begin") +TOKEN_LINECOMMENT_END = intern("linecomment_end") +TOKEN_LINECOMMENT = intern("linecomment") +TOKEN_DATA = intern("data") +TOKEN_INITIAL = intern("initial") +TOKEN_EOF = intern("eof") + +# bind operators to token types +operators = { + "+": TOKEN_ADD, + "-": TOKEN_SUB, + "/": TOKEN_DIV, + "//": TOKEN_FLOORDIV, + "*": TOKEN_MUL, + "%": TOKEN_MOD, + "**": TOKEN_POW, + "~": TOKEN_TILDE, + "[": TOKEN_LBRACKET, + "]": TOKEN_RBRACKET, + "(": TOKEN_LPAREN, + ")": TOKEN_RPAREN, + "{": TOKEN_LBRACE, + "}": TOKEN_RBRACE, + "==": TOKEN_EQ, + "!=": TOKEN_NE, + ">": TOKEN_GT, + ">=": TOKEN_GTEQ, + "<": TOKEN_LT, + "<=": TOKEN_LTEQ, + "=": TOKEN_ASSIGN, + ".": TOKEN_DOT, + ":": TOKEN_COLON, + "|": TOKEN_PIPE, + ",": TOKEN_COMMA, + ";": 
TOKEN_SEMICOLON, +} + +reverse_operators = {v: k for k, v in operators.items()} +assert len(operators) == len(reverse_operators), "operators dropped" +operator_re = re.compile( + f"({'|'.join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))})" +) + +ignored_tokens = frozenset( + [ + TOKEN_COMMENT_BEGIN, + TOKEN_COMMENT, + TOKEN_COMMENT_END, + TOKEN_WHITESPACE, + TOKEN_LINECOMMENT_BEGIN, + TOKEN_LINECOMMENT_END, + TOKEN_LINECOMMENT, + ] +) +ignore_if_empty = frozenset( + [TOKEN_WHITESPACE, TOKEN_DATA, TOKEN_COMMENT, TOKEN_LINECOMMENT] +) + + +def _describe_token_type(token_type: str) -> str: + if token_type in reverse_operators: + return reverse_operators[token_type] + + return { + TOKEN_COMMENT_BEGIN: "begin of comment", + TOKEN_COMMENT_END: "end of comment", + TOKEN_COMMENT: "comment", + TOKEN_LINECOMMENT: "comment", + TOKEN_BLOCK_BEGIN: "begin of statement block", + TOKEN_BLOCK_END: "end of statement block", + TOKEN_VARIABLE_BEGIN: "begin of print statement", + TOKEN_VARIABLE_END: "end of print statement", + TOKEN_LINESTATEMENT_BEGIN: "begin of line statement", + TOKEN_LINESTATEMENT_END: "end of line statement", + TOKEN_DATA: "template data / text", + TOKEN_EOF: "end of template", + }.get(token_type, token_type) + + +def describe_token(token: "Token") -> str: + """Returns a description of the token.""" + if token.type == TOKEN_NAME: + return token.value + + return _describe_token_type(token.type) + + +def describe_token_expr(expr: str) -> str: + """Like `describe_token` but for token expressions.""" + if ":" in expr: + type, value = expr.split(":", 1) + + if type == TOKEN_NAME: + return value + else: + type = expr + + return _describe_token_type(type) + + +def count_newlines(value: str) -> int: + """Count the number of newline characters in the string. This is + useful for extensions that filter a stream. + """ + return len(newline_re.findall(value)) + + +def compile_rules(environment: "Environment") -> t.List[t.Tuple[str, str]]: + """Compiles all the rules from the environment into a list of rules.""" + e = re.escape + rules = [ + ( + len(environment.comment_start_string), + TOKEN_COMMENT_BEGIN, + e(environment.comment_start_string), + ), + ( + len(environment.block_start_string), + TOKEN_BLOCK_BEGIN, + e(environment.block_start_string), + ), + ( + len(environment.variable_start_string), + TOKEN_VARIABLE_BEGIN, + e(environment.variable_start_string), + ), + ] + + if environment.line_statement_prefix is not None: + rules.append( + ( + len(environment.line_statement_prefix), + TOKEN_LINESTATEMENT_BEGIN, + r"^[ \t\v]*" + e(environment.line_statement_prefix), + ) + ) + if environment.line_comment_prefix is not None: + rules.append( + ( + len(environment.line_comment_prefix), + TOKEN_LINECOMMENT_BEGIN, + r"(?:^|(?<=\S))[^\S\r\n]*" + e(environment.line_comment_prefix), + ) + ) + + return [x[1:] for x in sorted(rules, reverse=True)] + + +class Failure: + """Class that raises a `TemplateSyntaxError` if called. + Used by the `Lexer` to specify known errors. + """ + + def __init__( + self, message: str, cls: t.Type[TemplateSyntaxError] = TemplateSyntaxError + ) -> None: + self.message = message + self.error_class = cls + + def __call__(self, lineno: int, filename: str) -> "te.NoReturn": + raise self.error_class(self.message, lineno, filename) + + +class Token(t.NamedTuple): + lineno: int + type: str + value: str + + def __str__(self) -> str: + return describe_token(self) + + def test(self, expr: str) -> bool: + """Test a token against a token expression. 
This can either be a + token type or ``'token_type:token_value'``. This can only test + against string values and types. + """ + # here we do a regular string equality check as test_any is usually + # passed an iterable of not interned strings. + if self.type == expr: + return True + + if ":" in expr: + return expr.split(":", 1) == [self.type, self.value] + + return False + + def test_any(self, *iterable: str) -> bool: + """Test against multiple token expressions.""" + return any(self.test(expr) for expr in iterable) + + +class TokenStreamIterator: + """The iterator for tokenstreams. Iterate over the stream + until the eof token is reached. + """ + + def __init__(self, stream: "TokenStream") -> None: + self.stream = stream + + def __iter__(self) -> "TokenStreamIterator": + return self + + def __next__(self) -> Token: + token = self.stream.current + + if token.type is TOKEN_EOF: + self.stream.close() + raise StopIteration + + next(self.stream) + return token + + +class TokenStream: + """A token stream is an iterable that yields :class:`Token`\\s. The + parser however does not iterate over it but calls :meth:`next` to go + one token ahead. The current active token is stored as :attr:`current`. + """ + + def __init__( + self, + generator: t.Iterable[Token], + name: t.Optional[str], + filename: t.Optional[str], + ): + self._iter = iter(generator) + self._pushed: "te.Deque[Token]" = deque() + self.name = name + self.filename = filename + self.closed = False + self.current = Token(1, TOKEN_INITIAL, "") + next(self) + + def __iter__(self) -> TokenStreamIterator: + return TokenStreamIterator(self) + + def __bool__(self) -> bool: + return bool(self._pushed) or self.current.type is not TOKEN_EOF + + @property + def eos(self) -> bool: + """Are we at the end of the stream?""" + return not self + + def push(self, token: Token) -> None: + """Push a token back to the stream.""" + self._pushed.append(token) + + def look(self) -> Token: + """Look at the next token.""" + old_token = next(self) + result = self.current + self.push(result) + self.current = old_token + return result + + def skip(self, n: int = 1) -> None: + """Got n tokens ahead.""" + for _ in range(n): + next(self) + + def next_if(self, expr: str) -> t.Optional[Token]: + """Perform the token test and return the token if it matched. + Otherwise the return value is `None`. + """ + if self.current.test(expr): + return next(self) + + return None + + def skip_if(self, expr: str) -> bool: + """Like :meth:`next_if` but only returns `True` or `False`.""" + return self.next_if(expr) is not None + + def __next__(self) -> Token: + """Go one token ahead and return the old one. + + Use the built-in :func:`next` instead of calling this directly. + """ + rv = self.current + + if self._pushed: + self.current = self._pushed.popleft() + elif self.current.type is not TOKEN_EOF: + try: + self.current = next(self._iter) + except StopIteration: + self.close() + + return rv + + def close(self) -> None: + """Close the stream.""" + self.current = Token(self.current.lineno, TOKEN_EOF, "") + self._iter = iter(()) + self.closed = True + + def expect(self, expr: str) -> Token: + """Expect a given token type and return it. This accepts the same + argument as :meth:`jinja2.lexer.Token.test`. 
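+        For example, ``stream.expect("block_end")`` returns the token and
+        advances the stream, or raises a ``TemplateSyntaxError`` that
+        describes what was found instead.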
+ """ + if not self.current.test(expr): + expr = describe_token_expr(expr) + + if self.current.type is TOKEN_EOF: + raise TemplateSyntaxError( + f"unexpected end of template, expected {expr!r}.", + self.current.lineno, + self.name, + self.filename, + ) + + raise TemplateSyntaxError( + f"expected token {expr!r}, got {describe_token(self.current)!r}", + self.current.lineno, + self.name, + self.filename, + ) + + return next(self) + + +def get_lexer(environment: "Environment") -> "Lexer": + """Return a lexer which is probably cached.""" + key = ( + environment.block_start_string, + environment.block_end_string, + environment.variable_start_string, + environment.variable_end_string, + environment.comment_start_string, + environment.comment_end_string, + environment.line_statement_prefix, + environment.line_comment_prefix, + environment.trim_blocks, + environment.lstrip_blocks, + environment.newline_sequence, + environment.keep_trailing_newline, + ) + lexer = _lexer_cache.get(key) + + if lexer is None: + _lexer_cache[key] = lexer = Lexer(environment) + + return lexer + + +class OptionalLStrip(tuple): + """A special tuple for marking a point in the state that can have + lstrip applied. + """ + + __slots__ = () + + # Even though it looks like a no-op, creating instances fails + # without this. + def __new__(cls, *members, **kwargs): # type: ignore + return super().__new__(cls, members) + + +class _Rule(t.NamedTuple): + pattern: t.Pattern[str] + tokens: t.Union[str, t.Tuple[str, ...], t.Tuple[Failure]] + command: t.Optional[str] + + +class Lexer: + """Class that implements a lexer for a given environment. Automatically + created by the environment class, usually you don't have to do that. + + Note that the lexer is not automatically bound to an environment. + Multiple environments can share the same lexer. + """ + + def __init__(self, environment: "Environment") -> None: + # shortcuts + e = re.escape + + def c(x: str) -> t.Pattern[str]: + return re.compile(x, re.M | re.S) + + # lexing rules for tags + tag_rules: t.List[_Rule] = [ + _Rule(whitespace_re, TOKEN_WHITESPACE, None), + _Rule(float_re, TOKEN_FLOAT, None), + _Rule(integer_re, TOKEN_INTEGER, None), + _Rule(name_re, TOKEN_NAME, None), + _Rule(string_re, TOKEN_STRING, None), + _Rule(operator_re, TOKEN_OPERATOR, None), + ] + + # assemble the root lexing rule. because "|" is ungreedy + # we have to sort by length so that the lexer continues working + # as expected when we have parsing rules like <% for block and + # <%= for variables. (if someone wants asp like syntax) + # variables are just part of the rules if variable processing + # is required. + root_tag_rules = compile_rules(environment) + + block_start_re = e(environment.block_start_string) + block_end_re = e(environment.block_end_string) + comment_end_re = e(environment.comment_end_string) + variable_end_re = e(environment.variable_end_string) + + # block suffix if trimming is enabled + block_suffix_re = "\\n?" 
if environment.trim_blocks else "" + + self.lstrip_blocks = environment.lstrip_blocks + + self.newline_sequence = environment.newline_sequence + self.keep_trailing_newline = environment.keep_trailing_newline + + root_raw_re = ( + rf"(?P<raw_begin>{block_start_re}(\-|\+|)\s*raw\s*" + rf"(?:\-{block_end_re}\s*|{block_end_re}))" + ) + root_parts_re = "|".join( + [root_raw_re] + [rf"(?P<{n}>{r}(\-|\+|))" for n, r in root_tag_rules] + ) + + # global lexing rules + self.rules: t.Dict[str, t.List[_Rule]] = { + "root": [ + # directives + _Rule( + c(rf"(.*?)(?:{root_parts_re})"), + OptionalLStrip(TOKEN_DATA, "#bygroup"), # type: ignore + "#bygroup", + ), + # data + _Rule(c(".+"), TOKEN_DATA, None), + ], + # comments + TOKEN_COMMENT_BEGIN: [ + _Rule( + c( + rf"(.*?)((?:\+{comment_end_re}|\-{comment_end_re}\s*" + rf"|{comment_end_re}{block_suffix_re}))" + ), + (TOKEN_COMMENT, TOKEN_COMMENT_END), + "#pop", + ), + _Rule(c(r"(.)"), (Failure("Missing end of comment tag"),), None), + ], + # blocks + TOKEN_BLOCK_BEGIN: [ + _Rule( + c( + rf"(?:\+{block_end_re}|\-{block_end_re}\s*" + rf"|{block_end_re}{block_suffix_re})" + ), + TOKEN_BLOCK_END, + "#pop", + ), + ] + + tag_rules, + # variables + TOKEN_VARIABLE_BEGIN: [ + _Rule( + c(rf"\-{variable_end_re}\s*|{variable_end_re}"), + TOKEN_VARIABLE_END, + "#pop", + ) + ] + + tag_rules, + # raw block + TOKEN_RAW_BEGIN: [ + _Rule( + c( + rf"(.*?)((?:{block_start_re}(\-|\+|))\s*endraw\s*" + rf"(?:\+{block_end_re}|\-{block_end_re}\s*" + rf"|{block_end_re}{block_suffix_re}))" + ), + OptionalLStrip(TOKEN_DATA, TOKEN_RAW_END), # type: ignore + "#pop", + ), + _Rule(c(r"(.)"), (Failure("Missing end of raw directive"),), None), + ], + # line statements + TOKEN_LINESTATEMENT_BEGIN: [ + _Rule(c(r"\s*(\n|$)"), TOKEN_LINESTATEMENT_END, "#pop") + ] + + tag_rules, + # line comments + TOKEN_LINECOMMENT_BEGIN: [ + _Rule( + c(r"(.*?)()(?=\n|$)"), + (TOKEN_LINECOMMENT, TOKEN_LINECOMMENT_END), + "#pop", + ) + ], + } + + def _normalize_newlines(self, value: str) -> str: + """Replace all newlines with the configured sequence in strings + and template data. + """ + return newline_re.sub(self.newline_sequence, value) + + def tokenize( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + state: t.Optional[str] = None, + ) -> TokenStream: + """Calls tokeniter + tokenize and wraps it in a token stream.""" + stream = self.tokeniter(source, name, filename, state) + return TokenStream(self.wrap(stream, name, filename), name, filename) + + def wrap( + self, + stream: t.Iterable[t.Tuple[int, str, str]], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> t.Iterator[Token]: + """This is called with the stream as returned by `tokenize` and wraps + every token in a :class:`Token` and converts the value. 
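+        For example, ``integer`` token values are converted with ``int()``,
+        string tokens are unescaped, and ``operator`` tokens are renamed
+        to their specific token type.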
+ """ + for lineno, token, value_str in stream: + if token in ignored_tokens: + continue + + value: t.Any = value_str + + if token == TOKEN_LINESTATEMENT_BEGIN: + token = TOKEN_BLOCK_BEGIN + elif token == TOKEN_LINESTATEMENT_END: + token = TOKEN_BLOCK_END + # we are not interested in those tokens in the parser + elif token in (TOKEN_RAW_BEGIN, TOKEN_RAW_END): + continue + elif token == TOKEN_DATA: + value = self._normalize_newlines(value_str) + elif token == "keyword": + token = value_str + elif token == TOKEN_NAME: + value = value_str + + if not value.isidentifier(): + raise TemplateSyntaxError( + "Invalid character in identifier", lineno, name, filename + ) + elif token == TOKEN_STRING: + # try to unescape string + try: + value = ( + self._normalize_newlines(value_str[1:-1]) + .encode("ascii", "backslashreplace") + .decode("unicode-escape") + ) + except Exception as e: + msg = str(e).split(":")[-1].strip() + raise TemplateSyntaxError(msg, lineno, name, filename) from e + elif token == TOKEN_INTEGER: + value = int(value_str.replace("_", ""), 0) + elif token == TOKEN_FLOAT: + # remove all "_" first to support more Python versions + value = literal_eval(value_str.replace("_", "")) + elif token == TOKEN_OPERATOR: + token = operators[value_str] + + yield Token(lineno, token, value) + + def tokeniter( + self, + source: str, + name: t.Optional[str], + filename: t.Optional[str] = None, + state: t.Optional[str] = None, + ) -> t.Iterator[t.Tuple[int, str, str]]: + """This method tokenizes the text and returns the tokens in a + generator. Use this method if you just want to tokenize a template. + + .. versionchanged:: 3.0 + Only ``\\n``, ``\\r\\n`` and ``\\r`` are treated as line + breaks. + """ + lines = newline_re.split(source)[::2] + + if not self.keep_trailing_newline and lines[-1] == "": + del lines[-1] + + source = "\n".join(lines) + pos = 0 + lineno = 1 + stack = ["root"] + + if state is not None and state != "root": + assert state in ("variable", "block"), "invalid state" + stack.append(state + "_begin") + + statetokens = self.rules[stack[-1]] + source_length = len(source) + balancing_stack: t.List[str] = [] + newlines_stripped = 0 + line_starting = True + + while True: + # tokenizer loop + for regex, tokens, new_state in statetokens: + m = regex.match(source, pos) + + # if no match we try again with the next rule + if m is None: + continue + + # we only match blocks and variables if braces / parentheses + # are balanced. continue parsing with the lower rule which + # is the operator rule. do this only if the end tags look + # like operators + if balancing_stack and tokens in ( + TOKEN_VARIABLE_END, + TOKEN_BLOCK_END, + TOKEN_LINESTATEMENT_END, + ): + continue + + # tuples support more options + if isinstance(tokens, tuple): + groups: t.Sequence[str] = m.groups() + + if isinstance(tokens, OptionalLStrip): + # Rule supports lstrip. Match will look like + # text, block type, whitespace control, type, control, ... + text = groups[0] + # Skipping the text and first type, every other group is the + # whitespace control for each type. One of the groups will be + # -, +, or empty string instead of None. + strip_sign = next(g for g in groups[2::2] if g is not None) + + if strip_sign == "-": + # Strip all whitespace between the text and the tag. + stripped = text.rstrip() + newlines_stripped = text[len(stripped) :].count("\n") + groups = [stripped, *groups[1:]] + elif ( + # Not marked for preserving whitespace. + strip_sign != "+" + # lstrip is enabled. 
+ and self.lstrip_blocks + # Not a variable expression. + and not m.groupdict().get(TOKEN_VARIABLE_BEGIN) + ): + # The start of text between the last newline and the tag. + l_pos = text.rfind("\n") + 1 + + if l_pos > 0 or line_starting: + # If there's only whitespace between the newline and the + # tag, strip it. + if whitespace_re.fullmatch(text, l_pos): + groups = [text[:l_pos], *groups[1:]] + + for idx, token in enumerate(tokens): + # failure group + if token.__class__ is Failure: + raise token(lineno, filename) + # bygroup is a bit more complex, in that case we + # yield for the current token the first named + # group that matched + elif token == "#bygroup": + for key, value in m.groupdict().items(): + if value is not None: + yield lineno, key, value + lineno += value.count("\n") + break + else: + raise RuntimeError( + f"{regex!r} wanted to resolve the token dynamically" + " but no group matched" + ) + # normal group + else: + data = groups[idx] + + if data or token not in ignore_if_empty: + yield lineno, token, data + + lineno += data.count("\n") + newlines_stripped + newlines_stripped = 0 + + # strings as token just are yielded as it. + else: + data = m.group() + + # update brace/parentheses balance + if tokens == TOKEN_OPERATOR: + if data == "{": + balancing_stack.append("}") + elif data == "(": + balancing_stack.append(")") + elif data == "[": + balancing_stack.append("]") + elif data in ("}", ")", "]"): + if not balancing_stack: + raise TemplateSyntaxError( + f"unexpected '{data}'", lineno, name, filename + ) + + expected_op = balancing_stack.pop() + + if expected_op != data: + raise TemplateSyntaxError( + f"unexpected '{data}', expected '{expected_op}'", + lineno, + name, + filename, + ) + + # yield items + if data or tokens not in ignore_if_empty: + yield lineno, tokens, data + + lineno += data.count("\n") + + line_starting = m.group()[-1:] == "\n" + # fetch new position into new variable so that we can check + # if there is a internal parsing error which would result + # in an infinite loop + pos2 = m.end() + + # handle state changes + if new_state is not None: + # remove the uppermost state + if new_state == "#pop": + stack.pop() + # resolve the new state by group checking + elif new_state == "#bygroup": + for key, value in m.groupdict().items(): + if value is not None: + stack.append(key) + break + else: + raise RuntimeError( + f"{regex!r} wanted to resolve the new state dynamically" + f" but no group matched" + ) + # direct state name given + else: + stack.append(new_state) + + statetokens = self.rules[stack[-1]] + # we are still at the same position and no stack change. 
+ # this means a loop without break condition, avoid that and + # raise error + elif pos2 == pos: + raise RuntimeError( + f"{regex!r} yielded empty string without stack change" + ) + + # publish new function and start again + pos = pos2 + break + # if loop terminated without break we haven't found a single match + # either we are at the end of the file or we have a problem + else: + # end of text + if pos >= source_length: + return + + # something went wrong + raise TemplateSyntaxError( + f"unexpected char {source[pos]!r} at {pos}", lineno, name, filename + ) diff --git a/backend/test/lib/python3.8/site-packages/jinja2/loaders.py b/backend/test/lib/python3.8/site-packages/jinja2/loaders.py new file mode 100644 index 0000000000000000000000000000000000000000..d2f98093cde425fad2c4bbf2a07e383fce5e4a38 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/loaders.py @@ -0,0 +1,661 @@ +"""API and implementations for loading templates from different data +sources. +""" +import importlib.util +import os +import posixpath +import sys +import typing as t +import weakref +import zipimport +from collections import abc +from hashlib import sha1 +from importlib import import_module +from types import ModuleType + +from .exceptions import TemplateNotFound +from .utils import internalcode +from .utils import open_if_exists + +if t.TYPE_CHECKING: + from .environment import Environment + from .environment import Template + + +def split_template_path(template: str) -> t.List[str]: + """Split a path into segments and perform a sanity check. If it detects + '..' in the path it will raise a `TemplateNotFound` error. + """ + pieces = [] + for piece in template.split("/"): + if ( + os.path.sep in piece + or (os.path.altsep and os.path.altsep in piece) + or piece == os.path.pardir + ): + raise TemplateNotFound(template) + elif piece and piece != ".": + pieces.append(piece) + return pieces + + +class BaseLoader: + """Baseclass for all loaders. Subclass this and override `get_source` to + implement a custom loading mechanism. The environment provides a + `get_template` method that calls the loader's `load` method to get the + :class:`Template` object. + + A very basic example for a loader that looks up templates on the file + system could look like this:: + + from jinja2 import BaseLoader, TemplateNotFound + from os.path import join, exists, getmtime + + class MyLoader(BaseLoader): + + def __init__(self, path): + self.path = path + + def get_source(self, environment, template): + path = join(self.path, template) + if not exists(path): + raise TemplateNotFound(template) + mtime = getmtime(path) + with open(path) as f: + source = f.read() + return source, path, lambda: mtime == getmtime(path) + """ + + #: if set to `False` it indicates that the loader cannot provide access + #: to the source of templates. + #: + #: .. versionadded:: 2.4 + has_source_access = True + + def get_source( + self, environment: "Environment", template: str + ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]: + """Get the template source, filename and reload helper for a template. + It's passed the environment and template name and has to return a + tuple in the form ``(source, filename, uptodate)`` or raise a + `TemplateNotFound` error if it can't locate the template. + + The source part of the returned tuple must be the source of the + template as a string. The filename should be the name of the + file on the filesystem if it was loaded from there, otherwise + ``None``. 
The filename is used by Python for the tracebacks + if no loader extension is used. + + The last item in the tuple is the `uptodate` function. If auto + reloading is enabled it's always called to check if the template + changed. No arguments are passed so the function must store the + old state somewhere (for example in a closure). If it returns `False` + the template will be reloaded. + """ + if not self.has_source_access: + raise RuntimeError( + f"{type(self).__name__} cannot provide access to the source" + ) + raise TemplateNotFound(template) + + def list_templates(self) -> t.List[str]: + """Iterates over all templates. If the loader does not support that + it should raise a :exc:`TypeError` which is the default behavior. + """ + raise TypeError("this loader cannot iterate over all templates") + + @internalcode + def load( + self, + environment: "Environment", + name: str, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> "Template": + """Loads a template. This method looks up the template in the cache + or loads one by calling :meth:`get_source`. Subclasses should not + override this method as loaders working on collections of other + loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`) + will not call this method but `get_source` directly. + """ + code = None + if globals is None: + globals = {} + + # first we try to get the source for this template together + # with the filename and the uptodate function. + source, filename, uptodate = self.get_source(environment, name) + + # try to load the code from the bytecode cache if there is a + # bytecode cache configured. + bcc = environment.bytecode_cache + if bcc is not None: + bucket = bcc.get_bucket(environment, name, filename, source) + code = bucket.code + + # if we don't have code so far (not cached, no longer up to + # date) etc. we compile the template + if code is None: + code = environment.compile(source, name, filename) + + # if the bytecode cache is available and the bucket doesn't + # have a code so far, we give the bucket the new code and put + # it back to the bytecode cache. + if bcc is not None and bucket.code is None: + bucket.code = code + bcc.set_bucket(bucket) + + return environment.template_class.from_code( + environment, code, globals, uptodate + ) + + +class FileSystemLoader(BaseLoader): + """Load templates from a directory in the file system. + + The path can be relative or absolute. Relative paths are relative to + the current working directory. + + .. code-block:: python + + loader = FileSystemLoader("templates") + + A list of paths can be given. The directories will be searched in + order, stopping at the first matching template. + + .. code-block:: python + + loader = FileSystemLoader(["/override/templates", "/default/templates"]) + + :param searchpath: A path, or list of paths, to the directory that + contains the templates. + :param encoding: Use this encoding to read the text from template + files. + :param followlinks: Follow symbolic links in the path. + + .. versionchanged:: 2.8 + Added the ``followlinks`` parameter. 
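+    Anything ``os.PathLike`` also works as a search path, for example:
+
+    .. code-block:: python
+
+        from pathlib import Path
+
+        loader = FileSystemLoader(Path("templates"))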
+ """ + + def __init__( + self, + searchpath: t.Union[str, os.PathLike, t.Sequence[t.Union[str, os.PathLike]]], + encoding: str = "utf-8", + followlinks: bool = False, + ) -> None: + if not isinstance(searchpath, abc.Iterable) or isinstance(searchpath, str): + searchpath = [searchpath] + + self.searchpath = [os.fspath(p) for p in searchpath] + self.encoding = encoding + self.followlinks = followlinks + + def get_source( + self, environment: "Environment", template: str + ) -> t.Tuple[str, str, t.Callable[[], bool]]: + pieces = split_template_path(template) + for searchpath in self.searchpath: + # Use posixpath even on Windows to avoid "drive:" or UNC + # segments breaking out of the search directory. + filename = posixpath.join(searchpath, *pieces) + f = open_if_exists(filename) + if f is None: + continue + try: + contents = f.read().decode(self.encoding) + finally: + f.close() + + mtime = os.path.getmtime(filename) + + def uptodate() -> bool: + try: + return os.path.getmtime(filename) == mtime + except OSError: + return False + + # Use normpath to convert Windows altsep to sep. + return contents, os.path.normpath(filename), uptodate + raise TemplateNotFound(template) + + def list_templates(self) -> t.List[str]: + found = set() + for searchpath in self.searchpath: + walk_dir = os.walk(searchpath, followlinks=self.followlinks) + for dirpath, _, filenames in walk_dir: + for filename in filenames: + template = ( + os.path.join(dirpath, filename)[len(searchpath) :] + .strip(os.path.sep) + .replace(os.path.sep, "/") + ) + if template[:2] == "./": + template = template[2:] + if template not in found: + found.add(template) + return sorted(found) + + +class PackageLoader(BaseLoader): + """Load templates from a directory in a Python package. + + :param package_name: Import name of the package that contains the + template directory. + :param package_path: Directory within the imported package that + contains the templates. + :param encoding: Encoding of template files. + + The following example looks up templates in the ``pages`` directory + within the ``project.ui`` package. + + .. code-block:: python + + loader = PackageLoader("project.ui", "pages") + + Only packages installed as directories (standard pip behavior) or + zip/egg files (less common) are supported. The Python API for + introspecting data in packages is too limited to support other + installation methods the way this loader requires. + + There is limited support for :pep:`420` namespace packages. The + template directory is assumed to only be in one namespace + contributor. Zip files contributing to a namespace are not + supported. + + .. versionchanged:: 3.0 + No longer uses ``setuptools`` as a dependency. + + .. versionchanged:: 3.0 + Limited PEP 420 namespace package support. + """ + + def __init__( + self, + package_name: str, + package_path: "str" = "templates", + encoding: str = "utf-8", + ) -> None: + package_path = os.path.normpath(package_path).rstrip(os.path.sep) + + # normpath preserves ".", which isn't valid in zip paths. + if package_path == os.path.curdir: + package_path = "" + elif package_path[:2] == os.path.curdir + os.path.sep: + package_path = package_path[2:] + + self.package_path = package_path + self.package_name = package_name + self.encoding = encoding + + # Make sure the package exists. This also makes namespace + # packages work, otherwise get_loader returns None. 
+ import_module(package_name) + spec = importlib.util.find_spec(package_name) + assert spec is not None, "An import spec was not found for the package." + loader = spec.loader + assert loader is not None, "A loader was not found for the package." + self._loader = loader + self._archive = None + template_root = None + + if isinstance(loader, zipimport.zipimporter): + self._archive = loader.archive + pkgdir = next(iter(spec.submodule_search_locations)) # type: ignore + template_root = os.path.join(pkgdir, package_path).rstrip(os.path.sep) + else: + roots: t.List[str] = [] + + # One element for regular packages, multiple for namespace + # packages, or None for single module file. + if spec.submodule_search_locations: + roots.extend(spec.submodule_search_locations) + # A single module file, use the parent directory instead. + elif spec.origin is not None: + roots.append(os.path.dirname(spec.origin)) + + for root in roots: + root = os.path.join(root, package_path) + + if os.path.isdir(root): + template_root = root + break + + if template_root is None: + raise ValueError( + f"The {package_name!r} package was not installed in a" + " way that PackageLoader understands." + ) + + self._template_root = template_root + + def get_source( + self, environment: "Environment", template: str + ) -> t.Tuple[str, str, t.Optional[t.Callable[[], bool]]]: + # Use posixpath even on Windows to avoid "drive:" or UNC + # segments breaking out of the search directory. Use normpath to + # convert Windows altsep to sep. + p = os.path.normpath( + posixpath.join(self._template_root, *split_template_path(template)) + ) + up_to_date: t.Optional[t.Callable[[], bool]] + + if self._archive is None: + # Package is a directory. + if not os.path.isfile(p): + raise TemplateNotFound(template) + + with open(p, "rb") as f: + source = f.read() + + mtime = os.path.getmtime(p) + + def up_to_date() -> bool: + return os.path.isfile(p) and os.path.getmtime(p) == mtime + + else: + # Package is a zip file. + try: + source = self._loader.get_data(p) # type: ignore + except OSError as e: + raise TemplateNotFound(template) from e + + # Could use the zip's mtime for all template mtimes, but + # would need to safely reload the module if it's out of + # date, so just report it as always current. + up_to_date = None + + return source.decode(self.encoding), p, up_to_date + + def list_templates(self) -> t.List[str]: + results: t.List[str] = [] + + if self._archive is None: + # Package is a directory. + offset = len(self._template_root) + + for dirpath, _, filenames in os.walk(self._template_root): + dirpath = dirpath[offset:].lstrip(os.path.sep) + results.extend( + os.path.join(dirpath, name).replace(os.path.sep, "/") + for name in filenames + ) + else: + if not hasattr(self._loader, "_files"): + raise TypeError( + "This zip import does not have the required" + " metadata to list templates." + ) + + # Package is a zip file. + prefix = ( + self._template_root[len(self._archive) :].lstrip(os.path.sep) + + os.path.sep + ) + offset = len(prefix) + + for name in self._loader._files.keys(): # type: ignore + # Find names under the templates directory that aren't directories. + if name.startswith(prefix) and name[-1] != os.path.sep: + results.append(name[offset:].replace(os.path.sep, "/")) + + results.sort() + return results + + +class DictLoader(BaseLoader): + """Loads a template from a Python dict mapping template names to + template source. 
This loader is useful for unittesting: + + >>> loader = DictLoader({'index.html': 'source here'}) + + Because auto reloading is rarely useful this is disabled per default. + """ + + def __init__(self, mapping: t.Mapping[str, str]) -> None: + self.mapping = mapping + + def get_source( + self, environment: "Environment", template: str + ) -> t.Tuple[str, None, t.Callable[[], bool]]: + if template in self.mapping: + source = self.mapping[template] + return source, None, lambda: source == self.mapping.get(template) + raise TemplateNotFound(template) + + def list_templates(self) -> t.List[str]: + return sorted(self.mapping) + + +class FunctionLoader(BaseLoader): + """A loader that is passed a function which does the loading. The + function receives the name of the template and has to return either + a string with the template source, a tuple in the form ``(source, + filename, uptodatefunc)`` or `None` if the template does not exist. + + >>> def load_template(name): + ... if name == 'index.html': + ... return '...' + ... + >>> loader = FunctionLoader(load_template) + + The `uptodatefunc` is a function that is called if autoreload is enabled + and has to return `True` if the template is still up to date. For more + details have a look at :meth:`BaseLoader.get_source` which has the same + return value. + """ + + def __init__( + self, + load_func: t.Callable[ + [str], + t.Optional[ + t.Union[ + str, t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]] + ] + ], + ], + ) -> None: + self.load_func = load_func + + def get_source( + self, environment: "Environment", template: str + ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]: + rv = self.load_func(template) + + if rv is None: + raise TemplateNotFound(template) + + if isinstance(rv, str): + return rv, None, None + + return rv + + +class PrefixLoader(BaseLoader): + """A loader that is passed a dict of loaders where each loader is bound + to a prefix. The prefix is delimited from the template by a slash per + default, which can be changed by setting the `delimiter` argument to + something else:: + + loader = PrefixLoader({ + 'app1': PackageLoader('mypackage.app1'), + 'app2': PackageLoader('mypackage.app2') + }) + + By loading ``'app1/index.html'`` the file from the app1 package is loaded, + by loading ``'app2/index.html'`` the file from the second. + """ + + def __init__( + self, mapping: t.Mapping[str, BaseLoader], delimiter: str = "/" + ) -> None: + self.mapping = mapping + self.delimiter = delimiter + + def get_loader(self, template: str) -> t.Tuple[BaseLoader, str]: + try: + prefix, name = template.split(self.delimiter, 1) + loader = self.mapping[prefix] + except (ValueError, KeyError) as e: + raise TemplateNotFound(template) from e + return loader, name + + def get_source( + self, environment: "Environment", template: str + ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]: + loader, name = self.get_loader(template) + try: + return loader.get_source(environment, name) + except TemplateNotFound as e: + # re-raise the exception with the correct filename here. + # (the one that includes the prefix) + raise TemplateNotFound(template) from e + + @internalcode + def load( + self, + environment: "Environment", + name: str, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> "Template": + loader, local_name = self.get_loader(name) + try: + return loader.load(environment, local_name, globals) + except TemplateNotFound as e: + # re-raise the exception with the correct filename here. 
+ # (the one that includes the prefix) + raise TemplateNotFound(name) from e + + def list_templates(self) -> t.List[str]: + result = [] + for prefix, loader in self.mapping.items(): + for template in loader.list_templates(): + result.append(prefix + self.delimiter + template) + return result + + +class ChoiceLoader(BaseLoader): + """This loader works like the `PrefixLoader` just that no prefix is + specified. If a template could not be found by one loader the next one + is tried. + + >>> loader = ChoiceLoader([ + ... FileSystemLoader('/path/to/user/templates'), + ... FileSystemLoader('/path/to/system/templates') + ... ]) + + This is useful if you want to allow users to override builtin templates + from a different location. + """ + + def __init__(self, loaders: t.Sequence[BaseLoader]) -> None: + self.loaders = loaders + + def get_source( + self, environment: "Environment", template: str + ) -> t.Tuple[str, t.Optional[str], t.Optional[t.Callable[[], bool]]]: + for loader in self.loaders: + try: + return loader.get_source(environment, template) + except TemplateNotFound: + pass + raise TemplateNotFound(template) + + @internalcode + def load( + self, + environment: "Environment", + name: str, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> "Template": + for loader in self.loaders: + try: + return loader.load(environment, name, globals) + except TemplateNotFound: + pass + raise TemplateNotFound(name) + + def list_templates(self) -> t.List[str]: + found = set() + for loader in self.loaders: + found.update(loader.list_templates()) + return sorted(found) + + +class _TemplateModule(ModuleType): + """Like a normal module but with support for weak references""" + + +class ModuleLoader(BaseLoader): + """This loader loads templates from precompiled templates. + + Example usage: + + >>> loader = ChoiceLoader([ + ... ModuleLoader('/path/to/compiled/templates'), + ... FileSystemLoader('/path/to/templates') + ... ]) + + Templates can be precompiled with :meth:`Environment.compile_templates`. + """ + + has_source_access = False + + def __init__( + self, path: t.Union[str, os.PathLike, t.Sequence[t.Union[str, os.PathLike]]] + ) -> None: + package_name = f"_jinja2_module_templates_{id(self):x}" + + # create a fake module that looks for the templates in the + # path given. + mod = _TemplateModule(package_name) + + if not isinstance(path, abc.Iterable) or isinstance(path, str): + path = [path] + + mod.__path__ = [os.fspath(p) for p in path] + + sys.modules[package_name] = weakref.proxy( + mod, lambda x: sys.modules.pop(package_name, None) + ) + + # the only strong reference, the sys.modules entry is weak + # so that the garbage collector can remove it once the + # loader that created it goes out of business. 
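+        # mod.__path__ (set above) is what lets __import__ in load()
+        # locate the compiled template modules on the given search path.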
+ self.module = mod + self.package_name = package_name + + @staticmethod + def get_template_key(name: str) -> str: + return "tmpl_" + sha1(name.encode("utf-8")).hexdigest() + + @staticmethod + def get_module_filename(name: str) -> str: + return ModuleLoader.get_template_key(name) + ".py" + + @internalcode + def load( + self, + environment: "Environment", + name: str, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ) -> "Template": + key = self.get_template_key(name) + module = f"{self.package_name}.{key}" + mod = getattr(self.module, module, None) + + if mod is None: + try: + mod = __import__(module, None, None, ["root"]) + except ImportError as e: + raise TemplateNotFound(name) from e + + # remove the entry from sys.modules, we only want the attribute + # on the module object we have stored on the loader. + sys.modules.pop(module, None) + + if globals is None: + globals = {} + + return environment.template_class.from_module_dict( + environment, mod.__dict__, globals + ) diff --git a/backend/test/lib/python3.8/site-packages/jinja2/meta.py b/backend/test/lib/python3.8/site-packages/jinja2/meta.py new file mode 100644 index 0000000000000000000000000000000000000000..0057d6eabade5e964e6ef0e3ac8ed2dd67494b03 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/meta.py @@ -0,0 +1,111 @@ +"""Functions that expose information about templates that might be +interesting for introspection. +""" +import typing as t + +from . import nodes +from .compiler import CodeGenerator +from .compiler import Frame + +if t.TYPE_CHECKING: + from .environment import Environment + + +class TrackingCodeGenerator(CodeGenerator): + """We abuse the code generator for introspection.""" + + def __init__(self, environment: "Environment") -> None: + super().__init__(environment, "<introspection>", "<introspection>") + self.undeclared_identifiers: t.Set[str] = set() + + def write(self, x: str) -> None: + """Don't write.""" + + def enter_frame(self, frame: Frame) -> None: + """Remember all undeclared identifiers.""" + super().enter_frame(frame) + + for _, (action, param) in frame.symbols.loads.items(): + if action == "resolve" and param not in self.environment.globals: + self.undeclared_identifiers.add(param) + + +def find_undeclared_variables(ast: nodes.Template) -> t.Set[str]: + """Returns a set of all variables in the AST that will be looked up from + the context at runtime. Because at compile time it's not known which + variables will be used depending on the path the execution takes at + runtime, all variables are returned. + + >>> from jinja2 import Environment, meta + >>> env = Environment() + >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}') + >>> meta.find_undeclared_variables(ast) == {'bar'} + True + + .. admonition:: Implementation + + Internally the code generator is used for finding undeclared variables. + This is good to know because the code generator might raise a + :exc:`TemplateAssertionError` during compilation and as a matter of + fact this function can currently raise that exception as well. + """ + codegen = TrackingCodeGenerator(ast.environment) # type: ignore + codegen.visit(ast) + return codegen.undeclared_identifiers + + +_ref_types = (nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include) +_RefType = t.Union[nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include] + + +def find_referenced_templates(ast: nodes.Template) -> t.Iterator[t.Optional[str]]: + """Finds all the referenced templates from the AST. 
This will return an + iterator over all the hardcoded template extensions, inclusions and + imports. If dynamic inheritance or inclusion is used, `None` will be + yielded. + + >>> from jinja2 import Environment, meta + >>> env = Environment() + >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}') + >>> list(meta.find_referenced_templates(ast)) + ['layout.html', None] + + This function is useful for dependency tracking. For example if you want + to rebuild parts of the website after a layout template has changed. + """ + template_name: t.Any + + for node in ast.find_all(_ref_types): + template: nodes.Expr = node.template # type: ignore + + if not isinstance(template, nodes.Const): + # a tuple with some non consts in there + if isinstance(template, (nodes.Tuple, nodes.List)): + for template_name in template.items: + # something const, only yield the strings and ignore + # non-string consts that really just make no sense + if isinstance(template_name, nodes.Const): + if isinstance(template_name.value, str): + yield template_name.value + # something dynamic in there + else: + yield None + # something dynamic we don't know about here + else: + yield None + continue + # constant is a basestring, direct template name + if isinstance(template.value, str): + yield template.value + # a tuple or list (latter *should* not happen) made of consts, + # yield the consts that are strings. We could warn here for + # non string values + elif isinstance(node, nodes.Include) and isinstance( + template.value, (tuple, list) + ): + for template_name in template.value: + if isinstance(template_name, str): + yield template_name + # something else we don't care about, we could warn here + else: + yield None diff --git a/backend/test/lib/python3.8/site-packages/jinja2/nativetypes.py b/backend/test/lib/python3.8/site-packages/jinja2/nativetypes.py new file mode 100644 index 0000000000000000000000000000000000000000..ac0861034821772a50e53bfc3d3ff72e7aad5b1b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/nativetypes.py @@ -0,0 +1,130 @@ +import typing as t +from ast import literal_eval +from ast import parse +from itertools import chain +from itertools import islice +from types import GeneratorType + +from . import nodes +from .compiler import CodeGenerator +from .compiler import Frame +from .compiler import has_safe_repr +from .environment import Environment +from .environment import Template + + +def native_concat(values: t.Iterable[t.Any]) -> t.Optional[t.Any]: + """Return a native Python type from the list of compiled nodes. If + the result is a single node, its value is returned. Otherwise, the + nodes are concatenated as strings. If the result can be parsed with + :func:`ast.literal_eval`, the parsed value is returned. Otherwise, + the string is returned. + + :param values: Iterable of outputs to concatenate. + """ + head = list(islice(values, 2)) + + if not head: + return None + + if len(head) == 1: + raw = head[0] + if not isinstance(raw, str): + return raw + else: + if isinstance(values, GeneratorType): + values = chain(head, values) + raw = "".join([str(v) for v in values]) + + try: + return literal_eval( + # In Python 3.10+ ast.literal_eval removes leading spaces/tabs + # from the given string. For backwards compatibility we need to + # parse the string ourselves without removing leading spaces/tabs. 
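+            # Illustrative behavior (inputs are made up):
+            #   native_concat(["1"])              -> 1
+            #   native_concat(["[1, ", "2]"])     -> [1, 2]
+            #   native_concat(["not ", "python"]) -> "not python"
+            # (the last one falls through to the except clause below)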
+ parse(raw, mode="eval") + ) + except (ValueError, SyntaxError, MemoryError): + return raw + + +class NativeCodeGenerator(CodeGenerator): + """A code generator which renders Python types by not adding + ``str()`` around output nodes. + """ + + @staticmethod + def _default_finalize(value: t.Any) -> t.Any: + return value + + def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: + return repr("".join([str(v) for v in group])) + + def _output_child_to_const( + self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo + ) -> t.Any: + const = node.as_const(frame.eval_ctx) + + if not has_safe_repr(const): + raise nodes.Impossible() + + if isinstance(node, nodes.TemplateData): + return const + + return finalize.const(const) # type: ignore + + def _output_child_pre( + self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo + ) -> None: + if finalize.src is not None: + self.write(finalize.src) + + def _output_child_post( + self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo + ) -> None: + if finalize.src is not None: + self.write(")") + + +class NativeEnvironment(Environment): + """An environment that renders templates to native Python types.""" + + code_generator_class = NativeCodeGenerator + concat = staticmethod(native_concat) # type: ignore + + +class NativeTemplate(Template): + environment_class = NativeEnvironment + + def render(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Render the template to produce a native Python type. If the + result is a single node, its value is returned. Otherwise, the + nodes are concatenated as strings. If the result can be parsed + with :func:`ast.literal_eval`, the parsed value is returned. + Otherwise, the string is returned. + """ + ctx = self.new_context(dict(*args, **kwargs)) + + try: + return self.environment_class.concat( # type: ignore + self.root_render_func(ctx) # type: ignore + ) + except Exception: + return self.environment.handle_exception() + + async def render_async(self, *args: t.Any, **kwargs: t.Any) -> t.Any: + if not self.environment.is_async: + raise RuntimeError( + "The environment was not created with async mode enabled." + ) + + ctx = self.new_context(dict(*args, **kwargs)) + + try: + return self.environment_class.concat( # type: ignore + [n async for n in self.root_render_func(ctx)] # type: ignore + ) + except Exception: + return self.environment.handle_exception() + + +NativeEnvironment.template_class = NativeTemplate diff --git a/backend/test/lib/python3.8/site-packages/jinja2/nodes.py b/backend/test/lib/python3.8/site-packages/jinja2/nodes.py new file mode 100644 index 0000000000000000000000000000000000000000..b2f88d9d9c19a2cb5d03b0158c743c6b947a29ea --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/nodes.py @@ -0,0 +1,1204 @@ +"""AST nodes generated by the parser for the compiler. Also provides +some node tree helper functions used by the parser and compiler in order +to normalize nodes. 
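+
+A small, illustrative doctest (not from the original docs; an attached
+environment is required for constant folding of operator nodes):
+
+>>> from jinja2 import Environment
+>>> from jinja2 import nodes
+>>> add = nodes.Add(nodes.Const(1), nodes.Const(2), environment=Environment())
+>>> add.as_const()
+3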
+""" +import inspect +import operator +import typing as t +from collections import deque + +from markupsafe import Markup + +from .utils import _PassArg + +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment + +_NodeBound = t.TypeVar("_NodeBound", bound="Node") + +_binop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = { + "*": operator.mul, + "/": operator.truediv, + "//": operator.floordiv, + "**": operator.pow, + "%": operator.mod, + "+": operator.add, + "-": operator.sub, +} + +_uaop_to_func: t.Dict[str, t.Callable[[t.Any], t.Any]] = { + "not": operator.not_, + "+": operator.pos, + "-": operator.neg, +} + +_cmpop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = { + "eq": operator.eq, + "ne": operator.ne, + "gt": operator.gt, + "gteq": operator.ge, + "lt": operator.lt, + "lteq": operator.le, + "in": lambda a, b: a in b, + "notin": lambda a, b: a not in b, +} + + +class Impossible(Exception): + """Raised if the node could not perform a requested action.""" + + +class NodeType(type): + """A metaclass for nodes that handles the field and attribute + inheritance. fields and attributes from the parent class are + automatically forwarded to the child.""" + + def __new__(mcs, name, bases, d): # type: ignore + for attr in "fields", "attributes": + storage = [] + storage.extend(getattr(bases[0] if bases else object, attr, ())) + storage.extend(d.get(attr, ())) + assert len(bases) <= 1, "multiple inheritance not allowed" + assert len(storage) == len(set(storage)), "layout conflict" + d[attr] = tuple(storage) + d.setdefault("abstract", False) + return type.__new__(mcs, name, bases, d) + + +class EvalContext: + """Holds evaluation time information. Custom attributes can be attached + to it in extensions. + """ + + def __init__( + self, environment: "Environment", template_name: t.Optional[str] = None + ) -> None: + self.environment = environment + if callable(environment.autoescape): + self.autoescape = environment.autoescape(template_name) + else: + self.autoescape = environment.autoescape + self.volatile = False + + def save(self) -> t.Mapping[str, t.Any]: + return self.__dict__.copy() + + def revert(self, old: t.Mapping[str, t.Any]) -> None: + self.__dict__.clear() + self.__dict__.update(old) + + +def get_eval_context(node: "Node", ctx: t.Optional[EvalContext]) -> EvalContext: + if ctx is None: + if node.environment is None: + raise RuntimeError( + "if no eval context is passed, the node must have an" + " attached environment." + ) + return EvalContext(node.environment) + return ctx + + +class Node(metaclass=NodeType): + """Baseclass for all Jinja nodes. There are a number of nodes available + of different types. There are four major types: + + - :class:`Stmt`: statements + - :class:`Expr`: expressions + - :class:`Helper`: helper nodes + - :class:`Template`: the outermost wrapper node + + All nodes have fields and attributes. Fields may be other nodes, lists, + or arbitrary values. Fields are passed to the constructor as regular + positional arguments, attributes as keyword arguments. Each node has + two attributes: `lineno` (the line number of the node) and `environment`. + The `environment` attribute is set at the end of the parsing process for + all nodes automatically. + """ + + fields: t.Tuple[str, ...] = () + attributes: t.Tuple[str, ...] 
= ("lineno", "environment") + abstract = True + + lineno: int + environment: t.Optional["Environment"] + + def __init__(self, *fields: t.Any, **attributes: t.Any) -> None: + if self.abstract: + raise TypeError("abstract nodes are not instantiable") + if fields: + if len(fields) != len(self.fields): + if not self.fields: + raise TypeError(f"{type(self).__name__!r} takes 0 arguments") + raise TypeError( + f"{type(self).__name__!r} takes 0 or {len(self.fields)}" + f" argument{'s' if len(self.fields) != 1 else ''}" + ) + for name, arg in zip(self.fields, fields): + setattr(self, name, arg) + for attr in self.attributes: + setattr(self, attr, attributes.pop(attr, None)) + if attributes: + raise TypeError(f"unknown attribute {next(iter(attributes))!r}") + + def iter_fields( + self, + exclude: t.Optional[t.Container[str]] = None, + only: t.Optional[t.Container[str]] = None, + ) -> t.Iterator[t.Tuple[str, t.Any]]: + """This method iterates over all fields that are defined and yields + ``(key, value)`` tuples. Per default all fields are returned, but + it's possible to limit that to some fields by providing the `only` + parameter or to exclude some using the `exclude` parameter. Both + should be sets or tuples of field names. + """ + for name in self.fields: + if ( + (exclude is None and only is None) + or (exclude is not None and name not in exclude) + or (only is not None and name in only) + ): + try: + yield name, getattr(self, name) + except AttributeError: + pass + + def iter_child_nodes( + self, + exclude: t.Optional[t.Container[str]] = None, + only: t.Optional[t.Container[str]] = None, + ) -> t.Iterator["Node"]: + """Iterates over all direct child nodes of the node. This iterates + over all fields and yields the values of they are nodes. If the value + of a field is a list all the nodes in that list are returned. + """ + for _, item in self.iter_fields(exclude, only): + if isinstance(item, list): + for n in item: + if isinstance(n, Node): + yield n + elif isinstance(item, Node): + yield item + + def find(self, node_type: t.Type[_NodeBound]) -> t.Optional[_NodeBound]: + """Find the first node of a given type. If no such node exists the + return value is `None`. + """ + for result in self.find_all(node_type): + return result + + return None + + def find_all( + self, node_type: t.Union[t.Type[_NodeBound], t.Tuple[t.Type[_NodeBound], ...]] + ) -> t.Iterator[_NodeBound]: + """Find all the nodes of a given type. If the type is a tuple, + the check is performed for any of the tuple items. + """ + for child in self.iter_child_nodes(): + if isinstance(child, node_type): + yield child # type: ignore + yield from child.find_all(node_type) + + def set_ctx(self, ctx: str) -> "Node": + """Reset the context of a node and all child nodes. Per default the + parser will all generate nodes that have a 'load' context as it's the + most common one. This method is used in the parser to set assignment + targets and other nodes to a store context. 
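+
+        A minimal illustration (not from the original docs):
+
+        >>> from jinja2 import nodes
+        >>> nodes.Name("x", "load").set_ctx("store").ctx
+        'store'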
+ """ + todo = deque([self]) + while todo: + node = todo.popleft() + if "ctx" in node.fields: + node.ctx = ctx # type: ignore + todo.extend(node.iter_child_nodes()) + return self + + def set_lineno(self, lineno: int, override: bool = False) -> "Node": + """Set the line numbers of the node and children.""" + todo = deque([self]) + while todo: + node = todo.popleft() + if "lineno" in node.attributes: + if node.lineno is None or override: + node.lineno = lineno + todo.extend(node.iter_child_nodes()) + return self + + def set_environment(self, environment: "Environment") -> "Node": + """Set the environment for all nodes.""" + todo = deque([self]) + while todo: + node = todo.popleft() + node.environment = environment + todo.extend(node.iter_child_nodes()) + return self + + def __eq__(self, other: t.Any) -> bool: + if type(self) is not type(other): + return NotImplemented + + return tuple(self.iter_fields()) == tuple(other.iter_fields()) + + __hash__ = object.__hash__ + + def __repr__(self) -> str: + args_str = ", ".join(f"{a}={getattr(self, a, None)!r}" for a in self.fields) + return f"{type(self).__name__}({args_str})" + + def dump(self) -> str: + def _dump(node: t.Union[Node, t.Any]) -> None: + if not isinstance(node, Node): + buf.append(repr(node)) + return + + buf.append(f"nodes.{type(node).__name__}(") + if not node.fields: + buf.append(")") + return + for idx, field in enumerate(node.fields): + if idx: + buf.append(", ") + value = getattr(node, field) + if isinstance(value, list): + buf.append("[") + for idx, item in enumerate(value): + if idx: + buf.append(", ") + _dump(item) + buf.append("]") + else: + _dump(value) + buf.append(")") + + buf: t.List[str] = [] + _dump(self) + return "".join(buf) + + +class Stmt(Node): + """Base node for all statements.""" + + abstract = True + + +class Helper(Node): + """Nodes that exist in a specific context only.""" + + abstract = True + + +class Template(Node): + """Node that represents a template. This must be the outermost node that + is passed to the compiler. + """ + + fields = ("body",) + body: t.List[Node] + + +class Output(Stmt): + """A node that holds multiple expressions which are then printed out. + This is used both for the `print` statement and the regular template data. + """ + + fields = ("nodes",) + nodes: t.List["Expr"] + + +class Extends(Stmt): + """Represents an extends statement.""" + + fields = ("template",) + template: "Expr" + + +class For(Stmt): + """The for loop. `target` is the target for the iteration (usually a + :class:`Name` or :class:`Tuple`), `iter` the iterable. `body` is a list + of nodes that are used as loop-body, and `else_` a list of nodes for the + `else` block. If no else node exists it has to be an empty list. + + For filtered nodes an expression can be stored as `test`, otherwise `None`. + """ + + fields = ("target", "iter", "body", "else_", "test", "recursive") + target: Node + iter: Node + body: t.List[Node] + else_: t.List[Node] + test: t.Optional[Node] + recursive: bool + + +class If(Stmt): + """If `test` is true, `body` is rendered, else `else_`.""" + + fields = ("test", "body", "elif_", "else_") + test: Node + body: t.List[Node] + elif_: t.List["If"] + else_: t.List[Node] + + +class Macro(Stmt): + """A macro definition. `name` is the name of the macro, `args` a list of + arguments and `defaults` a list of defaults if there are any. `body` is + a list of nodes for the macro body. 
+ """ + + fields = ("name", "args", "defaults", "body") + name: str + args: t.List["Name"] + defaults: t.List["Expr"] + body: t.List[Node] + + +class CallBlock(Stmt): + """Like a macro without a name but a call instead. `call` is called with + the unnamed macro as `caller` argument this node holds. + """ + + fields = ("call", "args", "defaults", "body") + call: "Call" + args: t.List["Name"] + defaults: t.List["Expr"] + body: t.List[Node] + + +class FilterBlock(Stmt): + """Node for filter sections.""" + + fields = ("body", "filter") + body: t.List[Node] + filter: "Filter" + + +class With(Stmt): + """Specific node for with statements. In older versions of Jinja the + with statement was implemented on the base of the `Scope` node instead. + + .. versionadded:: 2.9.3 + """ + + fields = ("targets", "values", "body") + targets: t.List["Expr"] + values: t.List["Expr"] + body: t.List[Node] + + +class Block(Stmt): + """A node that represents a block. + + .. versionchanged:: 3.0.0 + the `required` field was added. + """ + + fields = ("name", "body", "scoped", "required") + name: str + body: t.List[Node] + scoped: bool + required: bool + + +class Include(Stmt): + """A node that represents the include tag.""" + + fields = ("template", "with_context", "ignore_missing") + template: "Expr" + with_context: bool + ignore_missing: bool + + +class Import(Stmt): + """A node that represents the import tag.""" + + fields = ("template", "target", "with_context") + template: "Expr" + target: str + with_context: bool + + +class FromImport(Stmt): + """A node that represents the from import tag. It's important to not + pass unsafe names to the name attribute. The compiler translates the + attribute lookups directly into getattr calls and does *not* use the + subscript callback of the interface. As exported variables may not + start with double underscores (which the parser asserts) this is not a + problem for regular Jinja code, but if this node is used in an extension + extra care must be taken. + + The list of names may contain tuples if aliases are wanted. + """ + + fields = ("template", "names", "with_context") + template: "Expr" + names: t.List[t.Union[str, t.Tuple[str, str]]] + with_context: bool + + +class ExprStmt(Stmt): + """A statement that evaluates an expression and discards the result.""" + + fields = ("node",) + node: Node + + +class Assign(Stmt): + """Assigns an expression to a target.""" + + fields = ("target", "node") + target: "Expr" + node: Node + + +class AssignBlock(Stmt): + """Assigns a block to a target.""" + + fields = ("target", "filter", "body") + target: "Expr" + filter: t.Optional["Filter"] + body: t.List[Node] + + +class Expr(Node): + """Baseclass for all expressions.""" + + abstract = True + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: + """Return the value of the expression as constant or raise + :exc:`Impossible` if this was not possible. + + An :class:`EvalContext` can be provided, if none is given + a default context is created which requires the nodes to have + an attached environment. + + .. versionchanged:: 2.4 + the `eval_ctx` parameter was added. 
+ """ + raise Impossible() + + def can_assign(self) -> bool: + """Check if it's possible to assign something to this node.""" + return False + + +class BinExpr(Expr): + """Baseclass for all binary expressions.""" + + fields = ("left", "right") + left: Expr + right: Expr + operator: str + abstract = True + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: + eval_ctx = get_eval_context(self, eval_ctx) + + # intercepted operators cannot be folded at compile time + if ( + eval_ctx.environment.sandboxed + and self.operator in eval_ctx.environment.intercepted_binops # type: ignore + ): + raise Impossible() + f = _binop_to_func[self.operator] + try: + return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx)) + except Exception as e: + raise Impossible() from e + + +class UnaryExpr(Expr): + """Baseclass for all unary expressions.""" + + fields = ("node",) + node: Expr + operator: str + abstract = True + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: + eval_ctx = get_eval_context(self, eval_ctx) + + # intercepted operators cannot be folded at compile time + if ( + eval_ctx.environment.sandboxed + and self.operator in eval_ctx.environment.intercepted_unops # type: ignore + ): + raise Impossible() + f = _uaop_to_func[self.operator] + try: + return f(self.node.as_const(eval_ctx)) + except Exception as e: + raise Impossible() from e + + +class Name(Expr): + """Looks up a name or stores a value in a name. + The `ctx` of the node can be one of the following values: + + - `store`: store a value in the name + - `load`: load that name + - `param`: like `store` but if the name was defined as function parameter. + """ + + fields = ("name", "ctx") + name: str + ctx: str + + def can_assign(self) -> bool: + return self.name not in {"true", "false", "none", "True", "False", "None"} + + +class NSRef(Expr): + """Reference to a namespace value assignment""" + + fields = ("name", "attr") + name: str + attr: str + + def can_assign(self) -> bool: + # We don't need any special checks here; NSRef assignments have a + # runtime check to ensure the target is a namespace object which will + # have been checked already as it is created using a normal assignment + # which goes through a `Name` node. + return True + + +class Literal(Expr): + """Baseclass for literals.""" + + abstract = True + + +class Const(Literal): + """All constant values. The parser will return this node for simple + constants such as ``42`` or ``"foo"`` but it can be used to store more + complex values such as lists too. Only constants with a safe + representation (objects where ``eval(repr(x)) == x`` is true). + """ + + fields = ("value",) + value: t.Any + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: + return self.value + + @classmethod + def from_untrusted( + cls, + value: t.Any, + lineno: t.Optional[int] = None, + environment: "t.Optional[Environment]" = None, + ) -> "Const": + """Return a const object if the value is representable as + constant value in the generated code, otherwise it will raise + an `Impossible` exception. 
+ """ + from .compiler import has_safe_repr + + if not has_safe_repr(value): + raise Impossible() + return cls(value, lineno=lineno, environment=environment) + + +class TemplateData(Literal): + """A constant template string.""" + + fields = ("data",) + data: str + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str: + eval_ctx = get_eval_context(self, eval_ctx) + if eval_ctx.volatile: + raise Impossible() + if eval_ctx.autoescape: + return Markup(self.data) + return self.data + + +class Tuple(Literal): + """For loop unpacking and some other things like multiple arguments + for subscripts. Like for :class:`Name` `ctx` specifies if the tuple + is used for loading the names or storing. + """ + + fields = ("items", "ctx") + items: t.List[Expr] + ctx: str + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Tuple[t.Any, ...]: + eval_ctx = get_eval_context(self, eval_ctx) + return tuple(x.as_const(eval_ctx) for x in self.items) + + def can_assign(self) -> bool: + for item in self.items: + if not item.can_assign(): + return False + return True + + +class List(Literal): + """Any list literal such as ``[1, 2, 3]``""" + + fields = ("items",) + items: t.List[Expr] + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.List[t.Any]: + eval_ctx = get_eval_context(self, eval_ctx) + return [x.as_const(eval_ctx) for x in self.items] + + +class Dict(Literal): + """Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of + :class:`Pair` nodes. + """ + + fields = ("items",) + items: t.List["Pair"] + + def as_const( + self, eval_ctx: t.Optional[EvalContext] = None + ) -> t.Dict[t.Any, t.Any]: + eval_ctx = get_eval_context(self, eval_ctx) + return dict(x.as_const(eval_ctx) for x in self.items) + + +class Pair(Helper): + """A key, value pair for dicts.""" + + fields = ("key", "value") + key: Expr + value: Expr + + def as_const( + self, eval_ctx: t.Optional[EvalContext] = None + ) -> t.Tuple[t.Any, t.Any]: + eval_ctx = get_eval_context(self, eval_ctx) + return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx) + + +class Keyword(Helper): + """A key, value pair for keyword arguments where key is a string.""" + + fields = ("key", "value") + key: str + value: Expr + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Tuple[str, t.Any]: + eval_ctx = get_eval_context(self, eval_ctx) + return self.key, self.value.as_const(eval_ctx) + + +class CondExpr(Expr): + """A conditional expression (inline if expression). 
(``{{ + foo if bar else baz }}``) + """ + + fields = ("test", "expr1", "expr2") + test: Expr + expr1: Expr + expr2: t.Optional[Expr] + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: + eval_ctx = get_eval_context(self, eval_ctx) + if self.test.as_const(eval_ctx): + return self.expr1.as_const(eval_ctx) + + # if we evaluate to an undefined object, we better do that at runtime + if self.expr2 is None: + raise Impossible() + + return self.expr2.as_const(eval_ctx) + + +def args_as_const( + node: t.Union["_FilterTestCommon", "Call"], eval_ctx: t.Optional[EvalContext] +) -> t.Tuple[t.List[t.Any], t.Dict[t.Any, t.Any]]: + args = [x.as_const(eval_ctx) for x in node.args] + kwargs = dict(x.as_const(eval_ctx) for x in node.kwargs) + + if node.dyn_args is not None: + try: + args.extend(node.dyn_args.as_const(eval_ctx)) + except Exception as e: + raise Impossible() from e + + if node.dyn_kwargs is not None: + try: + kwargs.update(node.dyn_kwargs.as_const(eval_ctx)) + except Exception as e: + raise Impossible() from e + + return args, kwargs + + +class _FilterTestCommon(Expr): + fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs") + node: Expr + name: str + args: t.List[Expr] + kwargs: t.List[Pair] + dyn_args: t.Optional[Expr] + dyn_kwargs: t.Optional[Expr] + abstract = True + _is_filter = True + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: + eval_ctx = get_eval_context(self, eval_ctx) + + if eval_ctx.volatile: + raise Impossible() + + if self._is_filter: + env_map = eval_ctx.environment.filters + else: + env_map = eval_ctx.environment.tests + + func = env_map.get(self.name) + pass_arg = _PassArg.from_obj(func) # type: ignore + + if func is None or pass_arg is _PassArg.context: + raise Impossible() + + if eval_ctx.environment.is_async and ( + getattr(func, "jinja_async_variant", False) is True + or inspect.iscoroutinefunction(func) + ): + raise Impossible() + + args, kwargs = args_as_const(self, eval_ctx) + args.insert(0, self.node.as_const(eval_ctx)) + + if pass_arg is _PassArg.eval_context: + args.insert(0, eval_ctx) + elif pass_arg is _PassArg.environment: + args.insert(0, eval_ctx.environment) + + try: + return func(*args, **kwargs) + except Exception as e: + raise Impossible() from e + + +class Filter(_FilterTestCommon): + """Apply a filter to an expression. ``name`` is the name of the + filter, the other fields are the same as :class:`Call`. + + If ``node`` is ``None``, the filter is being used in a filter block + and is applied to the content of the block. + """ + + node: t.Optional[Expr] # type: ignore + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: + if self.node is None: + raise Impossible() + + return super().as_const(eval_ctx=eval_ctx) + + +class Test(_FilterTestCommon): + """Apply a test to an expression. ``name`` is the name of the test, + the other field are the same as :class:`Call`. + + .. versionchanged:: 3.0 + ``as_const`` shares the same logic for filters and tests. Tests + check for volatile, async, and ``@pass_context`` etc. + decorators. + """ + + _is_filter = False + + +class Call(Expr): + """Calls an expression. `args` is a list of arguments, `kwargs` a list + of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args` + and `dyn_kwargs` has to be either `None` or a node that is used as + node for dynamic positional (``*args``) or keyword (``**kwargs``) + arguments. 
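+
+    An illustrative parse (not from the original docs):
+
+    >>> from jinja2 import Environment
+    >>> call = Environment().parse("{{ f(1, x=2) }}").body[0].nodes[0]
+    >>> type(call).__name__, len(call.args), call.kwargs[0].key
+    ('Call', 1, 'x')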
+ """ + + fields = ("node", "args", "kwargs", "dyn_args", "dyn_kwargs") + node: Expr + args: t.List[Expr] + kwargs: t.List[Keyword] + dyn_args: t.Optional[Expr] + dyn_kwargs: t.Optional[Expr] + + +class Getitem(Expr): + """Get an attribute or item from an expression and prefer the item.""" + + fields = ("node", "arg", "ctx") + node: Expr + arg: Expr + ctx: str + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: + if self.ctx != "load": + raise Impossible() + + eval_ctx = get_eval_context(self, eval_ctx) + + try: + return eval_ctx.environment.getitem( + self.node.as_const(eval_ctx), self.arg.as_const(eval_ctx) + ) + except Exception as e: + raise Impossible() from e + + +class Getattr(Expr): + """Get an attribute or item from an expression that is a ascii-only + bytestring and prefer the attribute. + """ + + fields = ("node", "attr", "ctx") + node: Expr + attr: str + ctx: str + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: + if self.ctx != "load": + raise Impossible() + + eval_ctx = get_eval_context(self, eval_ctx) + + try: + return eval_ctx.environment.getattr(self.node.as_const(eval_ctx), self.attr) + except Exception as e: + raise Impossible() from e + + +class Slice(Expr): + """Represents a slice object. This must only be used as argument for + :class:`Subscript`. + """ + + fields = ("start", "stop", "step") + start: t.Optional[Expr] + stop: t.Optional[Expr] + step: t.Optional[Expr] + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> slice: + eval_ctx = get_eval_context(self, eval_ctx) + + def const(obj: t.Optional[Expr]) -> t.Optional[t.Any]: + if obj is None: + return None + return obj.as_const(eval_ctx) + + return slice(const(self.start), const(self.stop), const(self.step)) + + +class Concat(Expr): + """Concatenates the list of expressions provided after converting + them to strings. + """ + + fields = ("nodes",) + nodes: t.List[Expr] + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str: + eval_ctx = get_eval_context(self, eval_ctx) + return "".join(str(x.as_const(eval_ctx)) for x in self.nodes) + + +class Compare(Expr): + """Compares an expression with some other expressions. `ops` must be a + list of :class:`Operand`\\s. + """ + + fields = ("expr", "ops") + expr: Expr + ops: t.List["Operand"] + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: + eval_ctx = get_eval_context(self, eval_ctx) + result = value = self.expr.as_const(eval_ctx) + + try: + for op in self.ops: + new_value = op.expr.as_const(eval_ctx) + result = _cmpop_to_func[op.op](value, new_value) + + if not result: + return False + + value = new_value + except Exception as e: + raise Impossible() from e + + return result + + +class Operand(Helper): + """Holds an operator and an expression.""" + + fields = ("op", "expr") + op: str + expr: Expr + + +class Mul(BinExpr): + """Multiplies the left with the right node.""" + + operator = "*" + + +class Div(BinExpr): + """Divides the left by the right node.""" + + operator = "/" + + +class FloorDiv(BinExpr): + """Divides the left by the right node and converts the + result into an integer by truncating. 
+ """ + + operator = "//" + + +class Add(BinExpr): + """Add the left to the right node.""" + + operator = "+" + + +class Sub(BinExpr): + """Subtract the right from the left node.""" + + operator = "-" + + +class Mod(BinExpr): + """Left modulo right.""" + + operator = "%" + + +class Pow(BinExpr): + """Left to the power of right.""" + + operator = "**" + + +class And(BinExpr): + """Short circuited AND.""" + + operator = "and" + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: + eval_ctx = get_eval_context(self, eval_ctx) + return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx) + + +class Or(BinExpr): + """Short circuited OR.""" + + operator = "or" + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any: + eval_ctx = get_eval_context(self, eval_ctx) + return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx) + + +class Not(UnaryExpr): + """Negate the expression.""" + + operator = "not" + + +class Neg(UnaryExpr): + """Make the expression negative.""" + + operator = "-" + + +class Pos(UnaryExpr): + """Make the expression positive (noop for most expressions)""" + + operator = "+" + + +# Helpers for extensions + + +class EnvironmentAttribute(Expr): + """Loads an attribute from the environment object. This is useful for + extensions that want to call a callback stored on the environment. + """ + + fields = ("name",) + name: str + + +class ExtensionAttribute(Expr): + """Returns the attribute of an extension bound to the environment. + The identifier is the identifier of the :class:`Extension`. + + This node is usually constructed by calling the + :meth:`~jinja2.ext.Extension.attr` method on an extension. + """ + + fields = ("identifier", "name") + identifier: str + name: str + + +class ImportedName(Expr): + """If created with an import name the import name is returned on node + access. For example ``ImportedName('cgi.escape')`` returns the `escape` + function from the cgi module on evaluation. Imports are optimized by the + compiler so there is no need to assign them to local variables. + """ + + fields = ("importname",) + importname: str + + +class InternalName(Expr): + """An internal name in the compiler. You cannot create these nodes + yourself but the parser provides a + :meth:`~jinja2.parser.Parser.free_identifier` method that creates + a new identifier for you. This identifier is not available from the + template and is not treated specially by the compiler. + """ + + fields = ("name",) + name: str + + def __init__(self) -> None: + raise TypeError( + "Can't create internal names. Use the " + "`free_identifier` method on a parser." + ) + + +class MarkSafe(Expr): + """Mark the wrapped expression as safe (wrap it as `Markup`).""" + + fields = ("expr",) + expr: Expr + + def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> Markup: + eval_ctx = get_eval_context(self, eval_ctx) + return Markup(self.expr.as_const(eval_ctx)) + + +class MarkSafeIfAutoescape(Expr): + """Mark the wrapped expression as safe (wrap it as `Markup`) but + only if autoescaping is active. + + .. versionadded:: 2.5 + """ + + fields = ("expr",) + expr: Expr + + def as_const( + self, eval_ctx: t.Optional[EvalContext] = None + ) -> t.Union[Markup, t.Any]: + eval_ctx = get_eval_context(self, eval_ctx) + if eval_ctx.volatile: + raise Impossible() + expr = self.expr.as_const(eval_ctx) + if eval_ctx.autoescape: + return Markup(expr) + return expr + + +class ContextReference(Expr): + """Returns the current template context. 
It can be used like a + :class:`Name` node, with a ``'load'`` ctx and will return the + current :class:`~jinja2.runtime.Context` object. + + Here an example that assigns the current template name to a + variable named `foo`:: + + Assign(Name('foo', ctx='store'), + Getattr(ContextReference(), 'name')) + + This is basically equivalent to using the + :func:`~jinja2.pass_context` decorator when using the high-level + API, which causes a reference to the context to be passed as the + first argument to a function. + """ + + +class DerivedContextReference(Expr): + """Return the current template context including locals. Behaves + exactly like :class:`ContextReference`, but includes local + variables, such as from a ``for`` loop. + + .. versionadded:: 2.11 + """ + + +class Continue(Stmt): + """Continue a loop.""" + + +class Break(Stmt): + """Break a loop.""" + + +class Scope(Stmt): + """An artificial scope.""" + + fields = ("body",) + body: t.List[Node] + + +class OverlayScope(Stmt): + """An overlay scope for extensions. This is a largely unoptimized scope + that however can be used to introduce completely arbitrary variables into + a sub scope from a dictionary or dictionary like object. The `context` + field has to evaluate to a dictionary object. + + Example usage:: + + OverlayScope(context=self.call_method('get_context'), + body=[...]) + + .. versionadded:: 2.10 + """ + + fields = ("context", "body") + context: Expr + body: t.List[Node] + + +class EvalContextModifier(Stmt): + """Modifies the eval context. For each option that should be modified, + a :class:`Keyword` has to be added to the :attr:`options` list. + + Example to change the `autoescape` setting:: + + EvalContextModifier(options=[Keyword('autoescape', Const(True))]) + """ + + fields = ("options",) + options: t.List[Keyword] + + +class ScopedEvalContextModifier(EvalContextModifier): + """Modifies the eval context and reverts it later. Works exactly like + :class:`EvalContextModifier` but will only modify the + :class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`. + """ + + fields = ("body",) + body: t.List[Node] + + +# make sure nobody creates custom nodes +def _failing_new(*args: t.Any, **kwargs: t.Any) -> "te.NoReturn": + raise TypeError("can't create custom node types") + + +NodeType.__new__ = staticmethod(_failing_new) # type: ignore +del _failing_new diff --git a/backend/test/lib/python3.8/site-packages/jinja2/optimizer.py b/backend/test/lib/python3.8/site-packages/jinja2/optimizer.py new file mode 100644 index 0000000000000000000000000000000000000000..fe1010705e7b29d4fa1900b3a0438ab93d7b582c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/optimizer.py @@ -0,0 +1,47 @@ +"""The optimizer tries to constant fold expressions and modify the AST +in place so that it should be faster to evaluate. + +Because the AST does not contain all the scoping information and the +compiler has to find that out, we cannot do all the optimizations we +want. For example, loop unrolling doesn't work because unrolled loops +would have a different scope. The solution would be a second syntax tree +that stored the scoping rules. +""" +import typing as t + +from . 
import nodes +from .visitor import NodeTransformer + +if t.TYPE_CHECKING: + from .environment import Environment + + +def optimize(node: nodes.Node, environment: "Environment") -> nodes.Node: + """The context hint can be used to perform an static optimization + based on the context given.""" + optimizer = Optimizer(environment) + return t.cast(nodes.Node, optimizer.visit(node)) + + +class Optimizer(NodeTransformer): + def __init__(self, environment: "t.Optional[Environment]") -> None: + self.environment = environment + + def generic_visit( + self, node: nodes.Node, *args: t.Any, **kwargs: t.Any + ) -> nodes.Node: + node = super().generic_visit(node, *args, **kwargs) + + # Do constant folding. Some other nodes besides Expr have + # as_const, but folding them causes errors later on. + if isinstance(node, nodes.Expr): + try: + return nodes.Const.from_untrusted( + node.as_const(args[0] if args else None), + lineno=node.lineno, + environment=self.environment, + ) + except nodes.Impossible: + pass + + return node diff --git a/backend/test/lib/python3.8/site-packages/jinja2/parser.py b/backend/test/lib/python3.8/site-packages/jinja2/parser.py new file mode 100644 index 0000000000000000000000000000000000000000..cefce2dfa1d2a4171838b0d0135af8ea3ff7d62c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/parser.py @@ -0,0 +1,1032 @@ +"""Parse tokens from the lexer into nodes for the compiler.""" +import typing +import typing as t + +from . import nodes +from .exceptions import TemplateAssertionError +from .exceptions import TemplateSyntaxError +from .lexer import describe_token +from .lexer import describe_token_expr + +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment + +_ImportInclude = t.TypeVar("_ImportInclude", nodes.Import, nodes.Include) +_MacroCall = t.TypeVar("_MacroCall", nodes.Macro, nodes.CallBlock) + +_statement_keywords = frozenset( + [ + "for", + "if", + "block", + "extends", + "print", + "macro", + "include", + "from", + "import", + "set", + "with", + "autoescape", + ] +) +_compare_operators = frozenset(["eq", "ne", "lt", "lteq", "gt", "gteq"]) + +_math_nodes: t.Dict[str, t.Type[nodes.Expr]] = { + "add": nodes.Add, + "sub": nodes.Sub, + "mul": nodes.Mul, + "div": nodes.Div, + "floordiv": nodes.FloorDiv, + "mod": nodes.Mod, +} + + +class Parser: + """This is the central parsing class Jinja uses. It's passed to + extensions and can be used to parse expressions or statements. + """ + + def __init__( + self, + environment: "Environment", + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + state: t.Optional[str] = None, + ) -> None: + self.environment = environment + self.stream = environment._tokenize(source, name, filename, state) + self.name = name + self.filename = filename + self.closed = False + self.extensions: t.Dict[ + str, t.Callable[["Parser"], t.Union[nodes.Node, t.List[nodes.Node]]] + ] = {} + for extension in environment.iter_extensions(): + for tag in extension.tags: + self.extensions[tag] = extension.parse + self._last_identifier = 0 + self._tag_stack: t.List[str] = [] + self._end_token_stack: t.List[t.Tuple[str, ...]] = [] + + def fail( + self, + msg: str, + lineno: t.Optional[int] = None, + exc: t.Type[TemplateSyntaxError] = TemplateSyntaxError, + ) -> "te.NoReturn": + """Convenience method that raises `exc` with the message, passed + line number or last line number as well as the current name and + filename. 
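+
+        Illustrative use, as seen elsewhere in this class::
+
+            self.fail("tag name expected", token.lineno)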
+ """ + if lineno is None: + lineno = self.stream.current.lineno + raise exc(msg, lineno, self.name, self.filename) + + def _fail_ut_eof( + self, + name: t.Optional[str], + end_token_stack: t.List[t.Tuple[str, ...]], + lineno: t.Optional[int], + ) -> "te.NoReturn": + expected: t.Set[str] = set() + for exprs in end_token_stack: + expected.update(map(describe_token_expr, exprs)) + if end_token_stack: + currently_looking: t.Optional[str] = " or ".join( + map(repr, map(describe_token_expr, end_token_stack[-1])) + ) + else: + currently_looking = None + + if name is None: + message = ["Unexpected end of template."] + else: + message = [f"Encountered unknown tag {name!r}."] + + if currently_looking: + if name is not None and name in expected: + message.append( + "You probably made a nesting mistake. Jinja is expecting this tag," + f" but currently looking for {currently_looking}." + ) + else: + message.append( + f"Jinja was looking for the following tags: {currently_looking}." + ) + + if self._tag_stack: + message.append( + "The innermost block that needs to be closed is" + f" {self._tag_stack[-1]!r}." + ) + + self.fail(" ".join(message), lineno) + + def fail_unknown_tag( + self, name: str, lineno: t.Optional[int] = None + ) -> "te.NoReturn": + """Called if the parser encounters an unknown tag. Tries to fail + with a human readable error message that could help to identify + the problem. + """ + self._fail_ut_eof(name, self._end_token_stack, lineno) + + def fail_eof( + self, + end_tokens: t.Optional[t.Tuple[str, ...]] = None, + lineno: t.Optional[int] = None, + ) -> "te.NoReturn": + """Like fail_unknown_tag but for end of template situations.""" + stack = list(self._end_token_stack) + if end_tokens is not None: + stack.append(end_tokens) + self._fail_ut_eof(None, stack, lineno) + + def is_tuple_end( + self, extra_end_rules: t.Optional[t.Tuple[str, ...]] = None + ) -> bool: + """Are we at the end of a tuple?""" + if self.stream.current.type in ("variable_end", "block_end", "rparen"): + return True + elif extra_end_rules is not None: + return self.stream.current.test_any(extra_end_rules) # type: ignore + return False + + def free_identifier(self, lineno: t.Optional[int] = None) -> nodes.InternalName: + """Return a new free identifier as :class:`~jinja2.nodes.InternalName`.""" + self._last_identifier += 1 + rv = object.__new__(nodes.InternalName) + nodes.Node.__init__(rv, f"fi{self._last_identifier}", lineno=lineno) + return rv + + def parse_statement(self) -> t.Union[nodes.Node, t.List[nodes.Node]]: + """Parse a single statement.""" + token = self.stream.current + if token.type != "name": + self.fail("tag name expected", token.lineno) + self._tag_stack.append(token.value) + pop_tag = True + try: + if token.value in _statement_keywords: + f = getattr(self, f"parse_{self.stream.current.value}") + return f() # type: ignore + if token.value == "call": + return self.parse_call_block() + if token.value == "filter": + return self.parse_filter_block() + ext = self.extensions.get(token.value) + if ext is not None: + return ext(self) + + # did not work out, remove the token we pushed by accident + # from the stack so that the unknown tag fail function can + # produce a proper error message. 
+ self._tag_stack.pop() + pop_tag = False + self.fail_unknown_tag(token.value, token.lineno) + finally: + if pop_tag: + self._tag_stack.pop() + + def parse_statements( + self, end_tokens: t.Tuple[str, ...], drop_needle: bool = False + ) -> t.List[nodes.Node]: + """Parse multiple statements into a list until one of the end tokens + is reached. This is used to parse the body of statements as it also + parses template data if appropriate. The parser checks first if the + current token is a colon and skips it if there is one. Then it checks + for the block end and parses until if one of the `end_tokens` is + reached. Per default the active token in the stream at the end of + the call is the matched end token. If this is not wanted `drop_needle` + can be set to `True` and the end token is removed. + """ + # the first token may be a colon for python compatibility + self.stream.skip_if("colon") + + # in the future it would be possible to add whole code sections + # by adding some sort of end of statement token and parsing those here. + self.stream.expect("block_end") + result = self.subparse(end_tokens) + + # we reached the end of the template too early, the subparser + # does not check for this, so we do that now + if self.stream.current.type == "eof": + self.fail_eof(end_tokens) + + if drop_needle: + next(self.stream) + return result + + def parse_set(self) -> t.Union[nodes.Assign, nodes.AssignBlock]: + """Parse an assign statement.""" + lineno = next(self.stream).lineno + target = self.parse_assign_target(with_namespace=True) + if self.stream.skip_if("assign"): + expr = self.parse_tuple() + return nodes.Assign(target, expr, lineno=lineno) + filter_node = self.parse_filter(None) + body = self.parse_statements(("name:endset",), drop_needle=True) + return nodes.AssignBlock(target, filter_node, body, lineno=lineno) + + def parse_for(self) -> nodes.For: + """Parse a for loop.""" + lineno = self.stream.expect("name:for").lineno + target = self.parse_assign_target(extra_end_rules=("name:in",)) + self.stream.expect("name:in") + iter = self.parse_tuple( + with_condexpr=False, extra_end_rules=("name:recursive",) + ) + test = None + if self.stream.skip_if("name:if"): + test = self.parse_expression() + recursive = self.stream.skip_if("name:recursive") + body = self.parse_statements(("name:endfor", "name:else")) + if next(self.stream).value == "endfor": + else_ = [] + else: + else_ = self.parse_statements(("name:endfor",), drop_needle=True) + return nodes.For(target, iter, body, else_, test, recursive, lineno=lineno) + + def parse_if(self) -> nodes.If: + """Parse an if construct.""" + node = result = nodes.If(lineno=self.stream.expect("name:if").lineno) + while True: + node.test = self.parse_tuple(with_condexpr=False) + node.body = self.parse_statements(("name:elif", "name:else", "name:endif")) + node.elif_ = [] + node.else_ = [] + token = next(self.stream) + if token.test("name:elif"): + node = nodes.If(lineno=self.stream.current.lineno) + result.elif_.append(node) + continue + elif token.test("name:else"): + result.else_ = self.parse_statements(("name:endif",), drop_needle=True) + break + return result + + def parse_with(self) -> nodes.With: + node = nodes.With(lineno=next(self.stream).lineno) + targets: t.List[nodes.Expr] = [] + values: t.List[nodes.Expr] = [] + while self.stream.current.type != "block_end": + if targets: + self.stream.expect("comma") + target = self.parse_assign_target() + target.set_ctx("param") + targets.append(target) + self.stream.expect("assign") + 
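+            # each `name = expr` pair in a {% with a=1, b=2 %} tag adds one
+            # entry to `targets` and one to `values`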
values.append(self.parse_expression()) + node.targets = targets + node.values = values + node.body = self.parse_statements(("name:endwith",), drop_needle=True) + return node + + def parse_autoescape(self) -> nodes.Scope: + node = nodes.ScopedEvalContextModifier(lineno=next(self.stream).lineno) + node.options = [nodes.Keyword("autoescape", self.parse_expression())] + node.body = self.parse_statements(("name:endautoescape",), drop_needle=True) + return nodes.Scope([node]) + + def parse_block(self) -> nodes.Block: + node = nodes.Block(lineno=next(self.stream).lineno) + node.name = self.stream.expect("name").value + node.scoped = self.stream.skip_if("name:scoped") + node.required = self.stream.skip_if("name:required") + + # common problem people encounter when switching from django + # to jinja. we do not support hyphens in block names, so let's + # raise a nicer error message in that case. + if self.stream.current.type == "sub": + self.fail( + "Block names in Jinja have to be valid Python identifiers and may not" + " contain hyphens, use an underscore instead." + ) + + node.body = self.parse_statements(("name:endblock",), drop_needle=True) + + # enforce that required blocks only contain whitespace or comments + # by asserting that the body, if not empty, is just TemplateData nodes + # with whitespace data + if node.required and not all( + isinstance(child, nodes.TemplateData) and child.data.isspace() + for body in node.body + for child in body.nodes # type: ignore + ): + self.fail("Required blocks can only contain comments or whitespace") + + self.stream.skip_if("name:" + node.name) + return node + + def parse_extends(self) -> nodes.Extends: + node = nodes.Extends(lineno=next(self.stream).lineno) + node.template = self.parse_expression() + return node + + def parse_import_context( + self, node: _ImportInclude, default: bool + ) -> _ImportInclude: + if self.stream.current.test_any( + "name:with", "name:without" + ) and self.stream.look().test("name:context"): + node.with_context = next(self.stream).value == "with" + self.stream.skip() + else: + node.with_context = default + return node + + def parse_include(self) -> nodes.Include: + node = nodes.Include(lineno=next(self.stream).lineno) + node.template = self.parse_expression() + if self.stream.current.test("name:ignore") and self.stream.look().test( + "name:missing" + ): + node.ignore_missing = True + self.stream.skip(2) + else: + node.ignore_missing = False + return self.parse_import_context(node, True) + + def parse_import(self) -> nodes.Import: + node = nodes.Import(lineno=next(self.stream).lineno) + node.template = self.parse_expression() + self.stream.expect("name:as") + node.target = self.parse_assign_target(name_only=True).name + return self.parse_import_context(node, False) + + def parse_from(self) -> nodes.FromImport: + node = nodes.FromImport(lineno=next(self.stream).lineno) + node.template = self.parse_expression() + self.stream.expect("name:import") + node.names = [] + + def parse_context() -> bool: + if self.stream.current.value in { + "with", + "without", + } and self.stream.look().test("name:context"): + node.with_context = next(self.stream).value == "with" + self.stream.skip() + return True + return False + + while True: + if node.names: + self.stream.expect("comma") + if self.stream.current.type == "name": + if parse_context(): + break + target = self.parse_assign_target(name_only=True) + if target.name.startswith("_"): + self.fail( + "names starting with an underline can not be imported", + target.lineno, + 
exc=TemplateAssertionError, + ) + if self.stream.skip_if("name:as"): + alias = self.parse_assign_target(name_only=True) + node.names.append((target.name, alias.name)) + else: + node.names.append(target.name) + if parse_context() or self.stream.current.type != "comma": + break + else: + self.stream.expect("name") + if not hasattr(node, "with_context"): + node.with_context = False + return node + + def parse_signature(self, node: _MacroCall) -> None: + args = node.args = [] + defaults = node.defaults = [] + self.stream.expect("lparen") + while self.stream.current.type != "rparen": + if args: + self.stream.expect("comma") + arg = self.parse_assign_target(name_only=True) + arg.set_ctx("param") + if self.stream.skip_if("assign"): + defaults.append(self.parse_expression()) + elif defaults: + self.fail("non-default argument follows default argument") + args.append(arg) + self.stream.expect("rparen") + + def parse_call_block(self) -> nodes.CallBlock: + node = nodes.CallBlock(lineno=next(self.stream).lineno) + if self.stream.current.type == "lparen": + self.parse_signature(node) + else: + node.args = [] + node.defaults = [] + + call_node = self.parse_expression() + if not isinstance(call_node, nodes.Call): + self.fail("expected call", node.lineno) + node.call = call_node + node.body = self.parse_statements(("name:endcall",), drop_needle=True) + return node + + def parse_filter_block(self) -> nodes.FilterBlock: + node = nodes.FilterBlock(lineno=next(self.stream).lineno) + node.filter = self.parse_filter(None, start_inline=True) # type: ignore + node.body = self.parse_statements(("name:endfilter",), drop_needle=True) + return node + + def parse_macro(self) -> nodes.Macro: + node = nodes.Macro(lineno=next(self.stream).lineno) + node.name = self.parse_assign_target(name_only=True).name + self.parse_signature(node) + node.body = self.parse_statements(("name:endmacro",), drop_needle=True) + return node + + def parse_print(self) -> nodes.Output: + node = nodes.Output(lineno=next(self.stream).lineno) + node.nodes = [] + while self.stream.current.type != "block_end": + if node.nodes: + self.stream.expect("comma") + node.nodes.append(self.parse_expression()) + return node + + @typing.overload + def parse_assign_target( + self, with_tuple: bool = ..., name_only: "te.Literal[True]" = ... + ) -> nodes.Name: + ... + + @typing.overload + def parse_assign_target( + self, + with_tuple: bool = True, + name_only: bool = False, + extra_end_rules: t.Optional[t.Tuple[str, ...]] = None, + with_namespace: bool = False, + ) -> t.Union[nodes.NSRef, nodes.Name, nodes.Tuple]: + ... + + def parse_assign_target( + self, + with_tuple: bool = True, + name_only: bool = False, + extra_end_rules: t.Optional[t.Tuple[str, ...]] = None, + with_namespace: bool = False, + ) -> t.Union[nodes.NSRef, nodes.Name, nodes.Tuple]: + """Parse an assignment target. As Jinja allows assignments to + tuples, this function can parse all allowed assignment targets. Per + default assignments to tuples are parsed, that can be disable however + by setting `with_tuple` to `False`. If only assignments to names are + wanted `name_only` can be set to `True`. The `extra_end_rules` + parameter is forwarded to the tuple parsing function. If + `with_namespace` is enabled, a namespace assignment may be parsed. 
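+
+        For orientation (illustrative): ``{% set x = 1 %}`` produces a
+        :class:`~jinja2.nodes.Name` target, ``{% set ns.x = 1 %}`` an
+        :class:`~jinja2.nodes.NSRef`, and ``{% for a, b in seq %}`` a
+        :class:`~jinja2.nodes.Tuple` of names.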
+ """ + target: nodes.Expr + + if with_namespace and self.stream.look().type == "dot": + token = self.stream.expect("name") + next(self.stream) # dot + attr = self.stream.expect("name") + target = nodes.NSRef(token.value, attr.value, lineno=token.lineno) + elif name_only: + token = self.stream.expect("name") + target = nodes.Name(token.value, "store", lineno=token.lineno) + else: + if with_tuple: + target = self.parse_tuple( + simplified=True, extra_end_rules=extra_end_rules + ) + else: + target = self.parse_primary() + + target.set_ctx("store") + + if not target.can_assign(): + self.fail( + f"can't assign to {type(target).__name__.lower()!r}", target.lineno + ) + + return target # type: ignore + + def parse_expression(self, with_condexpr: bool = True) -> nodes.Expr: + """Parse an expression. Per default all expressions are parsed, if + the optional `with_condexpr` parameter is set to `False` conditional + expressions are not parsed. + """ + if with_condexpr: + return self.parse_condexpr() + return self.parse_or() + + def parse_condexpr(self) -> nodes.Expr: + lineno = self.stream.current.lineno + expr1 = self.parse_or() + expr3: t.Optional[nodes.Expr] + + while self.stream.skip_if("name:if"): + expr2 = self.parse_or() + if self.stream.skip_if("name:else"): + expr3 = self.parse_condexpr() + else: + expr3 = None + expr1 = nodes.CondExpr(expr2, expr1, expr3, lineno=lineno) + lineno = self.stream.current.lineno + return expr1 + + def parse_or(self) -> nodes.Expr: + lineno = self.stream.current.lineno + left = self.parse_and() + while self.stream.skip_if("name:or"): + right = self.parse_and() + left = nodes.Or(left, right, lineno=lineno) + lineno = self.stream.current.lineno + return left + + def parse_and(self) -> nodes.Expr: + lineno = self.stream.current.lineno + left = self.parse_not() + while self.stream.skip_if("name:and"): + right = self.parse_not() + left = nodes.And(left, right, lineno=lineno) + lineno = self.stream.current.lineno + return left + + def parse_not(self) -> nodes.Expr: + if self.stream.current.test("name:not"): + lineno = next(self.stream).lineno + return nodes.Not(self.parse_not(), lineno=lineno) + return self.parse_compare() + + def parse_compare(self) -> nodes.Expr: + lineno = self.stream.current.lineno + expr = self.parse_math1() + ops = [] + while True: + token_type = self.stream.current.type + if token_type in _compare_operators: + next(self.stream) + ops.append(nodes.Operand(token_type, self.parse_math1())) + elif self.stream.skip_if("name:in"): + ops.append(nodes.Operand("in", self.parse_math1())) + elif self.stream.current.test("name:not") and self.stream.look().test( + "name:in" + ): + self.stream.skip(2) + ops.append(nodes.Operand("notin", self.parse_math1())) + else: + break + lineno = self.stream.current.lineno + if not ops: + return expr + return nodes.Compare(expr, ops, lineno=lineno) + + def parse_math1(self) -> nodes.Expr: + lineno = self.stream.current.lineno + left = self.parse_concat() + while self.stream.current.type in ("add", "sub"): + cls = _math_nodes[self.stream.current.type] + next(self.stream) + right = self.parse_concat() + left = cls(left, right, lineno=lineno) + lineno = self.stream.current.lineno + return left + + def parse_concat(self) -> nodes.Expr: + lineno = self.stream.current.lineno + args = [self.parse_math2()] + while self.stream.current.type == "tilde": + next(self.stream) + args.append(self.parse_math2()) + if len(args) == 1: + return args[0] + return nodes.Concat(args, lineno=lineno) + + def parse_math2(self) -> nodes.Expr: + 
lineno = self.stream.current.lineno + left = self.parse_pow() + while self.stream.current.type in ("mul", "div", "floordiv", "mod"): + cls = _math_nodes[self.stream.current.type] + next(self.stream) + right = self.parse_pow() + left = cls(left, right, lineno=lineno) + lineno = self.stream.current.lineno + return left + + def parse_pow(self) -> nodes.Expr: + lineno = self.stream.current.lineno + left = self.parse_unary() + while self.stream.current.type == "pow": + next(self.stream) + right = self.parse_unary() + left = nodes.Pow(left, right, lineno=lineno) + lineno = self.stream.current.lineno + return left + + def parse_unary(self, with_filter: bool = True) -> nodes.Expr: + token_type = self.stream.current.type + lineno = self.stream.current.lineno + node: nodes.Expr + + if token_type == "sub": + next(self.stream) + node = nodes.Neg(self.parse_unary(False), lineno=lineno) + elif token_type == "add": + next(self.stream) + node = nodes.Pos(self.parse_unary(False), lineno=lineno) + else: + node = self.parse_primary() + node = self.parse_postfix(node) + if with_filter: + node = self.parse_filter_expr(node) + return node + + def parse_primary(self) -> nodes.Expr: + token = self.stream.current + node: nodes.Expr + if token.type == "name": + if token.value in ("true", "false", "True", "False"): + node = nodes.Const(token.value in ("true", "True"), lineno=token.lineno) + elif token.value in ("none", "None"): + node = nodes.Const(None, lineno=token.lineno) + else: + node = nodes.Name(token.value, "load", lineno=token.lineno) + next(self.stream) + elif token.type == "string": + next(self.stream) + buf = [token.value] + lineno = token.lineno + while self.stream.current.type == "string": + buf.append(self.stream.current.value) + next(self.stream) + node = nodes.Const("".join(buf), lineno=lineno) + elif token.type in ("integer", "float"): + next(self.stream) + node = nodes.Const(token.value, lineno=token.lineno) + elif token.type == "lparen": + next(self.stream) + node = self.parse_tuple(explicit_parentheses=True) + self.stream.expect("rparen") + elif token.type == "lbracket": + node = self.parse_list() + elif token.type == "lbrace": + node = self.parse_dict() + else: + self.fail(f"unexpected {describe_token(token)!r}", token.lineno) + return node + + def parse_tuple( + self, + simplified: bool = False, + with_condexpr: bool = True, + extra_end_rules: t.Optional[t.Tuple[str, ...]] = None, + explicit_parentheses: bool = False, + ) -> t.Union[nodes.Tuple, nodes.Expr]: + """Works like `parse_expression` but if multiple expressions are + delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created. + This method could also return a regular expression instead of a tuple + if no commas where found. + + The default parsing mode is a full tuple. If `simplified` is `True` + only names and literals are parsed. The `no_condexpr` parameter is + forwarded to :meth:`parse_expression`. + + Because tuples do not require delimiters and may end in a bogus comma + an extra hint is needed that marks the end of a tuple. For example + for loops support tuples between `for` and `in`. In that case the + `extra_end_rules` is set to ``['name:in']``. + + `explicit_parentheses` is true if the parsing was triggered by an + expression in parentheses. This is used to figure out if an empty + tuple is a valid expression or not. 
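
# --- Editor's illustration (not part of the vendored file): one observable
# consequence of parse_pow() pulling its operands from parse_unary() above
# is that unary minus binds tighter than "**" in templates, unlike in Python.
from jinja2 import Environment

print(Environment().from_string("{{ -2 ** 2 }}").render())  # 4, i.e. (-2) ** 2
print(-2 ** 2)                                              # -4 in plain Python
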
+ """ + lineno = self.stream.current.lineno + if simplified: + parse = self.parse_primary + elif with_condexpr: + parse = self.parse_expression + else: + + def parse() -> nodes.Expr: + return self.parse_expression(with_condexpr=False) + + args: t.List[nodes.Expr] = [] + is_tuple = False + + while True: + if args: + self.stream.expect("comma") + if self.is_tuple_end(extra_end_rules): + break + args.append(parse()) + if self.stream.current.type == "comma": + is_tuple = True + else: + break + lineno = self.stream.current.lineno + + if not is_tuple: + if args: + return args[0] + + # if we don't have explicit parentheses, an empty tuple is + # not a valid expression. This would mean nothing (literally + # nothing) in the spot of an expression would be an empty + # tuple. + if not explicit_parentheses: + self.fail( + "Expected an expression," + f" got {describe_token(self.stream.current)!r}" + ) + + return nodes.Tuple(args, "load", lineno=lineno) + + def parse_list(self) -> nodes.List: + token = self.stream.expect("lbracket") + items: t.List[nodes.Expr] = [] + while self.stream.current.type != "rbracket": + if items: + self.stream.expect("comma") + if self.stream.current.type == "rbracket": + break + items.append(self.parse_expression()) + self.stream.expect("rbracket") + return nodes.List(items, lineno=token.lineno) + + def parse_dict(self) -> nodes.Dict: + token = self.stream.expect("lbrace") + items: t.List[nodes.Pair] = [] + while self.stream.current.type != "rbrace": + if items: + self.stream.expect("comma") + if self.stream.current.type == "rbrace": + break + key = self.parse_expression() + self.stream.expect("colon") + value = self.parse_expression() + items.append(nodes.Pair(key, value, lineno=key.lineno)) + self.stream.expect("rbrace") + return nodes.Dict(items, lineno=token.lineno) + + def parse_postfix(self, node: nodes.Expr) -> nodes.Expr: + while True: + token_type = self.stream.current.type + if token_type == "dot" or token_type == "lbracket": + node = self.parse_subscript(node) + # calls are valid both after postfix expressions (getattr + # and getitem) as well as filters and tests + elif token_type == "lparen": + node = self.parse_call(node) + else: + break + return node + + def parse_filter_expr(self, node: nodes.Expr) -> nodes.Expr: + while True: + token_type = self.stream.current.type + if token_type == "pipe": + node = self.parse_filter(node) # type: ignore + elif token_type == "name" and self.stream.current.value == "is": + node = self.parse_test(node) + # calls are valid both after postfix expressions (getattr + # and getitem) as well as filters and tests + elif token_type == "lparen": + node = self.parse_call(node) + else: + break + return node + + def parse_subscript( + self, node: nodes.Expr + ) -> t.Union[nodes.Getattr, nodes.Getitem]: + token = next(self.stream) + arg: nodes.Expr + + if token.type == "dot": + attr_token = self.stream.current + next(self.stream) + if attr_token.type == "name": + return nodes.Getattr( + node, attr_token.value, "load", lineno=token.lineno + ) + elif attr_token.type != "integer": + self.fail("expected name or number", attr_token.lineno) + arg = nodes.Const(attr_token.value, lineno=attr_token.lineno) + return nodes.Getitem(node, arg, "load", lineno=token.lineno) + if token.type == "lbracket": + args: t.List[nodes.Expr] = [] + while self.stream.current.type != "rbracket": + if args: + self.stream.expect("comma") + args.append(self.parse_subscribed()) + self.stream.expect("rbracket") + if len(args) == 1: + arg = args[0] + else: + arg = 
nodes.Tuple(args, "load", lineno=token.lineno) + return nodes.Getitem(node, arg, "load", lineno=token.lineno) + self.fail("expected subscript expression", token.lineno) + + def parse_subscribed(self) -> nodes.Expr: + lineno = self.stream.current.lineno + args: t.List[t.Optional[nodes.Expr]] + + if self.stream.current.type == "colon": + next(self.stream) + args = [None] + else: + node = self.parse_expression() + if self.stream.current.type != "colon": + return node + next(self.stream) + args = [node] + + if self.stream.current.type == "colon": + args.append(None) + elif self.stream.current.type not in ("rbracket", "comma"): + args.append(self.parse_expression()) + else: + args.append(None) + + if self.stream.current.type == "colon": + next(self.stream) + if self.stream.current.type not in ("rbracket", "comma"): + args.append(self.parse_expression()) + else: + args.append(None) + else: + args.append(None) + + return nodes.Slice(lineno=lineno, *args) + + def parse_call_args(self) -> t.Tuple: + token = self.stream.expect("lparen") + args = [] + kwargs = [] + dyn_args = None + dyn_kwargs = None + require_comma = False + + def ensure(expr: bool) -> None: + if not expr: + self.fail("invalid syntax for function call expression", token.lineno) + + while self.stream.current.type != "rparen": + if require_comma: + self.stream.expect("comma") + + # support for trailing comma + if self.stream.current.type == "rparen": + break + + if self.stream.current.type == "mul": + ensure(dyn_args is None and dyn_kwargs is None) + next(self.stream) + dyn_args = self.parse_expression() + elif self.stream.current.type == "pow": + ensure(dyn_kwargs is None) + next(self.stream) + dyn_kwargs = self.parse_expression() + else: + if ( + self.stream.current.type == "name" + and self.stream.look().type == "assign" + ): + # Parsing a kwarg + ensure(dyn_kwargs is None) + key = self.stream.current.value + self.stream.skip(2) + value = self.parse_expression() + kwargs.append(nodes.Keyword(key, value, lineno=value.lineno)) + else: + # Parsing an arg + ensure(dyn_args is None and dyn_kwargs is None and not kwargs) + args.append(self.parse_expression()) + + require_comma = True + + self.stream.expect("rparen") + return args, kwargs, dyn_args, dyn_kwargs + + def parse_call(self, node: nodes.Expr) -> nodes.Call: + # The lparen will be expected in parse_call_args, but the lineno + # needs to be recorded before the stream is advanced. + token = self.stream.current + args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args() + return nodes.Call(node, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno) + + def parse_filter( + self, node: t.Optional[nodes.Expr], start_inline: bool = False + ) -> t.Optional[nodes.Expr]: + while self.stream.current.type == "pipe" or start_inline: + if not start_inline: + next(self.stream) + token = self.stream.expect("name") + name = token.value + while self.stream.current.type == "dot": + next(self.stream) + name += "." 
+ self.stream.expect("name").value + if self.stream.current.type == "lparen": + args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args() + else: + args = [] + kwargs = [] + dyn_args = dyn_kwargs = None + node = nodes.Filter( + node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno + ) + start_inline = False + return node + + def parse_test(self, node: nodes.Expr) -> nodes.Expr: + token = next(self.stream) + if self.stream.current.test("name:not"): + next(self.stream) + negated = True + else: + negated = False + name = self.stream.expect("name").value + while self.stream.current.type == "dot": + next(self.stream) + name += "." + self.stream.expect("name").value + dyn_args = dyn_kwargs = None + kwargs = [] + if self.stream.current.type == "lparen": + args, kwargs, dyn_args, dyn_kwargs = self.parse_call_args() + elif self.stream.current.type in { + "name", + "string", + "integer", + "float", + "lparen", + "lbracket", + "lbrace", + } and not self.stream.current.test_any("name:else", "name:or", "name:and"): + if self.stream.current.test("name:is"): + self.fail("You cannot chain multiple tests with is") + arg_node = self.parse_primary() + arg_node = self.parse_postfix(arg_node) + args = [arg_node] + else: + args = [] + node = nodes.Test( + node, name, args, kwargs, dyn_args, dyn_kwargs, lineno=token.lineno + ) + if negated: + node = nodes.Not(node, lineno=token.lineno) + return node + + def subparse( + self, end_tokens: t.Optional[t.Tuple[str, ...]] = None + ) -> t.List[nodes.Node]: + body: t.List[nodes.Node] = [] + data_buffer: t.List[nodes.Node] = [] + add_data = data_buffer.append + + if end_tokens is not None: + self._end_token_stack.append(end_tokens) + + def flush_data() -> None: + if data_buffer: + lineno = data_buffer[0].lineno + body.append(nodes.Output(data_buffer[:], lineno=lineno)) + del data_buffer[:] + + try: + while self.stream: + token = self.stream.current + if token.type == "data": + if token.value: + add_data(nodes.TemplateData(token.value, lineno=token.lineno)) + next(self.stream) + elif token.type == "variable_begin": + next(self.stream) + add_data(self.parse_tuple(with_condexpr=True)) + self.stream.expect("variable_end") + elif token.type == "block_begin": + flush_data() + next(self.stream) + if end_tokens is not None and self.stream.current.test_any( + *end_tokens + ): + return body + rv = self.parse_statement() + if isinstance(rv, list): + body.extend(rv) + else: + body.append(rv) + self.stream.expect("block_end") + else: + raise AssertionError("internal parsing error") + + flush_data() + finally: + if end_tokens is not None: + self._end_token_stack.pop() + return body + + def parse(self) -> nodes.Template: + """Parse the whole template into a `Template` node.""" + result = nodes.Template(self.subparse(), lineno=1) + result.set_environment(self.environment) + return result diff --git a/backend/test/lib/python3.8/site-packages/jinja2/py.typed b/backend/test/lib/python3.8/site-packages/jinja2/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/jinja2/runtime.py b/backend/test/lib/python3.8/site-packages/jinja2/runtime.py new file mode 100644 index 0000000000000000000000000000000000000000..985842b284270bcd52855029f13d3da19d718349 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/runtime.py @@ -0,0 +1,1053 @@ +"""The runtime functions and state used by compiled templates.""" +import functools +import sys +import typing as t 
+from collections import abc +from itertools import chain + +from markupsafe import escape # noqa: F401 +from markupsafe import Markup +from markupsafe import soft_str + +from .async_utils import auto_aiter +from .async_utils import auto_await # noqa: F401 +from .exceptions import TemplateNotFound # noqa: F401 +from .exceptions import TemplateRuntimeError # noqa: F401 +from .exceptions import UndefinedError +from .nodes import EvalContext +from .utils import _PassArg +from .utils import concat +from .utils import internalcode +from .utils import missing +from .utils import Namespace # noqa: F401 +from .utils import object_type_repr +from .utils import pass_eval_context + +V = t.TypeVar("V") +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + +if t.TYPE_CHECKING: + import logging + import typing_extensions as te + from .environment import Environment + + class LoopRenderFunc(te.Protocol): + def __call__( + self, + reciter: t.Iterable[V], + loop_render_func: "LoopRenderFunc", + depth: int = 0, + ) -> str: + ... + + +# these variables are exported to the template runtime +exported = [ + "LoopContext", + "TemplateReference", + "Macro", + "Markup", + "TemplateRuntimeError", + "missing", + "escape", + "markup_join", + "str_join", + "identity", + "TemplateNotFound", + "Namespace", + "Undefined", + "internalcode", +] +async_exported = [ + "AsyncLoopContext", + "auto_aiter", + "auto_await", +] + + +def identity(x: V) -> V: + """Returns its argument. Useful for certain things in the + environment. + """ + return x + + +def markup_join(seq: t.Iterable[t.Any]) -> str: + """Concatenation that escapes if necessary and converts to string.""" + buf = [] + iterator = map(soft_str, seq) + for arg in iterator: + buf.append(arg) + if hasattr(arg, "__html__"): + return Markup("").join(chain(buf, iterator)) + return concat(buf) + + +def str_join(seq: t.Iterable[t.Any]) -> str: + """Simple args to string conversion and concatenation.""" + return concat(map(str, seq)) + + +def new_context( + environment: "Environment", + template_name: t.Optional[str], + blocks: t.Dict[str, t.Callable[["Context"], t.Iterator[str]]], + vars: t.Optional[t.Dict[str, t.Any]] = None, + shared: bool = False, + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + locals: t.Optional[t.Mapping[str, t.Any]] = None, +) -> "Context": + """Internal helper for context creation.""" + if vars is None: + vars = {} + if shared: + parent = vars + else: + parent = dict(globals or (), **vars) + if locals: + # if the parent is shared a copy should be created because + # we don't want to modify the dict passed + if shared: + parent = dict(parent) + for key, value in locals.items(): + if value is not missing: + parent[key] = value + return environment.context_class( + environment, parent, template_name, blocks, globals=globals + ) + + +class TemplateReference: + """The `self` in templates.""" + + def __init__(self, context: "Context") -> None: + self.__context = context + + def __getitem__(self, name: str) -> t.Any: + blocks = self.__context.blocks[name] + return BlockReference(name, self.__context, blocks, 0) + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.__context.name!r}>" + + +def _dict_method_all(dict_method: F) -> F: + @functools.wraps(dict_method) + def f_all(self: "Context") -> t.Any: + return dict_method(self.get_all()) + + return t.cast(F, f_all) + + +@abc.Mapping.register +class Context: + """The template context holds the variables of a template. 
It stores the + values passed to the template and also the names the template exports. + Creating instances is neither supported nor useful as it's created + automatically at various stages of the template evaluation and should not + be created by hand. + + The context is immutable. Modifications on :attr:`parent` **must not** + happen and modifications on :attr:`vars` are allowed from generated + template code only. Template filters and global functions marked as + :func:`pass_context` get the active context passed as first argument + and are allowed to access the context read-only. + + The template context supports read only dict operations (`get`, + `keys`, `values`, `items`, `iterkeys`, `itervalues`, `iteritems`, + `__getitem__`, `__contains__`). Additionally there is a :meth:`resolve` + method that doesn't fail with a `KeyError` but returns an + :class:`Undefined` object for missing variables. + """ + + def __init__( + self, + environment: "Environment", + parent: t.Dict[str, t.Any], + name: t.Optional[str], + blocks: t.Dict[str, t.Callable[["Context"], t.Iterator[str]]], + globals: t.Optional[t.MutableMapping[str, t.Any]] = None, + ): + self.parent = parent + self.vars: t.Dict[str, t.Any] = {} + self.environment: "Environment" = environment + self.eval_ctx = EvalContext(self.environment, name) + self.exported_vars: t.Set[str] = set() + self.name = name + self.globals_keys = set() if globals is None else set(globals) + + # create the initial mapping of blocks. Whenever template inheritance + # takes place the runtime will update this mapping with the new blocks + # from the template. + self.blocks = {k: [v] for k, v in blocks.items()} + + def super( + self, name: str, current: t.Callable[["Context"], t.Iterator[str]] + ) -> t.Union["BlockReference", "Undefined"]: + """Render a parent block.""" + try: + blocks = self.blocks[name] + index = blocks.index(current) + 1 + blocks[index] + except LookupError: + return self.environment.undefined( + f"there is no parent block called {name!r}.", name="super" + ) + return BlockReference(name, self, blocks, index) + + def get(self, key: str, default: t.Any = None) -> t.Any: + """Look up a variable by name, or return a default if the key is + not found. + + :param key: The variable name to look up. + :param default: The value to return if the key is not found. + """ + try: + return self[key] + except KeyError: + return default + + def resolve(self, key: str) -> t.Union[t.Any, "Undefined"]: + """Look up a variable by name, or return an :class:`Undefined` + object if the key is not found. + + If you need to add custom behavior, override + :meth:`resolve_or_missing`, not this method. The various lookup + functions use that method, not this one. + + :param key: The variable name to look up. + """ + rv = self.resolve_or_missing(key) + + if rv is missing: + return self.environment.undefined(name=key) + + return rv + + def resolve_or_missing(self, key: str) -> t.Any: + """Look up a variable by name, or return a ``missing`` sentinel + if the key is not found. + + Override this method to add custom lookup behavior. + :meth:`resolve`, :meth:`get`, and :meth:`__getitem__` use this + method. Don't call this method directly. + + :param key: The variable name to look up. 
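
# --- Editor's illustration (not part of the vendored file): filters marked
# with pass_context receive the read-only Context described above as their
# first argument and can resolve template variables through it.
from jinja2 import Environment, pass_context

@pass_context
def greet(context, name):
    # Context.get() falls back to the default when the name is unresolved.
    return f"{context.get('greeting', 'Hello')}, {name}!"

env = Environment()
env.filters["greet"] = greet
print(env.from_string("{{ 'World' | greet }}").render(greeting="Hi"))  # Hi, World!
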
+ """ + if key in self.vars: + return self.vars[key] + + if key in self.parent: + return self.parent[key] + + return missing + + def get_exported(self) -> t.Dict[str, t.Any]: + """Get a new dict with the exported variables.""" + return {k: self.vars[k] for k in self.exported_vars} + + def get_all(self) -> t.Dict[str, t.Any]: + """Return the complete context as dict including the exported + variables. For optimizations reasons this might not return an + actual copy so be careful with using it. + """ + if not self.vars: + return self.parent + if not self.parent: + return self.vars + return dict(self.parent, **self.vars) + + @internalcode + def call( + __self, __obj: t.Callable, *args: t.Any, **kwargs: t.Any # noqa: B902 + ) -> t.Union[t.Any, "Undefined"]: + """Call the callable with the arguments and keyword arguments + provided but inject the active context or environment as first + argument if the callable has :func:`pass_context` or + :func:`pass_environment`. + """ + if __debug__: + __traceback_hide__ = True # noqa + + # Allow callable classes to take a context + if ( + hasattr(__obj, "__call__") # noqa: B004 + and _PassArg.from_obj(__obj.__call__) is not None # type: ignore + ): + __obj = __obj.__call__ # type: ignore + + pass_arg = _PassArg.from_obj(__obj) + + if pass_arg is _PassArg.context: + # the active context should have access to variables set in + # loops and blocks without mutating the context itself + if kwargs.get("_loop_vars"): + __self = __self.derived(kwargs["_loop_vars"]) + if kwargs.get("_block_vars"): + __self = __self.derived(kwargs["_block_vars"]) + args = (__self,) + args + elif pass_arg is _PassArg.eval_context: + args = (__self.eval_ctx,) + args + elif pass_arg is _PassArg.environment: + args = (__self.environment,) + args + + kwargs.pop("_block_vars", None) + kwargs.pop("_loop_vars", None) + + try: + return __obj(*args, **kwargs) + except StopIteration: + return __self.environment.undefined( + "value was undefined because a callable raised a" + " StopIteration exception" + ) + + def derived(self, locals: t.Optional[t.Dict[str, t.Any]] = None) -> "Context": + """Internal helper function to create a derived context. This is + used in situations where the system needs a new context in the same + template that is independent. + """ + context = new_context( + self.environment, self.name, {}, self.get_all(), True, None, locals + ) + context.eval_ctx = self.eval_ctx + context.blocks.update((k, list(v)) for k, v in self.blocks.items()) + return context + + keys = _dict_method_all(dict.keys) + values = _dict_method_all(dict.values) + items = _dict_method_all(dict.items) + + def __contains__(self, name: str) -> bool: + return name in self.vars or name in self.parent + + def __getitem__(self, key: str) -> t.Any: + """Look up a variable by name with ``[]`` syntax, or raise a + ``KeyError`` if the key is not found. 
+ """ + item = self.resolve_or_missing(key) + + if item is missing: + raise KeyError(key) + + return item + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.get_all()!r} of {self.name!r}>" + + +class BlockReference: + """One block on a template reference.""" + + def __init__( + self, + name: str, + context: "Context", + stack: t.List[t.Callable[["Context"], t.Iterator[str]]], + depth: int, + ) -> None: + self.name = name + self._context = context + self._stack = stack + self._depth = depth + + @property + def super(self) -> t.Union["BlockReference", "Undefined"]: + """Super the block.""" + if self._depth + 1 >= len(self._stack): + return self._context.environment.undefined( + f"there is no parent block called {self.name!r}.", name="super" + ) + return BlockReference(self.name, self._context, self._stack, self._depth + 1) + + @internalcode + async def _async_call(self) -> str: + rv = concat( + [x async for x in self._stack[self._depth](self._context)] # type: ignore + ) + + if self._context.eval_ctx.autoescape: + return Markup(rv) + + return rv + + @internalcode + def __call__(self) -> str: + if self._context.environment.is_async: + return self._async_call() # type: ignore + + rv = concat(self._stack[self._depth](self._context)) + + if self._context.eval_ctx.autoescape: + return Markup(rv) + + return rv + + +class LoopContext: + """A wrapper iterable for dynamic ``for`` loops, with information + about the loop and iteration. + """ + + #: Current iteration of the loop, starting at 0. + index0 = -1 + + _length: t.Optional[int] = None + _after: t.Any = missing + _current: t.Any = missing + _before: t.Any = missing + _last_changed_value: t.Any = missing + + def __init__( + self, + iterable: t.Iterable[V], + undefined: t.Type["Undefined"], + recurse: t.Optional["LoopRenderFunc"] = None, + depth0: int = 0, + ) -> None: + """ + :param iterable: Iterable to wrap. + :param undefined: :class:`Undefined` class to use for next and + previous items. + :param recurse: The function to render the loop body when the + loop is marked recursive. + :param depth0: Incremented when looping recursively. + """ + self._iterable = iterable + self._iterator = self._to_iterator(iterable) + self._undefined = undefined + self._recurse = recurse + #: How many levels deep a recursive loop currently is, starting at 0. + self.depth0 = depth0 + + @staticmethod + def _to_iterator(iterable: t.Iterable[V]) -> t.Iterator[V]: + return iter(iterable) + + @property + def length(self) -> int: + """Length of the iterable. + + If the iterable is a generator or otherwise does not have a + size, it is eagerly evaluated to get a size. + """ + if self._length is not None: + return self._length + + try: + self._length = len(self._iterable) # type: ignore + except TypeError: + iterable = list(self._iterator) + self._iterator = self._to_iterator(iterable) + self._length = len(iterable) + self.index + (self._after is not missing) + + return self._length + + def __len__(self) -> int: + return self.length + + @property + def depth(self) -> int: + """How many levels deep a recursive loop currently is, starting at 1.""" + return self.depth0 + 1 + + @property + def index(self) -> int: + """Current iteration of the loop, starting at 1.""" + return self.index0 + 1 + + @property + def revindex0(self) -> int: + """Number of iterations from the end of the loop, ending at 0. + + Requires calculating :attr:`length`. 
+ """ + return self.length - self.index + + @property + def revindex(self) -> int: + """Number of iterations from the end of the loop, ending at 1. + + Requires calculating :attr:`length`. + """ + return self.length - self.index0 + + @property + def first(self) -> bool: + """Whether this is the first iteration of the loop.""" + return self.index0 == 0 + + def _peek_next(self) -> t.Any: + """Return the next element in the iterable, or :data:`missing` + if the iterable is exhausted. Only peeks one item ahead, caching + the result in :attr:`_last` for use in subsequent checks. The + cache is reset when :meth:`__next__` is called. + """ + if self._after is not missing: + return self._after + + self._after = next(self._iterator, missing) + return self._after + + @property + def last(self) -> bool: + """Whether this is the last iteration of the loop. + + Causes the iterable to advance early. See + :func:`itertools.groupby` for issues this can cause. + The :func:`groupby` filter avoids that issue. + """ + return self._peek_next() is missing + + @property + def previtem(self) -> t.Union[t.Any, "Undefined"]: + """The item in the previous iteration. Undefined during the + first iteration. + """ + if self.first: + return self._undefined("there is no previous item") + + return self._before + + @property + def nextitem(self) -> t.Union[t.Any, "Undefined"]: + """The item in the next iteration. Undefined during the last + iteration. + + Causes the iterable to advance early. See + :func:`itertools.groupby` for issues this can cause. + The :func:`jinja-filters.groupby` filter avoids that issue. + """ + rv = self._peek_next() + + if rv is missing: + return self._undefined("there is no next item") + + return rv + + def cycle(self, *args: V) -> V: + """Return a value from the given args, cycling through based on + the current :attr:`index0`. + + :param args: One or more values to cycle through. + """ + if not args: + raise TypeError("no items for cycling given") + + return args[self.index0 % len(args)] + + def changed(self, *value: t.Any) -> bool: + """Return ``True`` if previously called with a different value + (including when called for the first time). + + :param value: One or more values to compare to the last call. + """ + if self._last_changed_value != value: + self._last_changed_value = value + return True + + return False + + def __iter__(self) -> "LoopContext": + return self + + def __next__(self) -> t.Tuple[t.Any, "LoopContext"]: + if self._after is not missing: + rv = self._after + self._after = missing + else: + rv = next(self._iterator) + + self.index0 += 1 + self._before = self._current + self._current = rv + return rv, self + + @internalcode + def __call__(self, iterable: t.Iterable[V]) -> str: + """When iterating over nested data, render the body of the loop + recursively with the given inner iterable data. + + The loop must have the ``recursive`` marker for this to work. + """ + if self._recurse is None: + raise TypeError( + "The loop must have the 'recursive' marker to be called recursively." 
+ ) + + return self._recurse(iterable, self._recurse, depth=self.depth) + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.index}/{self.length}>" + + +class AsyncLoopContext(LoopContext): + _iterator: t.AsyncIterator[t.Any] # type: ignore + + @staticmethod + def _to_iterator( # type: ignore + iterable: t.Union[t.Iterable[V], t.AsyncIterable[V]] + ) -> t.AsyncIterator[V]: + return auto_aiter(iterable) + + @property + async def length(self) -> int: # type: ignore + if self._length is not None: + return self._length + + try: + self._length = len(self._iterable) # type: ignore + except TypeError: + iterable = [x async for x in self._iterator] + self._iterator = self._to_iterator(iterable) + self._length = len(iterable) + self.index + (self._after is not missing) + + return self._length + + @property + async def revindex0(self) -> int: # type: ignore + return await self.length - self.index + + @property + async def revindex(self) -> int: # type: ignore + return await self.length - self.index0 + + async def _peek_next(self) -> t.Any: + if self._after is not missing: + return self._after + + try: + self._after = await self._iterator.__anext__() + except StopAsyncIteration: + self._after = missing + + return self._after + + @property + async def last(self) -> bool: # type: ignore + return await self._peek_next() is missing + + @property + async def nextitem(self) -> t.Union[t.Any, "Undefined"]: + rv = await self._peek_next() + + if rv is missing: + return self._undefined("there is no next item") + + return rv + + def __aiter__(self) -> "AsyncLoopContext": + return self + + async def __anext__(self) -> t.Tuple[t.Any, "AsyncLoopContext"]: + if self._after is not missing: + rv = self._after + self._after = missing + else: + rv = await self._iterator.__anext__() + + self.index0 += 1 + self._before = self._current + self._current = rv + return rv, self + + +class Macro: + """Wraps a macro function.""" + + def __init__( + self, + environment: "Environment", + func: t.Callable[..., str], + name: str, + arguments: t.List[str], + catch_kwargs: bool, + catch_varargs: bool, + caller: bool, + default_autoescape: t.Optional[bool] = None, + ): + self._environment = environment + self._func = func + self._argument_count = len(arguments) + self.name = name + self.arguments = arguments + self.catch_kwargs = catch_kwargs + self.catch_varargs = catch_varargs + self.caller = caller + self.explicit_caller = "caller" in arguments + + if default_autoescape is None: + if callable(environment.autoescape): + default_autoescape = environment.autoescape(None) + else: + default_autoescape = environment.autoescape + + self._default_autoescape = default_autoescape + + @internalcode + @pass_eval_context + def __call__(self, *args: t.Any, **kwargs: t.Any) -> str: + # This requires a bit of explanation, In the past we used to + # decide largely based on compile-time information if a macro is + # safe or unsafe. While there was a volatile mode it was largely + # unused for deciding on escaping. This turns out to be + # problematic for macros because whether a macro is safe depends not + # on the escape mode when it was defined, but rather when it was used. + # + # Because however we export macros from the module system and + # there are historic callers that do not pass an eval context (and + # will continue to not pass one), we need to perform an instance + # check here. + # + # This is considered safe because an eval context is not a valid + # argument to callables otherwise anyway. 
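
# --- Editor's illustration (not part of the vendored file): when the
# environment runs in async mode, AsyncLoopContext above drives the loop,
# so async iterables work directly in "for" blocks.
import asyncio
from jinja2 import Environment

env = Environment(enable_async=True)
tmpl = env.from_string("{% for x in items %}{{ loop.index }}:{{ x }} {% endfor %}")

async def letters():
    for x in ("a", "b"):
        yield x

print(asyncio.run(tmpl.render_async(items=letters())))  # 1:a 2:b
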
Worst case here is + # that if no eval context is passed we fall back to the compile + # time autoescape flag. + if args and isinstance(args[0], EvalContext): + autoescape = args[0].autoescape + args = args[1:] + else: + autoescape = self._default_autoescape + + # try to consume the positional arguments + arguments = list(args[: self._argument_count]) + off = len(arguments) + + # For information why this is necessary refer to the handling + # of caller in the `macro_body` handler in the compiler. + found_caller = False + + # if the number of arguments consumed is not the number of + # arguments expected we start filling in keyword arguments + # and defaults. + if off != self._argument_count: + for name in self.arguments[len(arguments) :]: + try: + value = kwargs.pop(name) + except KeyError: + value = missing + if name == "caller": + found_caller = True + arguments.append(value) + else: + found_caller = self.explicit_caller + + # it's important that the order of these arguments does not change + # if not also changed in the compiler's `function_scoping` method. + # the order is caller, keyword arguments, positional arguments! + if self.caller and not found_caller: + caller = kwargs.pop("caller", None) + if caller is None: + caller = self._environment.undefined("No caller defined", name="caller") + arguments.append(caller) + + if self.catch_kwargs: + arguments.append(kwargs) + elif kwargs: + if "caller" in kwargs: + raise TypeError( + f"macro {self.name!r} was invoked with two values for the special" + " caller argument. This is most likely a bug." + ) + raise TypeError( + f"macro {self.name!r} takes no keyword argument {next(iter(kwargs))!r}" + ) + if self.catch_varargs: + arguments.append(args[self._argument_count :]) + elif len(args) > self._argument_count: + raise TypeError( + f"macro {self.name!r} takes not more than" + f" {len(self.arguments)} argument(s)" + ) + + return self._invoke(arguments, autoescape) + + async def _async_invoke(self, arguments: t.List[t.Any], autoescape: bool) -> str: + rv = await self._func(*arguments) # type: ignore + + if autoescape: + return Markup(rv) + + return rv # type: ignore + + def _invoke(self, arguments: t.List[t.Any], autoescape: bool) -> str: + if self._environment.is_async: + return self._async_invoke(arguments, autoescape) # type: ignore + + rv = self._func(*arguments) + + if autoescape: + rv = Markup(rv) + + return rv + + def __repr__(self) -> str: + name = "anonymous" if self.name is None else repr(self.name) + return f"<{type(self).__name__} {name}>" + + +class Undefined: + """The default undefined type. This undefined type can be printed and + iterated over, but every other access will raise an :exc:`UndefinedError`: + + >>> foo = Undefined(name='foo') + >>> str(foo) + '' + >>> not foo + True + >>> foo + 42 + Traceback (most recent call last): + ... + jinja2.exceptions.UndefinedError: 'foo' is undefined + """ + + __slots__ = ( + "_undefined_hint", + "_undefined_obj", + "_undefined_name", + "_undefined_exception", + ) + + def __init__( + self, + hint: t.Optional[str] = None, + obj: t.Any = missing, + name: t.Optional[str] = None, + exc: t.Type[TemplateRuntimeError] = UndefinedError, + ) -> None: + self._undefined_hint = hint + self._undefined_obj = obj + self._undefined_name = name + self._undefined_exception = exc + + @property + def _undefined_message(self) -> str: + """Build a message about the undefined value based on how it was + accessed. 
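
# --- Editor's illustration (not part of the vendored file): the "caller"
# bookkeeping in Macro.__call__ above is what {% call %} blocks rely on.
from jinja2 import Environment

tmpl = Environment().from_string(
    "{% macro dialog(title) %}[{{ title }}: {{ caller() }}]{% endmacro %}"
    "{% call dialog('Hint') %}save your work{% endcall %}"
)
print(tmpl.render())  # [Hint: save your work]
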
+ """ + if self._undefined_hint: + return self._undefined_hint + + if self._undefined_obj is missing: + return f"{self._undefined_name!r} is undefined" + + if not isinstance(self._undefined_name, str): + return ( + f"{object_type_repr(self._undefined_obj)} has no" + f" element {self._undefined_name!r}" + ) + + return ( + f"{object_type_repr(self._undefined_obj)!r} has no" + f" attribute {self._undefined_name!r}" + ) + + @internalcode + def _fail_with_undefined_error( + self, *args: t.Any, **kwargs: t.Any + ) -> "te.NoReturn": + """Raise an :exc:`UndefinedError` when operations are performed + on the undefined value. + """ + raise self._undefined_exception(self._undefined_message) + + @internalcode + def __getattr__(self, name: str) -> t.Any: + if name[:2] == "__": + raise AttributeError(name) + + return self._fail_with_undefined_error() + + __add__ = __radd__ = __sub__ = __rsub__ = _fail_with_undefined_error + __mul__ = __rmul__ = __div__ = __rdiv__ = _fail_with_undefined_error + __truediv__ = __rtruediv__ = _fail_with_undefined_error + __floordiv__ = __rfloordiv__ = _fail_with_undefined_error + __mod__ = __rmod__ = _fail_with_undefined_error + __pos__ = __neg__ = _fail_with_undefined_error + __call__ = __getitem__ = _fail_with_undefined_error + __lt__ = __le__ = __gt__ = __ge__ = _fail_with_undefined_error + __int__ = __float__ = __complex__ = _fail_with_undefined_error + __pow__ = __rpow__ = _fail_with_undefined_error + + def __eq__(self, other: t.Any) -> bool: + return type(self) is type(other) + + def __ne__(self, other: t.Any) -> bool: + return not self.__eq__(other) + + def __hash__(self) -> int: + return id(type(self)) + + def __str__(self) -> str: + return "" + + def __len__(self) -> int: + return 0 + + def __iter__(self) -> t.Iterator[t.Any]: + yield from () + + async def __aiter__(self) -> t.AsyncIterator[t.Any]: + for _ in (): + yield + + def __bool__(self) -> bool: + return False + + def __repr__(self) -> str: + return "Undefined" + + +def make_logging_undefined( + logger: t.Optional["logging.Logger"] = None, base: t.Type[Undefined] = Undefined +) -> t.Type[Undefined]: + """Given a logger object this returns a new undefined class that will + log certain failures. It will log iterations and printing. If no + logger is given a default logger is created. + + Example:: + + logger = logging.getLogger(__name__) + LoggingUndefined = make_logging_undefined( + logger=logger, + base=Undefined + ) + + .. versionadded:: 2.8 + + :param logger: the logger to use. If not provided, a default logger + is created. + :param base: the base class to add logging functionality to. This + defaults to :class:`Undefined`. 
+ """ + if logger is None: + import logging + + logger = logging.getLogger(__name__) + logger.addHandler(logging.StreamHandler(sys.stderr)) + + def _log_message(undef: Undefined) -> None: + logger.warning( # type: ignore + "Template variable warning: %s", undef._undefined_message + ) + + class LoggingUndefined(base): # type: ignore + __slots__ = () + + def _fail_with_undefined_error( # type: ignore + self, *args: t.Any, **kwargs: t.Any + ) -> "te.NoReturn": + try: + super()._fail_with_undefined_error(*args, **kwargs) + except self._undefined_exception as e: + logger.error("Template variable error: %s", e) # type: ignore + raise e + + def __str__(self) -> str: + _log_message(self) + return super().__str__() # type: ignore + + def __iter__(self) -> t.Iterator[t.Any]: + _log_message(self) + return super().__iter__() # type: ignore + + def __bool__(self) -> bool: + _log_message(self) + return super().__bool__() # type: ignore + + return LoggingUndefined + + +class ChainableUndefined(Undefined): + """An undefined that is chainable, where both ``__getattr__`` and + ``__getitem__`` return itself rather than raising an + :exc:`UndefinedError`. + + >>> foo = ChainableUndefined(name='foo') + >>> str(foo.bar['baz']) + '' + >>> foo.bar['baz'] + 42 + Traceback (most recent call last): + ... + jinja2.exceptions.UndefinedError: 'foo' is undefined + + .. versionadded:: 2.11.0 + """ + + __slots__ = () + + def __html__(self) -> str: + return str(self) + + def __getattr__(self, _: str) -> "ChainableUndefined": + return self + + __getitem__ = __getattr__ # type: ignore + + +class DebugUndefined(Undefined): + """An undefined that returns the debug info when printed. + + >>> foo = DebugUndefined(name='foo') + >>> str(foo) + '{{ foo }}' + >>> not foo + True + >>> foo + 42 + Traceback (most recent call last): + ... + jinja2.exceptions.UndefinedError: 'foo' is undefined + """ + + __slots__ = () + + def __str__(self) -> str: + if self._undefined_hint: + message = f"undefined value printed: {self._undefined_hint}" + + elif self._undefined_obj is missing: + message = self._undefined_name # type: ignore + + else: + message = ( + f"no such element: {object_type_repr(self._undefined_obj)}" + f"[{self._undefined_name!r}]" + ) + + return f"{{{{ {message} }}}}" + + +class StrictUndefined(Undefined): + """An undefined that barks on print and iteration as well as boolean + tests and all kinds of comparisons. In other words: you can do nothing + with it except checking if it's defined using the `defined` test. + + >>> foo = StrictUndefined(name='foo') + >>> str(foo) + Traceback (most recent call last): + ... + jinja2.exceptions.UndefinedError: 'foo' is undefined + >>> not foo + Traceback (most recent call last): + ... + jinja2.exceptions.UndefinedError: 'foo' is undefined + >>> foo + 42 + Traceback (most recent call last): + ... + jinja2.exceptions.UndefinedError: 'foo' is undefined + """ + + __slots__ = () + __iter__ = __str__ = __len__ = Undefined._fail_with_undefined_error + __eq__ = __ne__ = __bool__ = __hash__ = Undefined._fail_with_undefined_error + __contains__ = Undefined._fail_with_undefined_error + + +# Remove slots attributes, after the metaclass is applied they are +# unneeded and contain wrong data for subclasses. 
+del ( + Undefined.__slots__, + ChainableUndefined.__slots__, + DebugUndefined.__slots__, + StrictUndefined.__slots__, +) diff --git a/backend/test/lib/python3.8/site-packages/jinja2/sandbox.py b/backend/test/lib/python3.8/site-packages/jinja2/sandbox.py new file mode 100644 index 0000000000000000000000000000000000000000..06d74148eccea79d1f5a0ca2fb76ecc246f87d62 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/sandbox.py @@ -0,0 +1,428 @@ +"""A sandbox layer that ensures unsafe operations cannot be performed. +Useful when the template itself comes from an untrusted source. +""" +import operator +import types +import typing as t +from _string import formatter_field_name_split # type: ignore +from collections import abc +from collections import deque +from string import Formatter + +from markupsafe import EscapeFormatter +from markupsafe import Markup + +from .environment import Environment +from .exceptions import SecurityError +from .runtime import Context +from .runtime import Undefined + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + +#: maximum number of items a range may produce +MAX_RANGE = 100000 + +#: Unsafe function attributes. +UNSAFE_FUNCTION_ATTRIBUTES: t.Set[str] = set() + +#: Unsafe method attributes. Function attributes are unsafe for methods too. +UNSAFE_METHOD_ATTRIBUTES: t.Set[str] = set() + +#: unsafe generator attributes. +UNSAFE_GENERATOR_ATTRIBUTES = {"gi_frame", "gi_code"} + +#: unsafe attributes on coroutines +UNSAFE_COROUTINE_ATTRIBUTES = {"cr_frame", "cr_code"} + +#: unsafe attributes on async generators +UNSAFE_ASYNC_GENERATOR_ATTRIBUTES = {"ag_code", "ag_frame"} + +_mutable_spec: t.Tuple[t.Tuple[t.Type, t.FrozenSet[str]], ...] = ( + ( + abc.MutableSet, + frozenset( + [ + "add", + "clear", + "difference_update", + "discard", + "pop", + "remove", + "symmetric_difference_update", + "update", + ] + ), + ), + ( + abc.MutableMapping, + frozenset(["clear", "pop", "popitem", "setdefault", "update"]), + ), + ( + abc.MutableSequence, + frozenset(["append", "reverse", "insert", "sort", "extend", "remove"]), + ), + ( + deque, + frozenset( + [ + "append", + "appendleft", + "clear", + "extend", + "extendleft", + "pop", + "popleft", + "remove", + "rotate", + ] + ), + ), +) + + +def inspect_format_method(callable: t.Callable) -> t.Optional[str]: + if not isinstance( + callable, (types.MethodType, types.BuiltinMethodType) + ) or callable.__name__ not in ("format", "format_map"): + return None + + obj = callable.__self__ + + if isinstance(obj, str): + return obj + + return None + + +def safe_range(*args: int) -> range: + """A range that can't generate ranges with a length of more than + MAX_RANGE items. + """ + rng = range(*args) + + if len(rng) > MAX_RANGE: + raise OverflowError( + "Range too big. The sandbox blocks ranges larger than" + f" MAX_RANGE ({MAX_RANGE})." + ) + + return rng + + +def unsafe(f: F) -> F: + """Marks a function or method as unsafe. + + .. code-block: python + + @unsafe + def delete(self): + pass + """ + f.unsafe_callable = True # type: ignore + return f + + +def is_internal_attribute(obj: t.Any, attr: str) -> bool: + """Test if the attribute given is an internal python attribute. For + example this function returns `True` for the `func_code` attribute of + python objects. This is useful if the environment method + :meth:`~SandboxedEnvironment.is_safe_attribute` is overridden. 
+ + >>> from jinja2.sandbox import is_internal_attribute + >>> is_internal_attribute(str, "mro") + True + >>> is_internal_attribute(str, "upper") + False + """ + if isinstance(obj, types.FunctionType): + if attr in UNSAFE_FUNCTION_ATTRIBUTES: + return True + elif isinstance(obj, types.MethodType): + if attr in UNSAFE_FUNCTION_ATTRIBUTES or attr in UNSAFE_METHOD_ATTRIBUTES: + return True + elif isinstance(obj, type): + if attr == "mro": + return True + elif isinstance(obj, (types.CodeType, types.TracebackType, types.FrameType)): + return True + elif isinstance(obj, types.GeneratorType): + if attr in UNSAFE_GENERATOR_ATTRIBUTES: + return True + elif hasattr(types, "CoroutineType") and isinstance(obj, types.CoroutineType): + if attr in UNSAFE_COROUTINE_ATTRIBUTES: + return True + elif hasattr(types, "AsyncGeneratorType") and isinstance( + obj, types.AsyncGeneratorType + ): + if attr in UNSAFE_ASYNC_GENERATOR_ATTRIBUTES: + return True + return attr.startswith("__") + + +def modifies_known_mutable(obj: t.Any, attr: str) -> bool: + """This function checks if an attribute on a builtin mutable object + (list, dict, set or deque) or the corresponding ABCs would modify it + if called. + + >>> modifies_known_mutable({}, "clear") + True + >>> modifies_known_mutable({}, "keys") + False + >>> modifies_known_mutable([], "append") + True + >>> modifies_known_mutable([], "index") + False + + If called with an unsupported object, ``False`` is returned. + + >>> modifies_known_mutable("foo", "upper") + False + """ + for typespec, unsafe in _mutable_spec: + if isinstance(obj, typespec): + return attr in unsafe + return False + + +class SandboxedEnvironment(Environment): + """The sandboxed environment. It works like the regular environment but + tells the compiler to generate sandboxed code. Additionally subclasses of + this environment may override the methods that tell the runtime what + attributes or functions are safe to access. + + If the template tries to access insecure code a :exc:`SecurityError` is + raised. However also other exceptions may occur during the rendering so + the caller has to ensure that all exceptions are caught. + """ + + sandboxed = True + + #: default callback table for the binary operators. A copy of this is + #: available on each instance of a sandboxed environment as + #: :attr:`binop_table` + default_binop_table: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = { + "+": operator.add, + "-": operator.sub, + "*": operator.mul, + "/": operator.truediv, + "//": operator.floordiv, + "**": operator.pow, + "%": operator.mod, + } + + #: default callback table for the unary operators. A copy of this is + #: available on each instance of a sandboxed environment as + #: :attr:`unop_table` + default_unop_table: t.Dict[str, t.Callable[[t.Any], t.Any]] = { + "+": operator.pos, + "-": operator.neg, + } + + #: a set of binary operators that should be intercepted. Each operator + #: that is added to this set (empty by default) is delegated to the + #: :meth:`call_binop` method that will perform the operator. The default + #: operator callback is specified by :attr:`binop_table`. + #: + #: The following binary operators are interceptable: + #: ``//``, ``%``, ``+``, ``*``, ``-``, ``/``, and ``**`` + #: + #: The default operation form the operator table corresponds to the + #: builtin function. Intercepted calls are always slower than the native + #: operator call, so make sure only to intercept the ones you are + #: interested in. + #: + #: .. 
versionadded:: 2.6 + intercepted_binops: t.FrozenSet[str] = frozenset() + + #: a set of unary operators that should be intercepted. Each operator + #: that is added to this set (empty by default) is delegated to the + #: :meth:`call_unop` method that will perform the operator. The default + #: operator callback is specified by :attr:`unop_table`. + #: + #: The following unary operators are interceptable: ``+``, ``-`` + #: + #: The default operation form the operator table corresponds to the + #: builtin function. Intercepted calls are always slower than the native + #: operator call, so make sure only to intercept the ones you are + #: interested in. + #: + #: .. versionadded:: 2.6 + intercepted_unops: t.FrozenSet[str] = frozenset() + + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + super().__init__(*args, **kwargs) + self.globals["range"] = safe_range + self.binop_table = self.default_binop_table.copy() + self.unop_table = self.default_unop_table.copy() + + def is_safe_attribute(self, obj: t.Any, attr: str, value: t.Any) -> bool: + """The sandboxed environment will call this method to check if the + attribute of an object is safe to access. Per default all attributes + starting with an underscore are considered private as well as the + special attributes of internal python objects as returned by the + :func:`is_internal_attribute` function. + """ + return not (attr.startswith("_") or is_internal_attribute(obj, attr)) + + def is_safe_callable(self, obj: t.Any) -> bool: + """Check if an object is safely callable. By default callables + are considered safe unless decorated with :func:`unsafe`. + + This also recognizes the Django convention of setting + ``func.alters_data = True``. + """ + return not ( + getattr(obj, "unsafe_callable", False) or getattr(obj, "alters_data", False) + ) + + def call_binop( + self, context: Context, operator: str, left: t.Any, right: t.Any + ) -> t.Any: + """For intercepted binary operator calls (:meth:`intercepted_binops`) + this function is executed instead of the builtin operator. This can + be used to fine tune the behavior of certain operators. + + .. versionadded:: 2.6 + """ + return self.binop_table[operator](left, right) + + def call_unop(self, context: Context, operator: str, arg: t.Any) -> t.Any: + """For intercepted unary operator calls (:meth:`intercepted_unops`) + this function is executed instead of the builtin operator. This can + be used to fine tune the behavior of certain operators. + + .. versionadded:: 2.6 + """ + return self.unop_table[operator](arg) + + def getitem( + self, obj: t.Any, argument: t.Union[str, t.Any] + ) -> t.Union[t.Any, Undefined]: + """Subscribe an object from sandboxed code.""" + try: + return obj[argument] + except (TypeError, LookupError): + if isinstance(argument, str): + try: + attr = str(argument) + except Exception: + pass + else: + try: + value = getattr(obj, attr) + except AttributeError: + pass + else: + if self.is_safe_attribute(obj, argument, value): + return value + return self.unsafe_undefined(obj, argument) + return self.undefined(obj=obj, name=argument) + + def getattr(self, obj: t.Any, attribute: str) -> t.Union[t.Any, Undefined]: + """Subscribe an object from sandboxed code and prefer the + attribute. The attribute passed *must* be a bytestring. 
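
# --- Editor's illustration (not part of the vendored file): the attribute
# checks above in action. is_safe_attribute() rejects underscore-prefixed
# names, so the lookup comes back as an "unsafe" undefined whose exception
# class is SecurityError.
from jinja2.exceptions import SecurityError
from jinja2.sandbox import SandboxedEnvironment

env = SandboxedEnvironment()
try:
    env.from_string("{{ ''.__class__.__mro__ }}").render()
except SecurityError as exc:
    print(exc)  # access to attribute '__class__' of 'str' object is unsafe.
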
+ """ + try: + value = getattr(obj, attribute) + except AttributeError: + try: + return obj[attribute] + except (TypeError, LookupError): + pass + else: + if self.is_safe_attribute(obj, attribute, value): + return value + return self.unsafe_undefined(obj, attribute) + return self.undefined(obj=obj, name=attribute) + + def unsafe_undefined(self, obj: t.Any, attribute: str) -> Undefined: + """Return an undefined object for unsafe attributes.""" + return self.undefined( + f"access to attribute {attribute!r} of" + f" {type(obj).__name__!r} object is unsafe.", + name=attribute, + obj=obj, + exc=SecurityError, + ) + + def format_string( + self, + s: str, + args: t.Tuple[t.Any, ...], + kwargs: t.Dict[str, t.Any], + format_func: t.Optional[t.Callable] = None, + ) -> str: + """If a format call is detected, then this is routed through this + method so that our safety sandbox can be used for it. + """ + formatter: SandboxedFormatter + if isinstance(s, Markup): + formatter = SandboxedEscapeFormatter(self, escape=s.escape) + else: + formatter = SandboxedFormatter(self) + + if format_func is not None and format_func.__name__ == "format_map": + if len(args) != 1 or kwargs: + raise TypeError( + "format_map() takes exactly one argument" + f" {len(args) + (kwargs is not None)} given" + ) + + kwargs = args[0] + args = () + + rv = formatter.vformat(s, args, kwargs) + return type(s)(rv) + + def call( + __self, # noqa: B902 + __context: Context, + __obj: t.Any, + *args: t.Any, + **kwargs: t.Any, + ) -> t.Any: + """Call an object from sandboxed code.""" + fmt = inspect_format_method(__obj) + if fmt is not None: + return __self.format_string(fmt, args, kwargs, __obj) + + # the double prefixes are to avoid double keyword argument + # errors when proxying the call. + if not __self.is_safe_callable(__obj): + raise SecurityError(f"{__obj!r} is not safely callable") + return __context.call(__obj, *args, **kwargs) + + +class ImmutableSandboxedEnvironment(SandboxedEnvironment): + """Works exactly like the regular `SandboxedEnvironment` but does not + permit modifications on the builtin mutable objects `list`, `set`, and + `dict` by using the :func:`modifies_known_mutable` function. 
+ """ + + def is_safe_attribute(self, obj: t.Any, attr: str, value: t.Any) -> bool: + if not super().is_safe_attribute(obj, attr, value): + return False + + return not modifies_known_mutable(obj, attr) + + +class SandboxedFormatter(Formatter): + def __init__(self, env: Environment, **kwargs: t.Any) -> None: + self._env = env + super().__init__(**kwargs) + + def get_field( + self, field_name: str, args: t.Sequence[t.Any], kwargs: t.Mapping[str, t.Any] + ) -> t.Tuple[t.Any, str]: + first, rest = formatter_field_name_split(field_name) + obj = self.get_value(first, args, kwargs) + for is_attr, i in rest: + if is_attr: + obj = self._env.getattr(obj, i) + else: + obj = self._env.getitem(obj, i) + return obj, first + + +class SandboxedEscapeFormatter(SandboxedFormatter, EscapeFormatter): + pass diff --git a/backend/test/lib/python3.8/site-packages/jinja2/tests.py b/backend/test/lib/python3.8/site-packages/jinja2/tests.py new file mode 100644 index 0000000000000000000000000000000000000000..a467cf08b54879ee734617611aef72ed946d4566 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/tests.py @@ -0,0 +1,255 @@ +"""Built-in template tests used with the ``is`` operator.""" +import operator +import typing as t +from collections import abc +from numbers import Number + +from .runtime import Undefined +from .utils import pass_environment + +if t.TYPE_CHECKING: + from .environment import Environment + + +def test_odd(value: int) -> bool: + """Return true if the variable is odd.""" + return value % 2 == 1 + + +def test_even(value: int) -> bool: + """Return true if the variable is even.""" + return value % 2 == 0 + + +def test_divisibleby(value: int, num: int) -> bool: + """Check if a variable is divisible by a number.""" + return value % num == 0 + + +def test_defined(value: t.Any) -> bool: + """Return true if the variable is defined: + + .. sourcecode:: jinja + + {% if variable is defined %} + value of variable: {{ variable }} + {% else %} + variable is not defined + {% endif %} + + See the :func:`default` filter for a simple way to set undefined + variables. + """ + return not isinstance(value, Undefined) + + +def test_undefined(value: t.Any) -> bool: + """Like :func:`defined` but the other way round.""" + return isinstance(value, Undefined) + + +@pass_environment +def test_filter(env: "Environment", value: str) -> bool: + """Check if a filter exists by name. Useful if a filter may be + optionally available. + + .. code-block:: jinja + + {% if 'markdown' is filter %} + {{ value | markdown }} + {% else %} + {{ value }} + {% endif %} + + .. versionadded:: 3.0 + """ + return value in env.filters + + +@pass_environment +def test_test(env: "Environment", value: str) -> bool: + """Check if a test exists by name. Useful if a test may be + optionally available. + + .. code-block:: jinja + + {% if 'loud' is test %} + {% if value is loud %} + {{ value|upper }} + {% else %} + {{ value|lower }} + {% endif %} + {% else %} + {{ value }} + {% endif %} + + .. versionadded:: 3.0 + """ + return value in env.tests + + +def test_none(value: t.Any) -> bool: + """Return true if the variable is none.""" + return value is None + + +def test_boolean(value: t.Any) -> bool: + """Return true if the object is a boolean value. + + .. versionadded:: 2.11 + """ + return value is True or value is False + + +def test_false(value: t.Any) -> bool: + """Return true if the object is False. + + .. versionadded:: 2.11 + """ + return value is False + + +def test_true(value: t.Any) -> bool: + """Return true if the object is True. 
+ + .. versionadded:: 2.11 + """ + return value is True + + +# NOTE: The existing 'number' test matches booleans and floats +def test_integer(value: t.Any) -> bool: + """Return true if the object is an integer. + + .. versionadded:: 2.11 + """ + return isinstance(value, int) and value is not True and value is not False + + +# NOTE: The existing 'number' test matches booleans and integers +def test_float(value: t.Any) -> bool: + """Return true if the object is a float. + + .. versionadded:: 2.11 + """ + return isinstance(value, float) + + +def test_lower(value: str) -> bool: + """Return true if the variable is lowercased.""" + return str(value).islower() + + +def test_upper(value: str) -> bool: + """Return true if the variable is uppercased.""" + return str(value).isupper() + + +def test_string(value: t.Any) -> bool: + """Return true if the object is a string.""" + return isinstance(value, str) + + +def test_mapping(value: t.Any) -> bool: + """Return true if the object is a mapping (dict etc.). + + .. versionadded:: 2.6 + """ + return isinstance(value, abc.Mapping) + + +def test_number(value: t.Any) -> bool: + """Return true if the variable is a number.""" + return isinstance(value, Number) + + +def test_sequence(value: t.Any) -> bool: + """Return true if the variable is a sequence. Sequences are variables + that are iterable. + """ + try: + len(value) + value.__getitem__ + except Exception: + return False + + return True + + +def test_sameas(value: t.Any, other: t.Any) -> bool: + """Check if an object points to the same memory address than another + object: + + .. sourcecode:: jinja + + {% if foo.attribute is sameas false %} + the foo attribute really is the `False` singleton + {% endif %} + """ + return value is other + + +def test_iterable(value: t.Any) -> bool: + """Check if it's possible to iterate over an object.""" + try: + iter(value) + except TypeError: + return False + + return True + + +def test_escaped(value: t.Any) -> bool: + """Check if the value is escaped.""" + return hasattr(value, "__html__") + + +def test_in(value: t.Any, seq: t.Container) -> bool: + """Check if value is in seq. + + .. 
versionadded:: 2.10 + """ + return value in seq + + +TESTS = { + "odd": test_odd, + "even": test_even, + "divisibleby": test_divisibleby, + "defined": test_defined, + "undefined": test_undefined, + "filter": test_filter, + "test": test_test, + "none": test_none, + "boolean": test_boolean, + "false": test_false, + "true": test_true, + "integer": test_integer, + "float": test_float, + "lower": test_lower, + "upper": test_upper, + "string": test_string, + "mapping": test_mapping, + "number": test_number, + "sequence": test_sequence, + "iterable": test_iterable, + "callable": callable, + "sameas": test_sameas, + "escaped": test_escaped, + "in": test_in, + "==": operator.eq, + "eq": operator.eq, + "equalto": operator.eq, + "!=": operator.ne, + "ne": operator.ne, + ">": operator.gt, + "gt": operator.gt, + "greaterthan": operator.gt, + "ge": operator.ge, + ">=": operator.ge, + "<": operator.lt, + "lt": operator.lt, + "lessthan": operator.lt, + "<=": operator.le, + "le": operator.le, +} diff --git a/backend/test/lib/python3.8/site-packages/jinja2/utils.py b/backend/test/lib/python3.8/site-packages/jinja2/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..9b5f5a50eb6773c4085f8572a45b3fa351367565 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/utils.py @@ -0,0 +1,755 @@ +import enum +import json +import os +import re +import typing as t +from collections import abc +from collections import deque +from random import choice +from random import randrange +from threading import Lock +from types import CodeType +from urllib.parse import quote_from_bytes + +import markupsafe + +if t.TYPE_CHECKING: + import typing_extensions as te + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + +# special singleton representing missing values for the runtime +missing: t.Any = type("MissingType", (), {"__repr__": lambda x: "missing"})() + +internal_code: t.MutableSet[CodeType] = set() + +concat = "".join + + +def pass_context(f: F) -> F: + """Pass the :class:`~jinja2.runtime.Context` as the first argument + to the decorated function when called while rendering a template. + + Can be used on functions, filters, and tests. + + If only ``Context.eval_context`` is needed, use + :func:`pass_eval_context`. If only ``Context.environment`` is + needed, use :func:`pass_environment`. + + .. versionadded:: 3.0.0 + Replaces ``contextfunction`` and ``contextfilter``. + """ + f.jinja_pass_arg = _PassArg.context # type: ignore + return f + + +def pass_eval_context(f: F) -> F: + """Pass the :class:`~jinja2.nodes.EvalContext` as the first argument + to the decorated function when called while rendering a template. + See :ref:`eval-context`. + + Can be used on functions, filters, and tests. + + If only ``EvalContext.environment`` is needed, use + :func:`pass_environment`. + + .. versionadded:: 3.0.0 + Replaces ``evalcontextfunction`` and ``evalcontextfilter``. + """ + f.jinja_pass_arg = _PassArg.eval_context # type: ignore + return f + + +def pass_environment(f: F) -> F: + """Pass the :class:`~jinja2.Environment` as the first argument to + the decorated function when called while rendering a template. + + Can be used on functions, filters, and tests. + + .. versionadded:: 3.0.0 + Replaces ``environmentfunction`` and ``environmentfilter``. 
+ """ + f.jinja_pass_arg = _PassArg.environment # type: ignore + return f + + +class _PassArg(enum.Enum): + context = enum.auto() + eval_context = enum.auto() + environment = enum.auto() + + @classmethod + def from_obj(cls, obj: F) -> t.Optional["_PassArg"]: + if hasattr(obj, "jinja_pass_arg"): + return obj.jinja_pass_arg # type: ignore + + return None + + +def internalcode(f: F) -> F: + """Marks the function as internally used""" + internal_code.add(f.__code__) + return f + + +def is_undefined(obj: t.Any) -> bool: + """Check if the object passed is undefined. This does nothing more than + performing an instance check against :class:`Undefined` but looks nicer. + This can be used for custom filters or tests that want to react to + undefined variables. For example a custom default filter can look like + this:: + + def default(var, default=''): + if is_undefined(var): + return default + return var + """ + from .runtime import Undefined + + return isinstance(obj, Undefined) + + +def consume(iterable: t.Iterable[t.Any]) -> None: + """Consumes an iterable without doing anything with it.""" + for _ in iterable: + pass + + +def clear_caches() -> None: + """Jinja keeps internal caches for environments and lexers. These are + used so that Jinja doesn't have to recreate environments and lexers all + the time. Normally you don't have to care about that but if you are + measuring memory consumption you may want to clean the caches. + """ + from .environment import get_spontaneous_environment + from .lexer import _lexer_cache + + get_spontaneous_environment.cache_clear() + _lexer_cache.clear() + + +def import_string(import_name: str, silent: bool = False) -> t.Any: + """Imports an object based on a string. This is useful if you want to + use import paths as endpoints or something similar. An import path can + be specified either in dotted notation (``xml.sax.saxutils.escape``) + or with a colon as object delimiter (``xml.sax.saxutils:escape``). + + If the `silent` is True the return value will be `None` if the import + fails. + + :return: imported object + """ + try: + if ":" in import_name: + module, obj = import_name.split(":", 1) + elif "." in import_name: + module, _, obj = import_name.rpartition(".") + else: + return __import__(import_name) + return getattr(__import__(module, None, None, [obj]), obj) + except (ImportError, AttributeError): + if not silent: + raise + + +def open_if_exists(filename: str, mode: str = "rb") -> t.Optional[t.IO]: + """Returns a file descriptor for the filename if that file exists, + otherwise ``None``. + """ + if not os.path.isfile(filename): + return None + + return open(filename, mode) + + +def object_type_repr(obj: t.Any) -> str: + """Returns the name of the object's type. For some recognized + singletons the name of the object is returned instead. (For + example for `None` and `Ellipsis`). + """ + if obj is None: + return "None" + elif obj is Ellipsis: + return "Ellipsis" + + cls = type(obj) + + if cls.__module__ == "builtins": + return f"{cls.__name__} object" + + return f"{cls.__module__}.{cls.__name__} object" + + +def pformat(obj: t.Any) -> str: + """Format an object using :func:`pprint.pformat`.""" + from pprint import pformat # type: ignore + + return pformat(obj) + + +_http_re = re.compile( + r""" + ^ + ( + (https?://|www\.) # scheme or www + (([\w%-]+\.)+)? 
+        # subdomain
+        (
+            [a-z]{2,63}  # basic tld
+        |
+            xn--[\w%]{2,59}  # idna tld
+        )
+    |
+        ([\w%-]{2,63}\.)+  # basic domain
+        (com|net|int|edu|gov|org|info|mil)  # basic tld
+    |
+        (https?://)  # scheme
+        (
+            (([\d]{1,3})(\.[\d]{1,3}){3})  # IPv4
+        |
+            (\[([\da-f]{0,4}:){2}([\da-f]{0,4}:?){1,6}])  # IPv6
+        )
+    )
+    (?::[\d]{1,5})?  # port
+    (?:[/?#]\S*)?  # path, query, and fragment
+    $
+    """,
+    re.IGNORECASE | re.VERBOSE,
+)
+_email_re = re.compile(r"^\S+@\w[\w.-]*\.\w+$")
+
+
+def urlize(
+    text: str,
+    trim_url_limit: t.Optional[int] = None,
+    rel: t.Optional[str] = None,
+    target: t.Optional[str] = None,
+    extra_schemes: t.Optional[t.Iterable[str]] = None,
+) -> str:
+    """Convert URLs in text into clickable links.
+
+    This may not recognize links in some situations. Usually, a more
+    comprehensive formatter, such as a Markdown library, is a better
+    choice.
+
+    Works on ``http://``, ``https://``, ``www.``, ``mailto:``, and email
+    addresses. Links with trailing punctuation (periods, commas, closing
+    parentheses) and leading punctuation (opening parentheses) are
+    recognized excluding the punctuation. Email addresses that include
+    header fields are not recognized (for example,
+    ``mailto:address@example.com?cc=copy@example.com``).
+
+    :param text: Original text containing URLs to link.
+    :param trim_url_limit: Shorten displayed URL values to this length.
+    :param target: Add the ``target`` attribute to links.
+    :param rel: Add the ``rel`` attribute to links.
+    :param extra_schemes: Recognize URLs that start with these schemes
+        in addition to the default behavior.
+
+    .. versionchanged:: 3.0
+        The ``extra_schemes`` parameter was added.
+
+    .. versionchanged:: 3.0
+        Generate ``https://`` links for URLs without a scheme.
+
+    .. versionchanged:: 3.0
+        The parsing rules were updated. Recognize email addresses with
+        or without the ``mailto:`` scheme. Validate IP addresses. Ignore
+        parentheses and brackets in more cases.
+    """
+    if trim_url_limit is not None:
+
+        def trim_url(x: str) -> str:
+            if len(x) > trim_url_limit:  # type: ignore
+                return f"{x[:trim_url_limit]}..."
+
+            return x
+
+    else:
+
+        def trim_url(x: str) -> str:
+            return x
+
+    words = re.split(r"(\s+)", str(markupsafe.escape(text)))
+    rel_attr = f' rel="{markupsafe.escape(rel)}"' if rel else ""
+    target_attr = f' target="{markupsafe.escape(target)}"' if target else ""
+
+    for i, word in enumerate(words):
+        head, middle, tail = "", word, ""
+        match = re.match(r"^([(<]|&lt;)+", middle)
+
+        if match:
+            head = match.group()
+            middle = middle[match.end() :]
+
+        # Unlike lead, which is anchored to the start of the string,
+        # need to check that the string ends with any of the characters
+        # before trying to match all of them, to avoid backtracking.
+        if middle.endswith((")", ">", ".", ",", "\n", "&gt;")):
+            match = re.search(r"([)>.,\n]|&gt;)+$", middle)
+
+            if match:
+                tail = match.group()
+                middle = middle[: match.start()]
+
+        # Prefer balancing parentheses in URLs instead of ignoring a
+        # trailing character.
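+        # (For example, "https://example.com/Foo_(bar)" keeps its trailing
+        # ")" because the parenthesis counts inside the URL balance out.)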
+        for start_char, end_char in ("(", ")"), ("<", ">"), ("&lt;", "&gt;"):
+            start_count = middle.count(start_char)
+
+            if start_count <= middle.count(end_char):
+                # Balanced, or lighter on the left
+                continue
+
+            # Move as many as possible from the tail to balance
+            for _ in range(min(start_count, tail.count(end_char))):
+                end_index = tail.index(end_char) + len(end_char)
+                # Move anything in the tail before the end char too
+                middle += tail[:end_index]
+                tail = tail[end_index:]
+
+        if _http_re.match(middle):
+            if middle.startswith("https://") or middle.startswith("http://"):
+                middle = (
+                    f'<a href="{middle}"{rel_attr}{target_attr}>{trim_url(middle)}</a>'
+                )
+            else:
+                middle = (
+                    f'<a href="https://{middle}"{rel_attr}{target_attr}>'
+                    f"{trim_url(middle)}</a>"
+                )
+
+        elif middle.startswith("mailto:") and _email_re.match(middle[7:]):
+            middle = f'<a href="{middle}">{middle[7:]}</a>'
+
+        elif (
+            "@" in middle
+            and not middle.startswith("www.")
+            and ":" not in middle
+            and _email_re.match(middle)
+        ):
+            middle = f'<a href="mailto:{middle}">{middle}</a>'
+
+        elif extra_schemes is not None:
+            for scheme in extra_schemes:
+                if middle != scheme and middle.startswith(scheme):
+                    middle = f'<a href="{middle}"{rel_attr}{target_attr}>{middle}</a>'
+
+        words[i] = f"{head}{middle}{tail}"
+
+    return "".join(words)
+
+
+def generate_lorem_ipsum(
+    n: int = 5, html: bool = True, min: int = 20, max: int = 100
+) -> str:
+    """Generate some lorem ipsum for the template."""
+    from .constants import LOREM_IPSUM_WORDS
+
+    words = LOREM_IPSUM_WORDS.split()
+    result = []
+
+    for _ in range(n):
+        next_capitalized = True
+        last_comma = last_fullstop = 0
+        word = None
+        last = None
+        p = []
+
+        # each paragraph contains 20 to 100 words.
+        for idx, _ in enumerate(range(randrange(min, max))):
+            while True:
+                word = choice(words)
+                if word != last:
+                    last = word
+                    break
+            if next_capitalized:
+                word = word.capitalize()
+                next_capitalized = False
+            # add commas
+            if idx - randrange(3, 8) > last_comma:
+                last_comma = idx
+                last_fullstop += 2
+                word += ","
+            # add end of sentences
+            if idx - randrange(10, 20) > last_fullstop:
+                last_comma = last_fullstop = idx
+                word += "."
+                next_capitalized = True
+            p.append(word)
+
+        # ensure that the paragraph ends with a dot.
+        p_str = " ".join(p)
+
+        if p_str.endswith(","):
+            p_str = p_str[:-1] + "."
+        elif not p_str.endswith("."):
+            p_str += "."
+
+        result.append(p_str)
+
+    if not html:
+        return "\n\n".join(result)
+    return markupsafe.Markup(
+        "\n".join(f"<p>{markupsafe.escape(x)}</p>" for x in result)
+    )
+
+
+def url_quote(obj: t.Any, charset: str = "utf-8", for_qs: bool = False) -> str:
+    """Quote a string for use in a URL using the given charset.
+
+    :param obj: String or bytes to quote. Other types are converted to
+        string then encoded to bytes using the given charset.
+    :param charset: Encode text to bytes using this charset.
+    :param for_qs: Quote "/" and use "+" for spaces.
+    """
+    if not isinstance(obj, bytes):
+        if not isinstance(obj, str):
+            obj = str(obj)
+
+        obj = obj.encode(charset)
+
+    safe = b"" if for_qs else b"/"
+    rv = quote_from_bytes(obj, safe)
+
+    if for_qs:
+        rv = rv.replace("%20", "+")
+
+    return rv
+
+
+@abc.MutableMapping.register
+class LRUCache:
+    """A simple LRU Cache implementation."""
+
+    # this is fast for small capacities (something below 1000) but doesn't
+    # scale. But as long as it's only used as storage for templates this
+    # won't do any harm.
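+    # Usage sketch:
+    #
+    #     cache = LRUCache(capacity=2)
+    #     cache["a"] = 1
+    #     cache["b"] = 2
+    #     cache["c"] = 3  # capacity reached: evicts "a", the least
+    #                     # recently used key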
+ + def __init__(self, capacity: int) -> None: + self.capacity = capacity + self._mapping: t.Dict[t.Any, t.Any] = {} + self._queue: "te.Deque[t.Any]" = deque() + self._postinit() + + def _postinit(self) -> None: + # alias all queue methods for faster lookup + self._popleft = self._queue.popleft + self._pop = self._queue.pop + self._remove = self._queue.remove + self._wlock = Lock() + self._append = self._queue.append + + def __getstate__(self) -> t.Mapping[str, t.Any]: + return { + "capacity": self.capacity, + "_mapping": self._mapping, + "_queue": self._queue, + } + + def __setstate__(self, d: t.Mapping[str, t.Any]) -> None: + self.__dict__.update(d) + self._postinit() + + def __getnewargs__(self) -> t.Tuple: + return (self.capacity,) + + def copy(self) -> "LRUCache": + """Return a shallow copy of the instance.""" + rv = self.__class__(self.capacity) + rv._mapping.update(self._mapping) + rv._queue.extend(self._queue) + return rv + + def get(self, key: t.Any, default: t.Any = None) -> t.Any: + """Return an item from the cache dict or `default`""" + try: + return self[key] + except KeyError: + return default + + def setdefault(self, key: t.Any, default: t.Any = None) -> t.Any: + """Set `default` if the key is not in the cache otherwise + leave unchanged. Return the value of this key. + """ + try: + return self[key] + except KeyError: + self[key] = default + return default + + def clear(self) -> None: + """Clear the cache.""" + with self._wlock: + self._mapping.clear() + self._queue.clear() + + def __contains__(self, key: t.Any) -> bool: + """Check if a key exists in this cache.""" + return key in self._mapping + + def __len__(self) -> int: + """Return the current size of the cache.""" + return len(self._mapping) + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self._mapping!r}>" + + def __getitem__(self, key: t.Any) -> t.Any: + """Get an item from the cache. Moves the item up so that it has the + highest priority then. + + Raise a `KeyError` if it does not exist. + """ + with self._wlock: + rv = self._mapping[key] + + if self._queue[-1] != key: + try: + self._remove(key) + except ValueError: + # if something removed the key from the container + # when we read, ignore the ValueError that we would + # get otherwise. + pass + + self._append(key) + + return rv + + def __setitem__(self, key: t.Any, value: t.Any) -> None: + """Sets the value for an item. Moves the item up so that it + has the highest priority then. + """ + with self._wlock: + if key in self._mapping: + self._remove(key) + elif len(self._mapping) == self.capacity: + del self._mapping[self._popleft()] + + self._append(key) + self._mapping[key] = value + + def __delitem__(self, key: t.Any) -> None: + """Remove an item from the cache dict. + Raise a `KeyError` if it does not exist. 
+ """ + with self._wlock: + del self._mapping[key] + + try: + self._remove(key) + except ValueError: + pass + + def items(self) -> t.Iterable[t.Tuple[t.Any, t.Any]]: + """Return a list of items.""" + result = [(key, self._mapping[key]) for key in list(self._queue)] + result.reverse() + return result + + def values(self) -> t.Iterable[t.Any]: + """Return a list of all values.""" + return [x[1] for x in self.items()] + + def keys(self) -> t.Iterable[t.Any]: + """Return a list of all keys ordered by most recent usage.""" + return list(self) + + def __iter__(self) -> t.Iterator[t.Any]: + return reversed(tuple(self._queue)) + + def __reversed__(self) -> t.Iterator[t.Any]: + """Iterate over the keys in the cache dict, oldest items + coming first. + """ + return iter(tuple(self._queue)) + + __copy__ = copy + + +def select_autoescape( + enabled_extensions: t.Collection[str] = ("html", "htm", "xml"), + disabled_extensions: t.Collection[str] = (), + default_for_string: bool = True, + default: bool = False, +) -> t.Callable[[t.Optional[str]], bool]: + """Intelligently sets the initial value of autoescaping based on the + filename of the template. This is the recommended way to configure + autoescaping if you do not want to write a custom function yourself. + + If you want to enable it for all templates created from strings or + for all templates with `.html` and `.xml` extensions:: + + from jinja2 import Environment, select_autoescape + env = Environment(autoescape=select_autoescape( + enabled_extensions=('html', 'xml'), + default_for_string=True, + )) + + Example configuration to turn it on at all times except if the template + ends with `.txt`:: + + from jinja2 import Environment, select_autoescape + env = Environment(autoescape=select_autoescape( + disabled_extensions=('txt',), + default_for_string=True, + default=True, + )) + + The `enabled_extensions` is an iterable of all the extensions that + autoescaping should be enabled for. Likewise `disabled_extensions` is + a list of all templates it should be disabled for. If a template is + loaded from a string then the default from `default_for_string` is used. + If nothing matches then the initial value of autoescaping is set to the + value of `default`. + + For security reasons this function operates case insensitive. + + .. versionadded:: 2.9 + """ + enabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in enabled_extensions) + disabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in disabled_extensions) + + def autoescape(template_name: t.Optional[str]) -> bool: + if template_name is None: + return default_for_string + template_name = template_name.lower() + if template_name.endswith(enabled_patterns): + return True + if template_name.endswith(disabled_patterns): + return False + return default + + return autoescape + + +def htmlsafe_json_dumps( + obj: t.Any, dumps: t.Optional[t.Callable[..., str]] = None, **kwargs: t.Any +) -> markupsafe.Markup: + """Serialize an object to a string of JSON with :func:`json.dumps`, + then replace HTML-unsafe characters with Unicode escapes and mark + the result safe with :class:`~markupsafe.Markup`. + + This is available in templates as the ``|tojson`` filter. + + The following characters are escaped: ``<``, ``>``, ``&``, ``'``. + + The returned string is safe to render in HTML documents and + ``<script>`` tags. The exception is in HTML attributes that are + double quoted; either use single quotes or the ``|forceescape`` + filter. + + :param obj: The object to serialize to JSON. 
+ :param dumps: The ``dumps`` function to use. Defaults to + ``env.policies["json.dumps_function"]``, which defaults to + :func:`json.dumps`. + :param kwargs: Extra arguments to pass to ``dumps``. Merged onto + ``env.policies["json.dumps_kwargs"]``. + + .. versionchanged:: 3.0 + The ``dumper`` parameter is renamed to ``dumps``. + + .. versionadded:: 2.9 + """ + if dumps is None: + dumps = json.dumps + + return markupsafe.Markup( + dumps(obj, **kwargs) + .replace("<", "\\u003c") + .replace(">", "\\u003e") + .replace("&", "\\u0026") + .replace("'", "\\u0027") + ) + + +class Cycler: + """Cycle through values by yield them one at a time, then restarting + once the end is reached. Available as ``cycler`` in templates. + + Similar to ``loop.cycle``, but can be used outside loops or across + multiple loops. For example, render a list of folders and files in a + list, alternating giving them "odd" and "even" classes. + + .. code-block:: html+jinja + + {% set row_class = cycler("odd", "even") %} + <ul class="browser"> + {% for folder in folders %} + <li class="folder {{ row_class.next() }}">{{ folder }} + {% endfor %} + {% for file in files %} + <li class="file {{ row_class.next() }}">{{ file }} + {% endfor %} + </ul> + + :param items: Each positional argument will be yielded in the order + given for each cycle. + + .. versionadded:: 2.1 + """ + + def __init__(self, *items: t.Any) -> None: + if not items: + raise RuntimeError("at least one item has to be provided") + self.items = items + self.pos = 0 + + def reset(self) -> None: + """Resets the current item to the first item.""" + self.pos = 0 + + @property + def current(self) -> t.Any: + """Return the current item. Equivalent to the item that will be + returned next time :meth:`next` is called. + """ + return self.items[self.pos] + + def next(self) -> t.Any: + """Return the current item, then advance :attr:`current` to the + next item. + """ + rv = self.current + self.pos = (self.pos + 1) % len(self.items) + return rv + + __next__ = next + + +class Joiner: + """A joining helper for templates.""" + + def __init__(self, sep: str = ", ") -> None: + self.sep = sep + self.used = False + + def __call__(self) -> str: + if not self.used: + self.used = True + return "" + return self.sep + + +class Namespace: + """A namespace object that can hold arbitrary attributes. It may be + initialized from a dictionary or with keyword arguments.""" + + def __init__(*args: t.Any, **kwargs: t.Any) -> None: # noqa: B902 + self, args = args[0], args[1:] + self.__attrs = dict(*args, **kwargs) + + def __getattribute__(self, name: str) -> t.Any: + # __class__ is needed for the awaitable check in async mode + if name in {"_Namespace__attrs", "__class__"}: + return object.__getattribute__(self, name) + try: + return self.__attrs[name] + except KeyError: + raise AttributeError(name) from None + + def __setitem__(self, name: str, value: t.Any) -> None: + self.__attrs[name] = value + + def __repr__(self) -> str: + return f"<Namespace {self.__attrs!r}>" diff --git a/backend/test/lib/python3.8/site-packages/jinja2/visitor.py b/backend/test/lib/python3.8/site-packages/jinja2/visitor.py new file mode 100644 index 0000000000000000000000000000000000000000..17c6aaba570742652f70bf1e7bf1a576c9d256ae --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/jinja2/visitor.py @@ -0,0 +1,92 @@ +"""API for traversing the AST nodes. Implemented by the compiler and +meta introspection. 
+""" +import typing as t + +from .nodes import Node + +if t.TYPE_CHECKING: + import typing_extensions as te + + class VisitCallable(te.Protocol): + def __call__(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: + ... + + +class NodeVisitor: + """Walks the abstract syntax tree and call visitor functions for every + node found. The visitor functions may return values which will be + forwarded by the `visit` method. + + Per default the visitor functions for the nodes are ``'visit_'`` + + class name of the node. So a `TryFinally` node visit function would + be `visit_TryFinally`. This behavior can be changed by overriding + the `get_visitor` function. If no visitor function exists for a node + (return value `None`) the `generic_visit` visitor is used instead. + """ + + def get_visitor(self, node: Node) -> "t.Optional[VisitCallable]": + """Return the visitor function for this node or `None` if no visitor + exists for this node. In that case the generic visit function is + used instead. + """ + return getattr(self, f"visit_{type(node).__name__}", None) + + def visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Visit a node.""" + f = self.get_visitor(node) + + if f is not None: + return f(node, *args, **kwargs) + + return self.generic_visit(node, *args, **kwargs) + + def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Called if no explicit visitor function exists for a node.""" + for child_node in node.iter_child_nodes(): + self.visit(child_node, *args, **kwargs) + + +class NodeTransformer(NodeVisitor): + """Walks the abstract syntax tree and allows modifications of nodes. + + The `NodeTransformer` will walk the AST and use the return value of the + visitor functions to replace or remove the old node. If the return + value of the visitor function is `None` the node will be removed + from the previous location otherwise it's replaced with the return + value. The return value may be the original node in which case no + replacement takes place. + """ + + def generic_visit(self, node: Node, *args: t.Any, **kwargs: t.Any) -> Node: + for field, old_value in node.iter_fields(): + if isinstance(old_value, list): + new_values = [] + for value in old_value: + if isinstance(value, Node): + value = self.visit(value, *args, **kwargs) + if value is None: + continue + elif not isinstance(value, Node): + new_values.extend(value) + continue + new_values.append(value) + old_value[:] = new_values + elif isinstance(old_value, Node): + new_node = self.visit(old_value, *args, **kwargs) + if new_node is None: + delattr(node, field) + else: + setattr(node, field, new_node) + return node + + def visit_list(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.List[Node]: + """As transformers may return lists in some places this method + can be used to enforce a list as return value. 
+        """
+        rv = self.visit(node, *args, **kwargs)
+
+        if not isinstance(rv, list):
+            return [rv]
+
+        return rv
diff --git a/backend/test/lib/python3.8/site-packages/markupsafe/__init__.py b/backend/test/lib/python3.8/site-packages/markupsafe/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..21d31960385611ad029826a36417fe50a17ca557
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/markupsafe/__init__.py
@@ -0,0 +1,304 @@
+import functools
+import re
+import string
+import sys
+import typing as t
+
+if t.TYPE_CHECKING:
+    import typing_extensions as te
+
+    class HasHTML(te.Protocol):
+        def __html__(self) -> str:
+            pass
+
+    _P = te.ParamSpec("_P")
+
+
+__version__ = "2.1.3"
+
+_strip_comments_re = re.compile(r"<!--.*?-->", re.DOTALL)
+_strip_tags_re = re.compile(r"<.*?>", re.DOTALL)
+
+
+def _simple_escaping_wrapper(func: "t.Callable[_P, str]") -> "t.Callable[_P, Markup]":
+    @functools.wraps(func)
+    def wrapped(self: "Markup", *args: "_P.args", **kwargs: "_P.kwargs") -> "Markup":
+        arg_list = _escape_argspec(list(args), enumerate(args), self.escape)
+        _escape_argspec(kwargs, kwargs.items(), self.escape)
+        return self.__class__(func(self, *arg_list, **kwargs))  # type: ignore[arg-type]
+
+    return wrapped  # type: ignore[return-value]
+
+
+class Markup(str):
+    """A string that is ready to be safely inserted into an HTML or XML
+    document, either because it was escaped or because it was marked
+    safe.
+
+    Passing an object to the constructor converts it to text and wraps
+    it to mark it safe without escaping. To escape the text, use the
+    :meth:`escape` class method instead.
+
+    >>> Markup("Hello, <em>World</em>!")
+    Markup('Hello, <em>World</em>!')
+    >>> Markup(42)
+    Markup('42')
+    >>> Markup.escape("Hello, <em>World</em>!")
+    Markup('Hello, &lt;em&gt;World&lt;/em&gt;!')
+
+    This implements the ``__html__()`` interface that some frameworks
+    use. Passing an object that implements ``__html__()`` will wrap the
+    output of that method, marking it safe.
+
+    >>> class Foo:
+    ...     def __html__(self):
+    ...         return '<a href="/foo">foo</a>'
+    ...
+    >>> Markup(Foo())
+    Markup('<a href="/foo">foo</a>')
+
+    This is a subclass of :class:`str`. It has the same methods, but
+    escapes their arguments and returns a ``Markup`` instance.
+
+    >>> Markup("<em>%s</em>") % ("foo & bar",)
+    Markup('<em>foo &amp; bar</em>')
+    >>> Markup("<em>Hello</em> ") + "<foo>"
+    Markup('<em>Hello</em> &lt;foo&gt;')
+    """
+
+    __slots__ = ()
+
+    def __new__(
+        cls, base: t.Any = "", encoding: t.Optional[str] = None, errors: str = "strict"
+    ) -> "te.Self":
+        if hasattr(base, "__html__"):
+            base = base.__html__()
+
+        if encoding is None:
+            return super().__new__(cls, base)
+
+        return super().__new__(cls, base, encoding, errors)
+
+    def __html__(self) -> "te.Self":
+        return self
+
+    def __add__(self, other: t.Union[str, "HasHTML"]) -> "te.Self":
+        if isinstance(other, str) or hasattr(other, "__html__"):
+            return self.__class__(super().__add__(self.escape(other)))
+
+        return NotImplemented
+
+    def __radd__(self, other: t.Union[str, "HasHTML"]) -> "te.Self":
+        if isinstance(other, str) or hasattr(other, "__html__"):
+            return self.escape(other).__add__(self)
+
+        return NotImplemented
+
+    def __mul__(self, num: "te.SupportsIndex") -> "te.Self":
+        if isinstance(num, int):
+            return self.__class__(super().__mul__(num))
+
+        return NotImplemented
+
+    __rmul__ = __mul__
+
+    def __mod__(self, arg: t.Any) -> "te.Self":
+        if isinstance(arg, tuple):
+            # a tuple of arguments, each wrapped
+            arg = tuple(_MarkupEscapeHelper(x, self.escape) for x in arg)
+        elif hasattr(type(arg), "__getitem__") and not isinstance(arg, str):
+            # a mapping of arguments, wrapped
+            arg = _MarkupEscapeHelper(arg, self.escape)
+        else:
+            # a single argument, wrapped with the helper and a tuple
+            arg = (_MarkupEscapeHelper(arg, self.escape),)
+
+        return self.__class__(super().__mod__(arg))
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({super().__repr__()})"
+
+    def join(self, seq: t.Iterable[t.Union[str, "HasHTML"]]) -> "te.Self":
+        return self.__class__(super().join(map(self.escape, seq)))
+
+    join.__doc__ = str.join.__doc__
+
+    def split(  # type: ignore[override]
+        self, sep: t.Optional[str] = None, maxsplit: int = -1
+    ) -> t.List["te.Self"]:
+        return [self.__class__(v) for v in super().split(sep, maxsplit)]
+
+    split.__doc__ = str.split.__doc__
+
+    def rsplit(  # type: ignore[override]
+        self, sep: t.Optional[str] = None, maxsplit: int = -1
+    ) -> t.List["te.Self"]:
+        return [self.__class__(v) for v in super().rsplit(sep, maxsplit)]
+
+    rsplit.__doc__ = str.rsplit.__doc__
+
+    def splitlines(  # type: ignore[override]
+        self, keepends: bool = False
+    ) -> t.List["te.Self"]:
+        return [self.__class__(v) for v in super().splitlines(keepends)]
+
+    splitlines.__doc__ = str.splitlines.__doc__
+
+    def unescape(self) -> str:
+        """Convert escaped markup back into a text string. This replaces
+        HTML entities with the characters they represent.
+
+        >>> Markup("Main &raquo; <em>About</em>").unescape()
+        'Main » <em>About</em>'
+        """
+        from html import unescape
+
+        return unescape(str(self))
+
+    def striptags(self) -> str:
+        """:meth:`unescape` the markup, remove tags, and normalize
+        whitespace to single spaces.
+
+        >>> Markup("Main &raquo;\t<em>About</em>").striptags()
+        'Main » About'
+        """
+        # Use two regexes to avoid ambiguous matches.
+        value = _strip_comments_re.sub("", self)
+        value = _strip_tags_re.sub("", value)
+        value = " ".join(value.split())
+        return self.__class__(value).unescape()
+
+    @classmethod
+    def escape(cls, s: t.Any) -> "te.Self":
+        """Escape a string. Calls :func:`escape` and ensures that for
+        subclasses the correct type is returned.
+ """ + rv = escape(s) + + if rv.__class__ is not cls: + return cls(rv) + + return rv # type: ignore[return-value] + + __getitem__ = _simple_escaping_wrapper(str.__getitem__) + capitalize = _simple_escaping_wrapper(str.capitalize) + title = _simple_escaping_wrapper(str.title) + lower = _simple_escaping_wrapper(str.lower) + upper = _simple_escaping_wrapper(str.upper) + replace = _simple_escaping_wrapper(str.replace) + ljust = _simple_escaping_wrapper(str.ljust) + rjust = _simple_escaping_wrapper(str.rjust) + lstrip = _simple_escaping_wrapper(str.lstrip) + rstrip = _simple_escaping_wrapper(str.rstrip) + center = _simple_escaping_wrapper(str.center) + strip = _simple_escaping_wrapper(str.strip) + translate = _simple_escaping_wrapper(str.translate) + expandtabs = _simple_escaping_wrapper(str.expandtabs) + swapcase = _simple_escaping_wrapper(str.swapcase) + zfill = _simple_escaping_wrapper(str.zfill) + casefold = _simple_escaping_wrapper(str.casefold) + + if sys.version_info >= (3, 9): + removeprefix = _simple_escaping_wrapper(str.removeprefix) + removesuffix = _simple_escaping_wrapper(str.removesuffix) + + def partition(self, sep: str) -> t.Tuple["te.Self", "te.Self", "te.Self"]: + l, s, r = super().partition(self.escape(sep)) + cls = self.__class__ + return cls(l), cls(s), cls(r) + + def rpartition(self, sep: str) -> t.Tuple["te.Self", "te.Self", "te.Self"]: + l, s, r = super().rpartition(self.escape(sep)) + cls = self.__class__ + return cls(l), cls(s), cls(r) + + def format(self, *args: t.Any, **kwargs: t.Any) -> "te.Self": + formatter = EscapeFormatter(self.escape) + return self.__class__(formatter.vformat(self, args, kwargs)) + + def format_map( # type: ignore[override] + self, map: t.Mapping[str, t.Any] + ) -> "te.Self": + formatter = EscapeFormatter(self.escape) + return self.__class__(formatter.vformat(self, (), map)) + + def __html_format__(self, format_spec: str) -> "te.Self": + if format_spec: + raise ValueError("Unsupported format specification for Markup.") + + return self + + +class EscapeFormatter(string.Formatter): + __slots__ = ("escape",) + + def __init__(self, escape: t.Callable[[t.Any], Markup]) -> None: + self.escape = escape + super().__init__() + + def format_field(self, value: t.Any, format_spec: str) -> str: + if hasattr(value, "__html_format__"): + rv = value.__html_format__(format_spec) + elif hasattr(value, "__html__"): + if format_spec: + raise ValueError( + f"Format specifier {format_spec} given, but {type(value)} does not" + " define __html_format__. A class that defines __html__ must define" + " __html_format__ to work with format specifiers." + ) + rv = value.__html__() + else: + # We need to make sure the format spec is str here as + # otherwise the wrong callback methods are invoked. 
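+            # (The result is escaped below before ``vformat`` stitches it
+            # into the final formatted string.)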
+            rv = string.Formatter.format_field(self, value, str(format_spec))
+        return str(self.escape(rv))
+
+
+_ListOrDict = t.TypeVar("_ListOrDict", list, dict)
+
+
+def _escape_argspec(
+    obj: _ListOrDict, iterable: t.Iterable[t.Any], escape: t.Callable[[t.Any], Markup]
+) -> _ListOrDict:
+    """Helper for various string-wrapped functions."""
+    for key, value in iterable:
+        if isinstance(value, str) or hasattr(value, "__html__"):
+            obj[key] = escape(value)
+
+    return obj
+
+
+class _MarkupEscapeHelper:
+    """Helper for :meth:`Markup.__mod__`."""
+
+    __slots__ = ("obj", "escape")
+
+    def __init__(self, obj: t.Any, escape: t.Callable[[t.Any], Markup]) -> None:
+        self.obj = obj
+        self.escape = escape
+
+    def __getitem__(self, item: t.Any) -> "te.Self":
+        return self.__class__(self.obj[item], self.escape)
+
+    def __str__(self) -> str:
+        return str(self.escape(self.obj))
+
+    def __repr__(self) -> str:
+        return str(self.escape(repr(self.obj)))
+
+    def __int__(self) -> int:
+        return int(self.obj)
+
+    def __float__(self) -> float:
+        return float(self.obj)
+
+
+# circular import
+try:
+    from ._speedups import escape as escape
+    from ._speedups import escape_silent as escape_silent
+    from ._speedups import soft_str as soft_str
+except ImportError:
+    from ._native import escape as escape
+    from ._native import escape_silent as escape_silent  # noqa: F401
+    from ._native import soft_str as soft_str  # noqa: F401
diff --git a/backend/test/lib/python3.8/site-packages/markupsafe/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/markupsafe/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..46a3b9a36b925de16c6ca6fdcb6d315abcacbd80
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/markupsafe/__pycache__/__init__.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/markupsafe/__pycache__/_native.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/markupsafe/__pycache__/_native.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5c42945bdda14588d908a596ce8b0c248c1da325
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/markupsafe/__pycache__/_native.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/markupsafe/_native.py b/backend/test/lib/python3.8/site-packages/markupsafe/_native.py
new file mode 100644
index 0000000000000000000000000000000000000000..8117b2716d110074d9a81365c59343e81396b7f5
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/markupsafe/_native.py
@@ -0,0 +1,63 @@
+import typing as t
+
+from . import Markup
+
+
+def escape(s: t.Any) -> Markup:
+    """Replace the characters ``&``, ``<``, ``>``, ``'``, and ``"`` in
+    the string with HTML-safe sequences. Use this if you need to display
+    text that might contain such characters in HTML.
+
+    If the object has an ``__html__`` method, it is called and the
+    return value is assumed to already be safe for HTML.
+
+    :param s: An object to be converted to a string and escaped.
+    :return: A :class:`Markup` string with the escaped text.
+    """
+    if hasattr(s, "__html__"):
+        return Markup(s.__html__())
+
+    return Markup(
+        str(s)
+        .replace("&", "&amp;")
+        .replace(">", "&gt;")
+        .replace("<", "&lt;")
+        .replace("'", "&#39;")
+        .replace('"', "&#34;")
+    )
+
+
+def escape_silent(s: t.Optional[t.Any]) -> Markup:
+    """Like :func:`escape` but treats ``None`` as the empty string.
+ Useful with optional values, as otherwise you get the string + ``'None'`` when the value is ``None``. + + >>> escape(None) + Markup('None') + >>> escape_silent(None) + Markup('') + """ + if s is None: + return Markup() + + return escape(s) + + +def soft_str(s: t.Any) -> str: + """Convert an object to a string if it isn't already. This preserves + a :class:`Markup` string rather than converting it back to a basic + string, so it will still be marked as safe and won't be escaped + again. + + >>> value = escape("<User 1>") + >>> value + Markup('<User 1>') + >>> escape(str(value)) + Markup('&lt;User 1&gt;') + >>> escape(soft_str(value)) + Markup('<User 1>') + """ + if not isinstance(s, str): + return str(s) + + return s diff --git a/backend/test/lib/python3.8/site-packages/markupsafe/_speedups.c b/backend/test/lib/python3.8/site-packages/markupsafe/_speedups.c new file mode 100644 index 0000000000000000000000000000000000000000..3c463fb82d53e9a9616acfbbece0eb3be6d0d5e7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/markupsafe/_speedups.c @@ -0,0 +1,320 @@ +#include <Python.h> + +static PyObject* markup; + +static int +init_constants(void) +{ + PyObject *module; + + /* import markup type so that we can mark the return value */ + module = PyImport_ImportModule("markupsafe"); + if (!module) + return 0; + markup = PyObject_GetAttrString(module, "Markup"); + Py_DECREF(module); + + return 1; +} + +#define GET_DELTA(inp, inp_end, delta) \ + while (inp < inp_end) { \ + switch (*inp++) { \ + case '"': \ + case '\'': \ + case '&': \ + delta += 4; \ + break; \ + case '<': \ + case '>': \ + delta += 3; \ + break; \ + } \ + } + +#define DO_ESCAPE(inp, inp_end, outp) \ + { \ + Py_ssize_t ncopy = 0; \ + while (inp < inp_end) { \ + switch (*inp) { \ + case '"': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = '#'; \ + *outp++ = '3'; \ + *outp++ = '4'; \ + *outp++ = ';'; \ + break; \ + case '\'': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = '#'; \ + *outp++ = '3'; \ + *outp++ = '9'; \ + *outp++ = ';'; \ + break; \ + case '&': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'a'; \ + *outp++ = 'm'; \ + *outp++ = 'p'; \ + *outp++ = ';'; \ + break; \ + case '<': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'l'; \ + *outp++ = 't'; \ + *outp++ = ';'; \ + break; \ + case '>': \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + outp += ncopy; ncopy = 0; \ + *outp++ = '&'; \ + *outp++ = 'g'; \ + *outp++ = 't'; \ + *outp++ = ';'; \ + break; \ + default: \ + ncopy++; \ + } \ + inp++; \ + } \ + memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \ + } + +static PyObject* +escape_unicode_kind1(PyUnicodeObject *in) +{ + Py_UCS1 *inp = PyUnicode_1BYTE_DATA(in); + Py_UCS1 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS1 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, + PyUnicode_IS_ASCII(in) ? 
127 : 255); + if (!out) + return NULL; + + inp = PyUnicode_1BYTE_DATA(in); + outp = PyUnicode_1BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + +static PyObject* +escape_unicode_kind2(PyUnicodeObject *in) +{ + Py_UCS2 *inp = PyUnicode_2BYTE_DATA(in); + Py_UCS2 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS2 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 65535); + if (!out) + return NULL; + + inp = PyUnicode_2BYTE_DATA(in); + outp = PyUnicode_2BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + + +static PyObject* +escape_unicode_kind4(PyUnicodeObject *in) +{ + Py_UCS4 *inp = PyUnicode_4BYTE_DATA(in); + Py_UCS4 *inp_end = inp + PyUnicode_GET_LENGTH(in); + Py_UCS4 *outp; + PyObject *out; + Py_ssize_t delta = 0; + + GET_DELTA(inp, inp_end, delta); + if (!delta) { + Py_INCREF(in); + return (PyObject*)in; + } + + out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 1114111); + if (!out) + return NULL; + + inp = PyUnicode_4BYTE_DATA(in); + outp = PyUnicode_4BYTE_DATA(out); + DO_ESCAPE(inp, inp_end, outp); + return out; +} + +static PyObject* +escape_unicode(PyUnicodeObject *in) +{ + if (PyUnicode_READY(in)) + return NULL; + + switch (PyUnicode_KIND(in)) { + case PyUnicode_1BYTE_KIND: + return escape_unicode_kind1(in); + case PyUnicode_2BYTE_KIND: + return escape_unicode_kind2(in); + case PyUnicode_4BYTE_KIND: + return escape_unicode_kind4(in); + } + assert(0); /* shouldn't happen */ + return NULL; +} + +static PyObject* +escape(PyObject *self, PyObject *text) +{ + static PyObject *id_html; + PyObject *s = NULL, *rv = NULL, *html; + + if (id_html == NULL) { + id_html = PyUnicode_InternFromString("__html__"); + if (id_html == NULL) { + return NULL; + } + } + + /* we don't have to escape integers, bools or floats */ + if (PyLong_CheckExact(text) || + PyFloat_CheckExact(text) || PyBool_Check(text) || + text == Py_None) + return PyObject_CallFunctionObjArgs(markup, text, NULL); + + /* if the object has an __html__ method that performs the escaping */ + html = PyObject_GetAttr(text ,id_html); + if (html) { + s = PyObject_CallObject(html, NULL); + Py_DECREF(html); + if (s == NULL) { + return NULL; + } + /* Convert to Markup object */ + rv = PyObject_CallFunctionObjArgs(markup, (PyObject*)s, NULL); + Py_DECREF(s); + return rv; + } + + /* otherwise make the object unicode if it isn't, then escape */ + PyErr_Clear(); + if (!PyUnicode_Check(text)) { + PyObject *unicode = PyObject_Str(text); + if (!unicode) + return NULL; + s = escape_unicode((PyUnicodeObject*)unicode); + Py_DECREF(unicode); + } + else + s = escape_unicode((PyUnicodeObject*)text); + + /* convert the unicode string into a markup object. */ + rv = PyObject_CallFunctionObjArgs(markup, (PyObject*)s, NULL); + Py_DECREF(s); + return rv; +} + + +static PyObject* +escape_silent(PyObject *self, PyObject *text) +{ + if (text != Py_None) + return escape(self, text); + return PyObject_CallFunctionObjArgs(markup, NULL); +} + + +static PyObject* +soft_str(PyObject *self, PyObject *s) +{ + if (!PyUnicode_Check(s)) + return PyObject_Str(s); + Py_INCREF(s); + return s; +} + + +static PyMethodDef module_methods[] = { + { + "escape", + (PyCFunction)escape, + METH_O, + "Replace the characters ``&``, ``<``, ``>``, ``'``, and ``\"`` in" + " the string with HTML-safe sequences. 
Use this if you need to display" + " text that might contain such characters in HTML.\n\n" + "If the object has an ``__html__`` method, it is called and the" + " return value is assumed to already be safe for HTML.\n\n" + ":param s: An object to be converted to a string and escaped.\n" + ":return: A :class:`Markup` string with the escaped text.\n" + }, + { + "escape_silent", + (PyCFunction)escape_silent, + METH_O, + "Like :func:`escape` but treats ``None`` as the empty string." + " Useful with optional values, as otherwise you get the string" + " ``'None'`` when the value is ``None``.\n\n" + ">>> escape(None)\n" + "Markup('None')\n" + ">>> escape_silent(None)\n" + "Markup('')\n" + }, + { + "soft_str", + (PyCFunction)soft_str, + METH_O, + "Convert an object to a string if it isn't already. This preserves" + " a :class:`Markup` string rather than converting it back to a basic" + " string, so it will still be marked as safe and won't be escaped" + " again.\n\n" + ">>> value = escape(\"<User 1>\")\n" + ">>> value\n" + "Markup('<User 1>')\n" + ">>> escape(str(value))\n" + "Markup('&lt;User 1&gt;')\n" + ">>> escape(soft_str(value))\n" + "Markup('<User 1>')\n" + }, + {NULL, NULL, 0, NULL} /* Sentinel */ +}; + +static struct PyModuleDef module_definition = { + PyModuleDef_HEAD_INIT, + "markupsafe._speedups", + NULL, + -1, + module_methods, + NULL, + NULL, + NULL, + NULL +}; + +PyMODINIT_FUNC +PyInit__speedups(void) +{ + if (!init_constants()) + return NULL; + + return PyModule_Create(&module_definition); +} diff --git a/backend/test/lib/python3.8/site-packages/markupsafe/_speedups.cpython-38-x86_64-linux-gnu.so b/backend/test/lib/python3.8/site-packages/markupsafe/_speedups.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000000000000000000000000000000000000..817057dcc70f056cbfaab5aabb578da8d0825523 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/markupsafe/_speedups.cpython-38-x86_64-linux-gnu.so differ diff --git a/backend/test/lib/python3.8/site-packages/markupsafe/_speedups.pyi b/backend/test/lib/python3.8/site-packages/markupsafe/_speedups.pyi new file mode 100644 index 0000000000000000000000000000000000000000..f673240f6d299917d829a5fdbf85d25d84dd0a72 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/markupsafe/_speedups.pyi @@ -0,0 +1,9 @@ +from typing import Any +from typing import Optional + +from . import Markup + +def escape(s: Any) -> Markup: ... +def escape_silent(s: Optional[Any]) -> Markup: ... +def soft_str(s: Any) -> str: ... +def soft_unicode(s: Any) -> str: ... 
diff --git a/backend/test/lib/python3.8/site-packages/markupsafe/py.typed b/backend/test/lib/python3.8/site-packages/markupsafe/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/INSTALLER b/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/LICENSE.txt b/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..737fec5c5352af3d9a6a47a0670da4bdb52c5725 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/LICENSE.txt @@ -0,0 +1,20 @@ +Copyright (c) 2008-2019 The pip developers (see AUTHORS.txt file) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/METADATA b/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..5183c4e68338e1f31d47c924c974b5d85f917770 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/METADATA @@ -0,0 +1,84 @@ +Metadata-Version: 2.1 +Name: pip +Version: 20.0.2 +Summary: The PyPA recommended tool for installing Python packages. 
+Home-page: https://pip.pypa.io/ +Author: The pip developers +Author-email: pypa-dev@groups.google.com +License: MIT +Project-URL: Documentation, https://pip.pypa.io +Project-URL: Source, https://github.com/pypa/pip +Keywords: distutils easy_install egg setuptools wheel virtualenv +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Build Tools +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.* + +pip - The Python Package Installer +================================== + +.. image:: https://img.shields.io/pypi/v/pip.svg + :target: https://pypi.org/project/pip/ + +.. image:: https://readthedocs.org/projects/pip/badge/?version=latest + :target: https://pip.pypa.io/en/latest + +pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes. + +Please take a look at our documentation for how to install and use pip: + +* `Installation`_ +* `Usage`_ + +Updates are released regularly, with a new version every 3 months. More details can be found in our documentation: + +* `Release notes`_ +* `Release process`_ + +If you find bugs, need help, or want to talk to the developers please use our mailing lists or chat rooms: + +* `Issue tracking`_ +* `Discourse channel`_ +* `User IRC`_ + +If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms: + +* `GitHub page`_ +* `Dev documentation`_ +* `Dev mailing list`_ +* `Dev IRC`_ + +Code of Conduct +--------------- + +Everyone interacting in the pip project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. + +.. _package installer: https://packaging.python.org/guides/tool-recommendations/ +.. _Python Package Index: https://pypi.org +.. _Installation: https://pip.pypa.io/en/stable/installing.html +.. _Usage: https://pip.pypa.io/en/stable/ +.. _Release notes: https://pip.pypa.io/en/stable/news.html +.. _Release process: https://pip.pypa.io/en/latest/development/release-process/ +.. _GitHub page: https://github.com/pypa/pip +.. _Dev documentation: https://pip.pypa.io/en/latest/development +.. _Issue tracking: https://github.com/pypa/pip/issues +.. _Discourse channel: https://discuss.python.org/c/packaging +.. _Dev mailing list: https://groups.google.com/forum/#!forum/pypa-dev +.. _User IRC: https://webchat.freenode.net/?channels=%23pypa +.. _Dev IRC: https://webchat.freenode.net/?channels=%23pypa-dev +.. 
_PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ + + diff --git a/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/RECORD b/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..58529acad26756b34263501423f262f6918e1bea --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/RECORD @@ -0,0 +1,246 @@ +../../../bin/pip,sha256=o6OaALnQBC5f3jV3pnSa6-F9lIonXyWjYfEzVKP014Y,258 +../../../bin/pip3,sha256=o6OaALnQBC5f3jV3pnSa6-F9lIonXyWjYfEzVKP014Y,258 +../../../bin/pip3.8,sha256=o6OaALnQBC5f3jV3pnSa6-F9lIonXyWjYfEzVKP014Y,258 +pip-20.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip-20.0.2.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090 +pip-20.0.2.dist-info/METADATA,sha256=MSgjT2JTt8usp4Hopp5AGEmc-7sKR2Jd7HTMJqCoRhw,3352 +pip-20.0.2.dist-info/RECORD,, +pip-20.0.2.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +pip-20.0.2.dist-info/entry_points.txt,sha256=HtfDOwpUlr9s73jqLQ6wF9V0_0qvUXJwCBz7Vwx0Ue0,125 +pip-20.0.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip/__init__.py,sha256=U1AM82iShMaw90K6Yq0Q2-AZ1EsOcqQLQRB-rxwFtII,455 +pip/__main__.py,sha256=NM95x7KuQr-lwPoTjAC0d_QzLJsJjpmAoxZg0mP8s98,632 +pip/__pycache__/__init__.cpython-38.pyc,, +pip/__pycache__/__main__.cpython-38.pyc,, +pip/_internal/__init__.py,sha256=j5fiII6yCeZjpW7_7wAVRMM4DwE-gyARGVU4yAADDeE,517 +pip/_internal/__pycache__/__init__.cpython-38.pyc,, +pip/_internal/__pycache__/build_env.cpython-38.pyc,, +pip/_internal/__pycache__/cache.cpython-38.pyc,, +pip/_internal/__pycache__/configuration.cpython-38.pyc,, +pip/_internal/__pycache__/exceptions.cpython-38.pyc,, +pip/_internal/__pycache__/legacy_resolve.cpython-38.pyc,, +pip/_internal/__pycache__/locations.cpython-38.pyc,, +pip/_internal/__pycache__/main.cpython-38.pyc,, +pip/_internal/__pycache__/pep425tags.cpython-38.pyc,, +pip/_internal/__pycache__/pyproject.cpython-38.pyc,, +pip/_internal/__pycache__/self_outdated_check.cpython-38.pyc,, +pip/_internal/__pycache__/wheel_builder.cpython-38.pyc,, +pip/_internal/build_env.py,sha256=--aNgzIdYrCOclHMwoAdpclCpfdFE_jooRuCy5gczwg,7532 +pip/_internal/cache.py,sha256=16GrnDRLBQNlfKWIuIF6Sa-EFS78kez_w1WEjT3ykTI,11605 +pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132 +pip/_internal/cli/__pycache__/__init__.cpython-38.pyc,, +pip/_internal/cli/__pycache__/autocompletion.cpython-38.pyc,, +pip/_internal/cli/__pycache__/base_command.cpython-38.pyc,, +pip/_internal/cli/__pycache__/cmdoptions.cpython-38.pyc,, +pip/_internal/cli/__pycache__/command_context.cpython-38.pyc,, +pip/_internal/cli/__pycache__/main.cpython-38.pyc,, +pip/_internal/cli/__pycache__/main_parser.cpython-38.pyc,, +pip/_internal/cli/__pycache__/parser.cpython-38.pyc,, +pip/_internal/cli/__pycache__/req_command.cpython-38.pyc,, +pip/_internal/cli/__pycache__/status_codes.cpython-38.pyc,, +pip/_internal/cli/autocompletion.py,sha256=ekGNtcDI0p7rFVc-7s4T9Tbss4Jgb7vsB649XJIblRg,6547 +pip/_internal/cli/base_command.py,sha256=v6yl5XNRqye8BT9ep8wvpMu6lylP_Hu6D95r_HqbpbQ,7948 +pip/_internal/cli/cmdoptions.py,sha256=f1TVHuu_fR3lLlMo6b367H_GsWFv26tLI9cAS-kZfE0,28114 +pip/_internal/cli/command_context.py,sha256=ygMVoTy2jpNilKT-6416gFSQpaBtrKRBbVbi2fy__EU,975 +pip/_internal/cli/main.py,sha256=8iq3bHe5lxJTB2EvKOqZ38NS0MmoS79_S1kgj4QuH8A,2610 
+pip/_internal/cli/main_parser.py,sha256=W9OWeryh7ZkqELohaFh0Ko9sB98ZkSeDmnYbOZ1imBc,2819 +pip/_internal/cli/parser.py,sha256=O9djTuYQuSfObiY-NU6p4MJCfWsRUnDpE2YGA_fwols,9487 +pip/_internal/cli/req_command.py,sha256=pAUAglpTn0mUA6lRs7KN71yOm1KDabD0ySVTQTqWTSA,12463 +pip/_internal/cli/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156 +pip/_internal/commands/__init__.py,sha256=uTSj58QlrSKeXqCUSdL-eAf_APzx5BHy1ABxb0j5ZNE,3714 +pip/_internal/commands/__pycache__/__init__.cpython-38.pyc,, +pip/_internal/commands/__pycache__/check.cpython-38.pyc,, +pip/_internal/commands/__pycache__/completion.cpython-38.pyc,, +pip/_internal/commands/__pycache__/configuration.cpython-38.pyc,, +pip/_internal/commands/__pycache__/debug.cpython-38.pyc,, +pip/_internal/commands/__pycache__/download.cpython-38.pyc,, +pip/_internal/commands/__pycache__/freeze.cpython-38.pyc,, +pip/_internal/commands/__pycache__/hash.cpython-38.pyc,, +pip/_internal/commands/__pycache__/help.cpython-38.pyc,, +pip/_internal/commands/__pycache__/install.cpython-38.pyc,, +pip/_internal/commands/__pycache__/list.cpython-38.pyc,, +pip/_internal/commands/__pycache__/search.cpython-38.pyc,, +pip/_internal/commands/__pycache__/show.cpython-38.pyc,, +pip/_internal/commands/__pycache__/uninstall.cpython-38.pyc,, +pip/_internal/commands/__pycache__/wheel.cpython-38.pyc,, +pip/_internal/commands/check.py,sha256=mgLNYT3bd6Kmynwh4zzcBmVlFZ-urMo40jTgk6U405E,1505 +pip/_internal/commands/completion.py,sha256=UFQvq0Q4_B96z1bvnQyMOq82aPSu05RejbLmqeTZjC0,2975 +pip/_internal/commands/configuration.py,sha256=6riioZjMhsNSEct7dE-X8SobGodk3WERKJvuyjBje4Q,7226 +pip/_internal/commands/debug.py,sha256=a8llax2hRkxgK-tvwdJgaCaZCYPIx0fDvrlMDoYr8bQ,4209 +pip/_internal/commands/download.py,sha256=zX_0-IeFb4C8dxSmGHxk-6H5kehtyTSsdWpjNpAhSww,5007 +pip/_internal/commands/freeze.py,sha256=QS-4ib8jbKJ2wrDaDbTuyaB3Y_iJ5CQC2gAVHuAv9QU,3481 +pip/_internal/commands/hash.py,sha256=47teimfAPhpkaVbSDaafck51BT3XXYuL83lAqc5lOcE,1735 +pip/_internal/commands/help.py,sha256=Nhecq--ydFn80Gm1Zvbf9943EcRJfO0TnXUhsF0RO7s,1181 +pip/_internal/commands/install.py,sha256=T4P3J1rw7CQrZX4OUamtcoWMkTrJBfUe6gWpTfZW1bQ,27286 +pip/_internal/commands/list.py,sha256=2l0JiqHxjxDHNTCb2HZOjwwdo4duS1R0MsqZb6HSMKk,10660 +pip/_internal/commands/search.py,sha256=7Il8nKZ9mM7qF5jlnBoPvSIFY9f-0-5IbYoX3miTuZY,5148 +pip/_internal/commands/show.py,sha256=Vzsj2oX0JBl94MPyF3LV8YoMcigl8B2UsMM8zp0pH2s,6792 +pip/_internal/commands/uninstall.py,sha256=8mldFbrQecSoWDZRqxBgJkrlvx6Y9Iy7cs-2BIgtXt4,2983 +pip/_internal/commands/wheel.py,sha256=TMU5ZhjLo7BIZQApGPsYfoCsbGTnvP-N9jkgPJXhj1Y,7170 +pip/_internal/configuration.py,sha256=MgKrLFBJBkF3t2VJM4tvlnEspfSuS4scp_LhHWh53nY,14222 +pip/_internal/distributions/__init__.py,sha256=ECBUW5Gtu9TjJwyFLvim-i6kUMYVuikNh9I5asL6tbA,959 +pip/_internal/distributions/__pycache__/__init__.cpython-38.pyc,, +pip/_internal/distributions/__pycache__/base.cpython-38.pyc,, +pip/_internal/distributions/__pycache__/installed.cpython-38.pyc,, +pip/_internal/distributions/__pycache__/sdist.cpython-38.pyc,, +pip/_internal/distributions/__pycache__/wheel.cpython-38.pyc,, +pip/_internal/distributions/base.py,sha256=ruprpM_L2T2HNi3KLUHlbHimZ1sWVw-3Q0Lb8O7TDAI,1425 +pip/_internal/distributions/installed.py,sha256=YqlkBKr6TVP1MAYS6SG8ojud21wVOYLMZ8jMLJe9MSU,760 +pip/_internal/distributions/sdist.py,sha256=D4XTMlCwgPlK69l62GLYkNSVTVe99fR5iAcVt2EbGok,4086 +pip/_internal/distributions/wheel.py,sha256=95uD-TfaYoq3KiKBdzk9YMN4RRqJ28LNoSTS2K46gek,1294 
+pip/_internal/exceptions.py,sha256=6YRuwXAK6F1iyUWKIkCIpWWN2khkAn1sZOgrFA9S8Ro,10247 +pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30 +pip/_internal/index/__pycache__/__init__.cpython-38.pyc,, +pip/_internal/index/__pycache__/collector.cpython-38.pyc,, +pip/_internal/index/__pycache__/package_finder.cpython-38.pyc,, +pip/_internal/index/collector.py,sha256=YS7Ix4oylU7ZbPTPFugh-244GSRqMvdHsGUG6nmz2gE,17892 +pip/_internal/index/package_finder.py,sha256=2Rg75AOpLj8BN1jyL8EI-Iw-Hv6ibJkrYVARCht3bX8,37542 +pip/_internal/legacy_resolve.py,sha256=L7R72I7CjVgJlPTggmA1j4b-H8NmxNu_dKVhrpGXGps,16277 +pip/_internal/locations.py,sha256=VifFEqhc7FWFV8QGoEM3CpECRY8Doq7kTytytxsEgx0,6734 +pip/_internal/main.py,sha256=IVBnUQ-FG7DK6617uEXRB5_QJqspAsBFmTmTesYkbdQ,437 +pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63 +pip/_internal/models/__pycache__/__init__.cpython-38.pyc,, +pip/_internal/models/__pycache__/candidate.cpython-38.pyc,, +pip/_internal/models/__pycache__/format_control.cpython-38.pyc,, +pip/_internal/models/__pycache__/index.cpython-38.pyc,, +pip/_internal/models/__pycache__/link.cpython-38.pyc,, +pip/_internal/models/__pycache__/scheme.cpython-38.pyc,, +pip/_internal/models/__pycache__/search_scope.cpython-38.pyc,, +pip/_internal/models/__pycache__/selection_prefs.cpython-38.pyc,, +pip/_internal/models/__pycache__/target_python.cpython-38.pyc,, +pip/_internal/models/__pycache__/wheel.cpython-38.pyc,, +pip/_internal/models/candidate.py,sha256=Y58Bcm6oXUj0iS-yhmerlGo5CQJI2p0Ww9h6hR9zQDw,1150 +pip/_internal/models/format_control.py,sha256=ICzVjjGwfZYdX-eLLKHjMHLutEJlAGpfj09OG_eMqac,2673 +pip/_internal/models/index.py,sha256=K59A8-hVhBM20Xkahr4dTwP7OjkJyEqXH11UwHFVgqM,1060 +pip/_internal/models/link.py,sha256=y0H2ZOk0P6d1lfGUL2Pl09xFgZcRt5HwN2LElMifOpI,6827 +pip/_internal/models/scheme.py,sha256=vvhBrrno7eVDXcdKHiZWwxhPHf4VG5uSCEkC0QDR2RU,679 +pip/_internal/models/search_scope.py,sha256=2LXbU4wV8LwqdtXQXNXFYKv-IxiDI_QwSz9ZgbwtAfk,3898 +pip/_internal/models/selection_prefs.py,sha256=rPeif2KKjhTPXeMoQYffjqh10oWpXhdkxRDaPT1HO8k,1908 +pip/_internal/models/target_python.py,sha256=c-cFi6zCuo5HYbXNS3rVVpKRaHVh5yQlYEjEW23SidQ,3799 +pip/_internal/models/wheel.py,sha256=UQJyd3V1TTTcFLrsOXHKpoxO5PJfPaIC9y9NbOLNfvc,2791 +pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50 +pip/_internal/network/__pycache__/__init__.cpython-38.pyc,, +pip/_internal/network/__pycache__/auth.cpython-38.pyc,, +pip/_internal/network/__pycache__/cache.cpython-38.pyc,, +pip/_internal/network/__pycache__/download.cpython-38.pyc,, +pip/_internal/network/__pycache__/session.cpython-38.pyc,, +pip/_internal/network/__pycache__/utils.cpython-38.pyc,, +pip/_internal/network/__pycache__/xmlrpc.cpython-38.pyc,, +pip/_internal/network/auth.py,sha256=K3G1ukKb3PiH8w_UnpXTz8qQsTULO-qdbfOE9zTo1fE,11119 +pip/_internal/network/cache.py,sha256=51CExcRkXWrgMZ7WsrZ6cmijKfViD5tVgKbBvJHO1IE,2394 +pip/_internal/network/download.py,sha256=3D9vdJmVwmCUMxzC-TaVI_GvVOpQna3BLEYNPCSx3Fc,6260 +pip/_internal/network/session.py,sha256=u1IXQfv21R1xv86ulyiB58-be4sYm90eFB0Wp8fVMYw,14702 +pip/_internal/network/utils.py,sha256=iiixo1OeaQ3niUWiBjg59PN6f1w7vvTww1vFriTD_IU,1959 +pip/_internal/network/xmlrpc.py,sha256=AL115M3vFJ8xiHVJneb8Hi0ZFeRvdPhblC89w25OG5s,1597 +pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/__pycache__/__init__.cpython-38.pyc,, 
+pip/_internal/operations/__pycache__/check.cpython-38.pyc,, +pip/_internal/operations/__pycache__/freeze.cpython-38.pyc,, +pip/_internal/operations/__pycache__/prepare.cpython-38.pyc,, +pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/operations/build/__pycache__/__init__.cpython-38.pyc,, +pip/_internal/operations/build/__pycache__/metadata.cpython-38.pyc,, +pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-38.pyc,, +pip/_internal/operations/build/__pycache__/wheel.cpython-38.pyc,, +pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-38.pyc,, +pip/_internal/operations/build/metadata.py,sha256=yHMi5gHYXcXyHcvUPWHdO-UyOo3McFWljn_nHfM1O9c,1307 +pip/_internal/operations/build/metadata_legacy.py,sha256=4n6N7BTysqVmEpITzT2UVClyt0Peij_Im8Qm965IWB4,3957 +pip/_internal/operations/build/wheel.py,sha256=ntltdNP6D2Tpr4V0agssu6rE0F9LaBpJkYT6zSdhEbw,1469 +pip/_internal/operations/build/wheel_legacy.py,sha256=DYSxQKutwSZnmNvWkwsl2HzE2XQBxV0i0wTphjtUe90,3349 +pip/_internal/operations/check.py,sha256=a6uHG0daoWpmSPCdL7iYJaGQYZ-CRvPvTnCv2PnIIs0,5353 +pip/_internal/operations/freeze.py,sha256=td4BeRnW10EXFTZrx6VgygO3CrjqD5B9f0BGzjQm-Ew,10180 +pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51 +pip/_internal/operations/install/__pycache__/__init__.cpython-38.pyc,, +pip/_internal/operations/install/__pycache__/editable_legacy.cpython-38.pyc,, +pip/_internal/operations/install/__pycache__/legacy.cpython-38.pyc,, +pip/_internal/operations/install/__pycache__/wheel.cpython-38.pyc,, +pip/_internal/operations/install/editable_legacy.py,sha256=rJ_xs2qtDUjpY2-n6eYlVyZiNoKbOtZXZrYrcnIELt4,1488 +pip/_internal/operations/install/legacy.py,sha256=eBV8gHbO9sBlBc-4nuR3Sd2nikHgEcnC9khfeLiypio,4566 +pip/_internal/operations/install/wheel.py,sha256=xdCjH6uIUyg39Pf8tUaMFUN4a7eozJAFMb_wKcgQlsY,23012 +pip/_internal/operations/prepare.py,sha256=ro2teBlbBpkRJhBKraP9CoJgVLpueSk62ziWhRToXww,20942 +pip/_internal/pep425tags.py,sha256=SlIQokevkoKnXhoK3PZvXiDoj8hFKoJ7thDifDtga3k,5490 +pip/_internal/pyproject.py,sha256=VJKsrXORGiGoDPVKCQhuu4tWlQSTOhoiRlVLRNu4rx4,7400 +pip/_internal/req/__init__.py,sha256=UVaYPlHZVGRBQQPjvGC_6jJDQtewXm0ws-8Lxhg_TiY,2671 +pip/_internal/req/__pycache__/__init__.cpython-38.pyc,, +pip/_internal/req/__pycache__/constructors.cpython-38.pyc,, +pip/_internal/req/__pycache__/req_file.cpython-38.pyc,, +pip/_internal/req/__pycache__/req_install.cpython-38.pyc,, +pip/_internal/req/__pycache__/req_set.cpython-38.pyc,, +pip/_internal/req/__pycache__/req_tracker.cpython-38.pyc,, +pip/_internal/req/__pycache__/req_uninstall.cpython-38.pyc,, +pip/_internal/req/constructors.py,sha256=w5-kWWVCqlSqcIBitw86yq7XGMPpKrHDfQZSE2mJ_xc,14388 +pip/_internal/req/req_file.py,sha256=ECqRUicCw5Y08R1YynZAAp8dSKQhDXoc1Q-mY3a9b6I,18485 +pip/_internal/req/req_install.py,sha256=wjsIr4lDpbVSLqANKJI9mXwRVHaRxcnj8q30UiHoLRA,30442 +pip/_internal/req/req_set.py,sha256=GsrKmupRKhNMhjkofVfCEHEHfgEvYBxClaQH5xLBQHg,8066 +pip/_internal/req/req_tracker.py,sha256=27fvVG8Y2MJS1KpU2rBMnQyUEMHG4lkHT_bzbzQK-c0,4723 +pip/_internal/req/req_uninstall.py,sha256=DWnOsuyYGju6-sylyoCm7GtUNevn9qMAVhjAGLcdXUE,23609 +pip/_internal/self_outdated_check.py,sha256=3KO1pTJUuYaiV9X0t87I9PimkGL82HbhLWbocqKZpBU,8009 +pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_internal/utils/__pycache__/__init__.cpython-38.pyc,, +pip/_internal/utils/__pycache__/appdirs.cpython-38.pyc,, 
+pip/_internal/utils/__pycache__/compat.cpython-38.pyc,, +pip/_internal/utils/__pycache__/deprecation.cpython-38.pyc,, +pip/_internal/utils/__pycache__/distutils_args.cpython-38.pyc,, +pip/_internal/utils/__pycache__/encoding.cpython-38.pyc,, +pip/_internal/utils/__pycache__/entrypoints.cpython-38.pyc,, +pip/_internal/utils/__pycache__/filesystem.cpython-38.pyc,, +pip/_internal/utils/__pycache__/filetypes.cpython-38.pyc,, +pip/_internal/utils/__pycache__/glibc.cpython-38.pyc,, +pip/_internal/utils/__pycache__/hashes.cpython-38.pyc,, +pip/_internal/utils/__pycache__/inject_securetransport.cpython-38.pyc,, +pip/_internal/utils/__pycache__/logging.cpython-38.pyc,, +pip/_internal/utils/__pycache__/marker_files.cpython-38.pyc,, +pip/_internal/utils/__pycache__/misc.cpython-38.pyc,, +pip/_internal/utils/__pycache__/models.cpython-38.pyc,, +pip/_internal/utils/__pycache__/packaging.cpython-38.pyc,, +pip/_internal/utils/__pycache__/pkg_resources.cpython-38.pyc,, +pip/_internal/utils/__pycache__/setuptools_build.cpython-38.pyc,, +pip/_internal/utils/__pycache__/subprocess.cpython-38.pyc,, +pip/_internal/utils/__pycache__/temp_dir.cpython-38.pyc,, +pip/_internal/utils/__pycache__/typing.cpython-38.pyc,, +pip/_internal/utils/__pycache__/ui.cpython-38.pyc,, +pip/_internal/utils/__pycache__/unpacking.cpython-38.pyc,, +pip/_internal/utils/__pycache__/urls.cpython-38.pyc,, +pip/_internal/utils/__pycache__/virtualenv.cpython-38.pyc,, +pip/_internal/utils/__pycache__/wheel.cpython-38.pyc,, +pip/_internal/utils/appdirs.py,sha256=PVo_7-IQWHa9qNuNbWSFiF2QGqeLbSAR4eLcYYhQ9ek,1307 +pip/_internal/utils/compat.py,sha256=D7FKGLBdQwWH-dHIGaoWMawDZWBYApvtJVL1kFPJ930,8869 +pip/_internal/utils/deprecation.py,sha256=pBnNogoA4UGTxa_JDnPXBRRYpKMbExAhXpBwAwklOBs,3318 +pip/_internal/utils/distutils_args.py,sha256=a56mblNxk9BGifbpEETG61mmBrqhjtjRkJ4HYn-oOEE,1350 +pip/_internal/utils/encoding.py,sha256=hxZz0t3Whw3d4MHQEiofxalTlfKwxFdLc8fpeGfhKo8,1320 +pip/_internal/utils/entrypoints.py,sha256=vHcNpnksCv6mllihU6hfifdsKPEjwcaJ1aLIXEaynaU,1152 +pip/_internal/utils/filesystem.py,sha256=PXa3vMcz4mbEKtkD0joFI8pBwddLQxhfPFOkVH5xjfE,5255 +pip/_internal/utils/filetypes.py,sha256=R2FwzoeX7b-rZALOXx5cuO8VPPMhUQ4ne7wm3n3IcWA,571 +pip/_internal/utils/glibc.py,sha256=LOeNGgawCKS-4ke9fii78fwXD73dtNav3uxz1Bf-Ab8,3297 +pip/_internal/utils/hashes.py,sha256=my-wSnAWEDvl_8rQaOQcVIWjwh1-f_QiEvGy9TPf53U,3942 +pip/_internal/utils/inject_securetransport.py,sha256=M17ZlFVY66ApgeASVjKKLKNz0LAfk-SyU0HZ4ZB6MmI,810 +pip/_internal/utils/logging.py,sha256=aJL7NldPhS5KGFof6Qt3o3MG5cjm5TOoo7bGRu9_wsg,13033 +pip/_internal/utils/marker_files.py,sha256=CO5djQlrPIozJpJybViH_insoAaBGY1aqEt6-cC-iW0,741 +pip/_internal/utils/misc.py,sha256=uIb58Hiu_g2HRORo2aMcgnW_7R5d-5wUAuoW0fA2ZME,26085 +pip/_internal/utils/models.py,sha256=IA0hw_T4awQzui0kqfIEASm5yLtgZAB08ag59Nip5G8,1148 +pip/_internal/utils/packaging.py,sha256=VtiwcAAL7LBi7tGL2je7LeW4bE11KMHGCsJ1NZY5XtM,3035 +pip/_internal/utils/pkg_resources.py,sha256=ZX-k7V5q_aNWyDse92nN7orN1aCpRLsaxzpkBZ1XKzU,1254 +pip/_internal/utils/setuptools_build.py,sha256=DouaVolV9olDDFIIN9IszaL-FHdNaZt10ufOZFH9ZAU,5070 +pip/_internal/utils/subprocess.py,sha256=Ph3x5eHQBxFotyGhpZN8asSMBud-BBkmgaNfARG-di8,9922 +pip/_internal/utils/temp_dir.py,sha256=87Ib8aNic_hoSDEmUYJHTQIn5-prL2AYL5u_yZ3s4sI,7768 +pip/_internal/utils/typing.py,sha256=xkYwOeHlf4zsHXBDC4310HtEqwhQcYXFPq2h35Tcrl0,1401 +pip/_internal/utils/ui.py,sha256=0FNxXlGtbpPtTviv2oXS9t8bQG_NBdfUgP4GbubhS9U,13911 
+pip/_internal/utils/unpacking.py,sha256=M944JTSiapBOSKLWu7lbawpVHSE7flfzZTEr3TAG7v8,9438 +pip/_internal/utils/urls.py,sha256=aNV9wq5ClUmrz6sG-al7hEWJ4ToitOy7l82CmFGFNW8,1481 +pip/_internal/utils/virtualenv.py,sha256=Q3S1WPlI7JWpGOT2jUVJ8l2chm_k7VPJ9cHA_cUluEU,3396 +pip/_internal/utils/wheel.py,sha256=grTRwZtMQwApwbbSPmRVLtac6FKy6SVKeCXNkWyyePA,7302 +pip/_internal/vcs/__init__.py,sha256=viJxJRqRE_mVScum85bgQIXAd6o0ozFt18VpC-qIJrM,617 +pip/_internal/vcs/__pycache__/__init__.cpython-38.pyc,, +pip/_internal/vcs/__pycache__/bazaar.cpython-38.pyc,, +pip/_internal/vcs/__pycache__/git.cpython-38.pyc,, +pip/_internal/vcs/__pycache__/mercurial.cpython-38.pyc,, +pip/_internal/vcs/__pycache__/subversion.cpython-38.pyc,, +pip/_internal/vcs/__pycache__/versioncontrol.cpython-38.pyc,, +pip/_internal/vcs/bazaar.py,sha256=84q1-kj1_nJ9AMzMu8RmMp-riRZu81M7K9kowcYgi3U,3957 +pip/_internal/vcs/git.py,sha256=CdLz3DTsZsLMLPZpEuUwiS40npvDaVB1CNRzoXgcuJQ,14352 +pip/_internal/vcs/mercurial.py,sha256=2mg7BdYI_Fe00fF6omaNccFQLPHBsDBG5CAEzvqn5sA,5110 +pip/_internal/vcs/subversion.py,sha256=Fpwy71AmuqXnoKi6h1SrXRtPjEMn8fieuM1O4j01IBg,12292 +pip/_internal/vcs/versioncontrol.py,sha256=nqoaM1_rzx24WnHtihXA8RcPpnUae0sV2sR_LS_5HFA,22600 +pip/_internal/wheel_builder.py,sha256=gr9jE14W5ZuYblpldo-tpRuyG0e0AVmHLttImuAvXlE,9441 +pip/_vendor/__init__.py,sha256=RcHf8jwLPL0ZEaa6uMhTSfyCrA_TpWgDWAW5br9xD7Y,4975 +pip/_vendor/__pycache__/__init__.cpython-38.pyc,, diff --git a/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/WHEEL b/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..ef99c6cf3283b50a273ac4c6d009a0aa85597070 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/entry_points.txt b/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..d48bd8a85e683c7a9607f3f418f50d11445bdf40 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +pip = pip._internal.cli.main:main +pip3 = pip._internal.cli.main:main +pip3.8 = pip._internal.cli.main:main + diff --git a/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/top_level.txt b/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip-20.0.2.dist-info/top_level.txt @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/pip/__init__.py b/backend/test/lib/python3.8/site-packages/pip/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..827a4e20a7b0a7824ae863f97f0b0c1c38408030 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/__init__.py @@ -0,0 +1,18 @@ +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + + +__version__ = "20.0.2" + + +def main(args=None): + # type: (Optional[List[str]]) -> int + """This is an internal API only meant for use by pip's own console scripts. + + For additional details, see https://github.com/pypa/pip/issues/7498. 
+ """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/backend/test/lib/python3.8/site-packages/pip/__main__.py b/backend/test/lib/python3.8/site-packages/pip/__main__.py new file mode 100644 index 0000000000000000000000000000000000000000..e83b9e056b321828cbc8990f719ebb4a729c9bea --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/__main__.py @@ -0,0 +1,19 @@ +from __future__ import absolute_import + +import os +import sys + +# If we are running from a wheel, add the wheel to sys.path +# This allows the usage python pip-*.whl/pip install pip-*.whl +if __package__ == '': + # __file__ is pip-*.whl/pip/__main__.py + # first dirname call strips of '/__main__.py', second strips off '/pip' + # Resulting path is the name of the wheel itself + # Add that to sys.path so we can import pip + path = os.path.dirname(os.path.dirname(__file__)) + sys.path.insert(0, path) + +from pip._internal.cli.main import main as _main # isort:skip # noqa + +if __name__ == '__main__': + sys.exit(_main()) diff --git a/backend/test/lib/python3.8/site-packages/pip/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a5045b3619b145b8ba308aaf4823fe1e56cda2f1 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/__pycache__/__main__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/__pycache__/__main__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fc46c4c259c1fe4abec4644072788f101acb62e2 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/__pycache__/__main__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/__init__.py b/backend/test/lib/python3.8/site-packages/pip/_internal/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3aa8a4693ff0893a87364964f06bad8075e4834b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/__init__.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +import pip._internal.utils.inject_securetransport # noqa +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, List + + +def main(args=None): + # type: (Optional[List[str]]) -> int + """This is preserved for old console scripts that may still be referencing + it. + + For additional details, see https://github.com/pypa/pip/issues/7498. 
+ """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bc1ea0677ea8489f698c6d5c836163b49e623e5f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/build_env.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/build_env.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8364bb5636f50dd864dcc92716b470d4ac8dc964 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/build_env.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/cache.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/cache.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dcf10028f925a22cc8fbd9687d05ff9da4a9ebc8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/cache.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/configuration.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/configuration.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f5e1598367a2b987bb59ebb0781bd3ea46aa9dba Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/configuration.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/exceptions.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..67a9a56a16871c41a981170f1671ac9d6a69c82d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/exceptions.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/legacy_resolve.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/legacy_resolve.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cd5e057eb41785fa2ee3c89efd972fa1af8aebca Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/legacy_resolve.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/locations.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/locations.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2db1d0d0dd9204fdacb5cda301a096ac0beb7b6e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/locations.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/main.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/main.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f53fe69fc528c40fb2b6a43a4c37618e0abca3b9 Binary files /dev/null and 
b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/main.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/pep425tags.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/pep425tags.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bc3ad90d5139b2e6f2e62bf89fc5c1972515d8e7 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/pep425tags.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/pyproject.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/pyproject.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8628e9997eeda1f5d1acdd345a8e675c4eed2431 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/pyproject.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8086ba9131417e11944545f76e54c5453ad94d91 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2aebcc76f6c5153be90eed68f5527022b547f75c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/build_env.py b/backend/test/lib/python3.8/site-packages/pip/_internal/build_env.py new file mode 100644 index 0000000000000000000000000000000000000000..f55f0e6b8d9e73ff9b751ce2f0c2513123d17100 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/build_env.py @@ -0,0 +1,221 @@ +"""Build Environment used for isolation during sdist building +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False +# mypy: disallow-untyped-defs=False + +import logging +import os +import sys +import textwrap +from collections import OrderedDict +from distutils.sysconfig import get_python_lib +from sysconfig import get_paths + +from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet + +from pip import __file__ as pip_location +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.ui import open_spinner + +if MYPY_CHECK_RUNNING: + from typing import Tuple, Set, Iterable, Optional, List + from pip._internal.index.package_finder import PackageFinder + +logger = logging.getLogger(__name__) + + +class _Prefix: + + def __init__(self, path): + # type: (str) -> None + self.path = path + self.setup = False + self.bin_dir = get_paths( + 'nt' if os.name == 'nt' else 'posix_prefix', + vars={'base': path, 'platbase': path} + )['scripts'] + # Note: prefer distutils' sysconfig to get the + # library paths so PyPy is correctly supported. 
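+ # Note: `purelib` is the install dir for pure-Python packages and
+ # `platlib` the one for packages with compiled extension modules; on
+ # many interpreter layouts both resolve to the same directory, which
+ # is why the code below de-duplicates them before building lib_dirs.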
+ purelib = get_python_lib(plat_specific=False, prefix=path) + platlib = get_python_lib(plat_specific=True, prefix=path) + if purelib == platlib: + self.lib_dirs = [purelib] + else: + self.lib_dirs = [purelib, platlib] + + +class BuildEnvironment(object): + """Creates and manages an isolated environment to install build deps + """ + + def __init__(self): + # type: () -> None + self._temp_dir = TempDirectory(kind="build-env") + + self._prefixes = OrderedDict(( + (name, _Prefix(os.path.join(self._temp_dir.path, name))) + for name in ('normal', 'overlay') + )) + + self._bin_dirs = [] # type: List[str] + self._lib_dirs = [] # type: List[str] + for prefix in reversed(list(self._prefixes.values())): + self._bin_dirs.append(prefix.bin_dir) + self._lib_dirs.extend(prefix.lib_dirs) + + # Customize site to: + # - ensure .pth files are honored + # - prevent access to system site packages + system_sites = { + os.path.normcase(site) for site in ( + get_python_lib(plat_specific=False), + get_python_lib(plat_specific=True), + ) + } + self._site_dir = os.path.join(self._temp_dir.path, 'site') + if not os.path.exists(self._site_dir): + os.mkdir(self._site_dir) + with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp: + fp.write(textwrap.dedent( + ''' + import os, site, sys + + # First, drop system-sites related paths. + original_sys_path = sys.path[:] + known_paths = set() + for path in {system_sites!r}: + site.addsitedir(path, known_paths=known_paths) + system_paths = set( + os.path.normcase(path) + for path in sys.path[len(original_sys_path):] + ) + original_sys_path = [ + path for path in original_sys_path + if os.path.normcase(path) not in system_paths + ] + sys.path = original_sys_path + + # Second, add lib directories. + # ensuring .pth file are processed. + for path in {lib_dirs!r}: + assert not path in sys.path + site.addsitedir(path) + ''' + ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)) + + def __enter__(self): + self._save_env = { + name: os.environ.get(name, None) + for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH') + } + + path = self._bin_dirs[:] + old_path = self._save_env['PATH'] + if old_path: + path.extend(old_path.split(os.pathsep)) + + pythonpath = [self._site_dir] + + os.environ.update({ + 'PATH': os.pathsep.join(path), + 'PYTHONNOUSERSITE': '1', + 'PYTHONPATH': os.pathsep.join(pythonpath), + }) + + def __exit__(self, exc_type, exc_val, exc_tb): + for varname, old_value in self._save_env.items(): + if old_value is None: + os.environ.pop(varname, None) + else: + os.environ[varname] = old_value + + def cleanup(self): + # type: () -> None + self._temp_dir.cleanup() + + def check_requirements(self, reqs): + # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]] + """Return 2 sets: + - conflicting requirements: set of (installed, wanted) reqs tuples + - missing requirements: set of reqs + """ + missing = set() + conflicting = set() + if reqs: + ws = WorkingSet(self._lib_dirs) + for req in reqs: + try: + if ws.find(Requirement.parse(req)) is None: + missing.add(req) + except VersionConflict as e: + conflicting.add((str(e.args[0].as_requirement()), + str(e.args[1]))) + return conflicting, missing + + def install_requirements( + self, + finder, # type: PackageFinder + requirements, # type: Iterable[str] + prefix_as_string, # type: str + message # type: Optional[str] + ): + # type: (...) 
-> None + prefix = self._prefixes[prefix_as_string] + assert not prefix.setup + prefix.setup = True + if not requirements: + return + args = [ + sys.executable, os.path.dirname(pip_location), 'install', + '--ignore-installed', '--no-user', '--prefix', prefix.path, + '--no-warn-script-location', + ] # type: List[str] + if logger.getEffectiveLevel() <= logging.DEBUG: + args.append('-v') + for format_control in ('no_binary', 'only_binary'): + formats = getattr(finder.format_control, format_control) + args.extend(('--' + format_control.replace('_', '-'), + ','.join(sorted(formats or {':none:'})))) + + index_urls = finder.index_urls + if index_urls: + args.extend(['-i', index_urls[0]]) + for extra_index in index_urls[1:]: + args.extend(['--extra-index-url', extra_index]) + else: + args.append('--no-index') + for link in finder.find_links: + args.extend(['--find-links', link]) + + for host in finder.trusted_hosts: + args.extend(['--trusted-host', host]) + if finder.allow_all_prereleases: + args.append('--pre') + args.append('--') + args.extend(requirements) + with open_spinner(message) as spinner: + call_subprocess(args, spinner=spinner) + + +class NoOpBuildEnvironment(BuildEnvironment): + """A no-op drop-in replacement for BuildEnvironment + """ + + def __init__(self): + pass + + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_val, exc_tb): + pass + + def cleanup(self): + pass + + def install_requirements(self, finder, requirements, prefix, message): + raise NotImplementedError() diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cache.py b/backend/test/lib/python3.8/site-packages/pip/_internal/cache.py new file mode 100644 index 0000000000000000000000000000000000000000..abecd78f8d988dd5856855aff3a89ab270ca73a9 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/cache.py @@ -0,0 +1,329 @@ +"""Cache Management +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import hashlib +import json +import logging +import os + +from pip._vendor.packaging.tags import interpreter_name, interpreter_version +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import InvalidWheelFilename +from pip._internal.models.link import Link +from pip._internal.models.wheel import Wheel +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url + +if MYPY_CHECK_RUNNING: + from typing import Optional, Set, List, Any, Dict + + from pip._vendor.packaging.tags import Tag + + from pip._internal.models.format_control import FormatControl + +logger = logging.getLogger(__name__) + + +def _hash_dict(d): + # type: (Dict[str, str]) -> str + """Return a stable sha224 of a dictionary.""" + s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True) + return hashlib.sha224(s.encode("ascii")).hexdigest() + + +class Cache(object): + """An abstract class - provides cache directories for data from links + + + :param cache_dir: The root of the cache. + :param format_control: An object of FormatControl class to limit + binaries being read from the cache. + :param allowed_formats: which formats of files the cache should store. 
+ ('binary' and 'source' are the only allowed values) + """ + + def __init__(self, cache_dir, format_control, allowed_formats): + # type: (str, FormatControl, Set[str]) -> None + super(Cache, self).__init__() + assert not cache_dir or os.path.isabs(cache_dir) + self.cache_dir = cache_dir or None + self.format_control = format_control + self.allowed_formats = allowed_formats + + _valid_formats = {"source", "binary"} + assert self.allowed_formats.union(_valid_formats) == _valid_formats + + def _get_cache_path_parts_legacy(self, link): + # type: (Link) -> List[str] + """Get parts of part that must be os.path.joined with cache_dir + + Legacy cache key (pip < 20) for compatibility with older caches. + """ + + # We want to generate an url to use as our cache key, we don't want to + # just re-use the URL because it might have other items in the fragment + # and we don't care about those. + key_parts = [link.url_without_fragment] + if link.hash_name is not None and link.hash is not None: + key_parts.append("=".join([link.hash_name, link.hash])) + key_url = "#".join(key_parts) + + # Encode our key url with sha224, we'll use this because it has similar + # security properties to sha256, but with a shorter total output (and + # thus less secure). However the differences don't make a lot of + # difference for our use case here. + hashed = hashlib.sha224(key_url.encode()).hexdigest() + + # We want to nest the directories some to prevent having a ton of top + # level directories where we might run out of sub directories on some + # FS. + parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] + + return parts + + def _get_cache_path_parts(self, link): + # type: (Link) -> List[str] + """Get parts of part that must be os.path.joined with cache_dir + """ + + # We want to generate an url to use as our cache key, we don't want to + # just re-use the URL because it might have other items in the fragment + # and we don't care about those. + key_parts = {"url": link.url_without_fragment} + if link.hash_name is not None and link.hash is not None: + key_parts[link.hash_name] = link.hash + if link.subdirectory_fragment: + key_parts["subdirectory"] = link.subdirectory_fragment + + # Include interpreter name, major and minor version in cache key + # to cope with ill-behaved sdists that build a different wheel + # depending on the python version their setup.py is being run on, + # and don't encode the difference in compatibility tags. + # https://github.com/pypa/pip/issues/7296 + key_parts["interpreter_name"] = interpreter_name() + key_parts["interpreter_version"] = interpreter_version() + + # Encode our key url with sha224, we'll use this because it has similar + # security properties to sha256, but with a shorter total output (and + # thus less secure). However the differences don't make a lot of + # difference for our use case here. + hashed = _hash_dict(key_parts) + + # We want to nest the directories some to prevent having a ton of top + # level directories where we might run out of sub directories on some + # FS. 
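+ # Illustrative example (hypothetical digest): a hash beginning
+ # "0a1b2cdeadbeef..." maps to the relative path 0a/1b/2c/deadbeef...,
+ # i.e. three two-character directory levels followed by the remainder
+ # of the digest.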
+ parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] + + return parts + + def _get_candidates(self, link, canonical_package_name): + # type: (Link, Optional[str]) -> List[Any] + can_not_cache = ( + not self.cache_dir or + not canonical_package_name or + not link + ) + if can_not_cache: + return [] + + formats = self.format_control.get_allowed_formats( + canonical_package_name + ) + if not self.allowed_formats.intersection(formats): + return [] + + candidates = [] + path = self.get_path_for_link(link) + if os.path.isdir(path): + for candidate in os.listdir(path): + candidates.append((candidate, path)) + # TODO remove legacy path lookup in pip>=21 + legacy_path = self.get_path_for_link_legacy(link) + if os.path.isdir(legacy_path): + for candidate in os.listdir(legacy_path): + candidates.append((candidate, legacy_path)) + return candidates + + def get_path_for_link_legacy(self, link): + # type: (Link) -> str + raise NotImplementedError() + + def get_path_for_link(self, link): + # type: (Link) -> str + """Return a directory to store cached items in for link. + """ + raise NotImplementedError() + + def get( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) -> Link + """Returns a link to a cached item if it exists, otherwise returns the + passed link. + """ + raise NotImplementedError() + + def cleanup(self): + # type: () -> None + pass + + +class SimpleWheelCache(Cache): + """A cache of wheels for future installs. + """ + + def __init__(self, cache_dir, format_control): + # type: (str, FormatControl) -> None + super(SimpleWheelCache, self).__init__( + cache_dir, format_control, {"binary"} + ) + + def get_path_for_link_legacy(self, link): + # type: (Link) -> str + parts = self._get_cache_path_parts_legacy(link) + return os.path.join(self.cache_dir, "wheels", *parts) + + def get_path_for_link(self, link): + # type: (Link) -> str + """Return a directory to store cached wheels for link + + Because there are M wheels for any one sdist, we provide a directory + to cache them in, and then consult that directory when looking up + cache hits. + + We only insert things into the cache if they have plausible version + numbers, so that we don't contaminate the cache with things that were + not unique. E.g. ./package might have dozens of installs done for it + and build a version of 0.0...and if we built and cached a wheel, we'd + end up using the same wheel even if the source has been edited. + + :param link: The link of the sdist for which this will cache wheels. + """ + parts = self._get_cache_path_parts(link) + + # Store wheels within the root cache_dir + return os.path.join(self.cache_dir, "wheels", *parts) + + def get( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) 
-> Link + candidates = [] + + if not package_name: + return link + + canonical_package_name = canonicalize_name(package_name) + for wheel_name, wheel_dir in self._get_candidates( + link, canonical_package_name + ): + try: + wheel = Wheel(wheel_name) + except InvalidWheelFilename: + continue + if canonicalize_name(wheel.name) != canonical_package_name: + logger.debug( + "Ignoring cached wheel {} for {} as it " + "does not match the expected distribution name {}.".format( + wheel_name, link, package_name + ) + ) + continue + if not wheel.supported(supported_tags): + # Built for a different python/arch/etc + continue + candidates.append( + ( + wheel.support_index_min(supported_tags), + wheel_name, + wheel_dir, + ) + ) + + if not candidates: + return link + + _, wheel_name, wheel_dir = min(candidates) + return Link(path_to_url(os.path.join(wheel_dir, wheel_name))) + + +class EphemWheelCache(SimpleWheelCache): + """A SimpleWheelCache that creates it's own temporary cache directory + """ + + def __init__(self, format_control): + # type: (FormatControl) -> None + self._temp_dir = TempDirectory(kind="ephem-wheel-cache") + + super(EphemWheelCache, self).__init__( + self._temp_dir.path, format_control + ) + + def cleanup(self): + # type: () -> None + self._temp_dir.cleanup() + + +class WheelCache(Cache): + """Wraps EphemWheelCache and SimpleWheelCache into a single Cache + + This Cache allows for gracefully degradation, using the ephem wheel cache + when a certain link is not found in the simple wheel cache first. + """ + + def __init__(self, cache_dir, format_control): + # type: (str, FormatControl) -> None + super(WheelCache, self).__init__( + cache_dir, format_control, {'binary'} + ) + self._wheel_cache = SimpleWheelCache(cache_dir, format_control) + self._ephem_cache = EphemWheelCache(format_control) + + def get_path_for_link_legacy(self, link): + # type: (Link) -> str + return self._wheel_cache.get_path_for_link_legacy(link) + + def get_path_for_link(self, link): + # type: (Link) -> str + return self._wheel_cache.get_path_for_link(link) + + def get_ephem_path_for_link(self, link): + # type: (Link) -> str + return self._ephem_cache.get_path_for_link(link) + + def get( + self, + link, # type: Link + package_name, # type: Optional[str] + supported_tags, # type: List[Tag] + ): + # type: (...) 
-> Link + retval = self._wheel_cache.get( + link=link, + package_name=package_name, + supported_tags=supported_tags, + ) + if retval is not link: + return retval + + return self._ephem_cache.get( + link=link, + package_name=package_name, + supported_tags=supported_tags, + ) + + def cleanup(self): + # type: () -> None + self._wheel_cache.cleanup() + self._ephem_cache.cleanup() diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__init__.py b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e589bb917e23823e25f9fff7e0849c4d6d4a62bc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__init__.py @@ -0,0 +1,4 @@ +"""Subpackage containing all of pip's command line interface related code +""" + +# This file intentionally does not import submodules diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..134ed1e511bef248783d2922ff2f98bf316d40fa Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9ed02eb660622a02a1e7d3c2f58734dd81b1b703 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..31b9aa8c797e424132e56bf6a44278baca4898a0 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..103f7f2d8ad795543d0d1a8a74ac178fcd3222d8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..93f96970d711ac943f68211a55066d24331ddb71 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/main.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/main.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ab1fc3b9d2db1dedc9e67ca7eabff35458422679 Binary files /dev/null and 
b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/main.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8e697ecb5cec659697d5ca72ba0ba85ee0cfcd8d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/parser.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/parser.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1c9c1f95d7c120d38c84aab73b1a3ae5000ae316 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/parser.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..19071cde064a65510d27423609f28b758bad330f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..25f0373c735e89bc9b0a2656da7a85aac89ce074 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/autocompletion.py b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/autocompletion.py new file mode 100644 index 0000000000000000000000000000000000000000..329de602513d7bb868799a49d36d3f081a79e441 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/autocompletion.py @@ -0,0 +1,164 @@ +"""Logic that powers autocompletion installed by ``pip completion``. +""" + +import optparse +import os +import sys +from itertools import chain + +from pip._internal.cli.main_parser import create_main_parser +from pip._internal.commands import commands_dict, create_command +from pip._internal.utils.misc import get_installed_distributions +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any, Iterable, List, Optional + + +def autocomplete(): + # type: () -> None + """Entry Point for completion of main and subcommand options. + """ + # Don't complete if user hasn't sourced bash_completion file. 
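+ # The shell snippets generated by `pip completion --bash`/`--zsh`
+ # invoke pip with PIP_AUTO_COMPLETE=1 and export the shell's
+ # COMP_WORDS and COMP_CWORD, which is what this function reads back
+ # from the environment below.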
+ if 'PIP_AUTO_COMPLETE' not in os.environ: + return + cwords = os.environ['COMP_WORDS'].split()[1:] + cword = int(os.environ['COMP_CWORD']) + try: + current = cwords[cword - 1] + except IndexError: + current = '' + + parser = create_main_parser() + subcommands = list(commands_dict) + options = [] + + # subcommand + subcommand_name = None # type: Optional[str] + for word in cwords: + if word in subcommands: + subcommand_name = word + break + # subcommand options + if subcommand_name is not None: + # special case: 'help' subcommand has no options + if subcommand_name == 'help': + sys.exit(1) + # special case: list locally installed dists for show and uninstall + should_list_installed = ( + subcommand_name in ['show', 'uninstall'] and + not current.startswith('-') + ) + if should_list_installed: + installed = [] + lc = current.lower() + for dist in get_installed_distributions(local_only=True): + if dist.key.startswith(lc) and dist.key not in cwords[1:]: + installed.append(dist.key) + # if there are no dists installed, fall back to option completion + if installed: + for dist in installed: + print(dist) + sys.exit(1) + + subcommand = create_command(subcommand_name) + + for opt in subcommand.parser.option_list_all: + if opt.help != optparse.SUPPRESS_HELP: + for opt_str in opt._long_opts + opt._short_opts: + options.append((opt_str, opt.nargs)) + + # filter out previously specified options from available options + prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]] + options = [(x, v) for (x, v) in options if x not in prev_opts] + # filter options by current input + options = [(k, v) for k, v in options if k.startswith(current)] + # get completion type given cwords and available subcommand options + completion_type = get_path_completion_type( + cwords, cword, subcommand.parser.option_list_all, + ) + # get completion files and directories if ``completion_type`` is + # ``<file>``, ``<dir>`` or ``<path>`` + if completion_type: + paths = auto_complete_paths(current, completion_type) + options = [(path, 0) for path in paths] + for option in options: + opt_label = option[0] + # append '=' to options which require args + if option[1] and option[0][:2] == "--": + opt_label += '=' + print(opt_label) + else: + # show main parser options only when necessary + + opts = [i.option_list for i in parser.option_groups] + opts.append(parser.option_list) + flattened_opts = chain.from_iterable(opts) + if current.startswith('-'): + for opt in flattened_opts: + if opt.help != optparse.SUPPRESS_HELP: + subcommands += opt._long_opts + opt._short_opts + else: + # get completion type given cwords and all available options + completion_type = get_path_completion_type(cwords, cword, + flattened_opts) + if completion_type: + subcommands = list(auto_complete_paths(current, + completion_type)) + + print(' '.join([x for x in subcommands if x.startswith(current)])) + sys.exit(1) + + +def get_path_completion_type(cwords, cword, opts): + # type: (List[str], int, Iterable[Any]) -> Optional[str] + """Get the type of path completion (``file``, ``dir``, ``path`` or None) + + :param cwords: same as the environmental variable ``COMP_WORDS`` + :param cword: same as the environmental variable ``COMP_CWORD`` + :param opts: The available options to check + :return: path completion type (``file``, ``dir``, ``path`` or None) + """ + if cword < 2 or not cwords[cword - 2].startswith('-'): + return None + for opt in opts: + if opt.help == optparse.SUPPRESS_HELP: + continue + for o in str(opt).split('/'): + if cwords[cword - 
2].split('=')[0] == o: + if not opt.metavar or any( + x in ('path', 'file', 'dir') + for x in opt.metavar.split('/')): + return opt.metavar + return None + + +def auto_complete_paths(current, completion_type): + # type: (str, str) -> Iterable[str] + """If ``completion_type`` is ``file`` or ``path``, list all regular files + and directories starting with ``current``; otherwise only list directories + starting with ``current``. + + :param current: The word to be completed + :param completion_type: path completion type(`file`, `path` or `dir`)i + :return: A generator of regular files and/or directories + """ + directory, filename = os.path.split(current) + current_path = os.path.abspath(directory) + # Don't complete paths if they can't be accessed + if not os.access(current_path, os.R_OK): + return + filename = os.path.normcase(filename) + # list all files that start with ``filename`` + file_list = (x for x in os.listdir(current_path) + if os.path.normcase(x).startswith(filename)) + for f in file_list: + opt = os.path.join(current_path, f) + comp_file = os.path.normcase(os.path.join(directory, f)) + # complete regular files when there is not ``<dir>`` after option + # complete directories when there is ``<file>``, ``<path>`` or + # ``<dir>``after option + if completion_type != 'dir' and os.path.isfile(opt): + yield comp_file + elif os.path.isdir(opt): + yield os.path.join(comp_file, '') diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/base_command.py b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/base_command.py new file mode 100644 index 0000000000000000000000000000000000000000..628faa3eee0e441b8fed0eea9c6e4b74222ebb3f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/base_command.py @@ -0,0 +1,226 @@ +"""Base Command class, and related routines""" + +from __future__ import absolute_import, print_function + +import logging +import logging.config +import optparse +import os +import platform +import sys +import traceback + +from pip._internal.cli import cmdoptions +from pip._internal.cli.command_context import CommandContextMixIn +from pip._internal.cli.parser import ( + ConfigOptionParser, + UpdatingDefaultsHelpFormatter, +) +from pip._internal.cli.status_codes import ( + ERROR, + PREVIOUS_BUILD_DIR_ERROR, + SUCCESS, + UNKNOWN_ERROR, + VIRTUALENV_NOT_FOUND, +) +from pip._internal.exceptions import ( + BadCommand, + CommandError, + InstallationError, + PreviousBuildDirError, + UninstallationError, +) +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.filesystem import check_path_owner +from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging +from pip._internal.utils.misc import get_prog, normalize_path +from pip._internal.utils.temp_dir import global_tempdir_manager +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.virtualenv import running_under_virtualenv + +if MYPY_CHECK_RUNNING: + from typing import List, Tuple, Any + from optparse import Values + +__all__ = ['Command'] + +logger = logging.getLogger(__name__) + + +class Command(CommandContextMixIn): + usage = None # type: str + ignore_require_venv = False # type: bool + + def __init__(self, name, summary, isolated=False): + # type: (str, str, bool) -> None + super(Command, self).__init__() + parser_kw = { + 'usage': self.usage, + 'prog': '%s %s' % (get_prog(), name), + 'formatter': UpdatingDefaultsHelpFormatter(), + 'add_help_option': False, + 'name': name, + 'description': 
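+ # (The command class's docstring, passed here as `description`, is
+ # what optparse shows as the command's --help description text.)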
self.__doc__, + 'isolated': isolated, + } + + self.name = name + self.summary = summary + self.parser = ConfigOptionParser(**parser_kw) + + # Commands should add options to this option group + optgroup_name = '%s Options' % self.name.capitalize() + self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) + + # Add the general options + gen_opts = cmdoptions.make_option_group( + cmdoptions.general_group, + self.parser, + ) + self.parser.add_option_group(gen_opts) + + def handle_pip_version_check(self, options): + # type: (Values) -> None + """ + This is a no-op so that commands by default do not do the pip version + check. + """ + # Make sure we do the pip version check if the index_group options + # are present. + assert not hasattr(options, 'no_index') + + def run(self, options, args): + # type: (Values, List[Any]) -> Any + raise NotImplementedError + + def parse_args(self, args): + # type: (List[str]) -> Tuple[Any, Any] + # factored out for testability + return self.parser.parse_args(args) + + def main(self, args): + # type: (List[str]) -> int + try: + with self.main_context(): + return self._main(args) + finally: + logging.shutdown() + + def _main(self, args): + # type: (List[str]) -> int + # Intentionally set as early as possible so globally-managed temporary + # directories are available to the rest of the code. + self.enter_context(global_tempdir_manager()) + + options, args = self.parse_args(args) + + # Set verbosity so that it can be used elsewhere. + self.verbosity = options.verbose - options.quiet + + level_number = setup_logging( + verbosity=self.verbosity, + no_color=options.no_color, + user_log_file=options.log, + ) + + if ( + sys.version_info[:2] == (2, 7) and + not options.no_python_version_warning + ): + message = ( + "A future version of pip will drop support for Python 2.7. " + "More details about Python 2 support in pip, can be found at " + "https://pip.pypa.io/en/latest/development/release-process/#python-2-support" # noqa + ) + if platform.python_implementation() == "CPython": + message = ( + "Python 2.7 reached the end of its life on January " + "1st, 2020. Please upgrade your Python as Python 2.7 " + "is no longer maintained. " + ) + message + deprecated(message, replacement=None, gone_in=None) + + if options.skip_requirements_regex: + deprecated( + "--skip-requirements-regex is unsupported and will be removed", + replacement=( + "manage requirements/constraints files explicitly, " + "possibly generating them from metadata" + ), + gone_in="20.1", + issue=7297, + ) + + # TODO: Try to get these passing down from the command? + # without resorting to os.environ to hold these. + # This also affects isolated builds and it should. + + if options.no_input: + os.environ['PIP_NO_INPUT'] = '1' + + if options.exists_action: + os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action) + + if options.require_venv and not self.ignore_require_venv: + # If a venv is required check if it can really be found + if not running_under_virtualenv(): + logger.critical( + 'Could not find an activated virtualenv (required).' + ) + sys.exit(VIRTUALENV_NOT_FOUND) + + if options.cache_dir: + options.cache_dir = normalize_path(options.cache_dir) + if not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "or is not writable by the current user. The cache " + "has been disabled. Check the permissions and owner of " + "that directory. 
If executing pip with sudo, you may want " + "sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + + try: + status = self.run(options, args) + # FIXME: all commands should return an exit status + # and when it is done, isinstance is not needed anymore + if isinstance(status, int): + return status + except PreviousBuildDirError as exc: + logger.critical(str(exc)) + logger.debug('Exception information:', exc_info=True) + + return PREVIOUS_BUILD_DIR_ERROR + except (InstallationError, UninstallationError, BadCommand) as exc: + logger.critical(str(exc)) + logger.debug('Exception information:', exc_info=True) + + return ERROR + except CommandError as exc: + logger.critical('%s', exc) + logger.debug('Exception information:', exc_info=True) + + return ERROR + except BrokenStdoutLoggingError: + # Bypass our logger and write any remaining messages to stderr + # because stdout no longer works. + print('ERROR: Pipe to stdout was broken', file=sys.stderr) + if level_number <= logging.DEBUG: + traceback.print_exc(file=sys.stderr) + + return ERROR + except KeyboardInterrupt: + logger.critical('Operation cancelled by user') + logger.debug('Exception information:', exc_info=True) + + return ERROR + except BaseException: + logger.critical('Exception:', exc_info=True) + + return UNKNOWN_ERROR + finally: + self.handle_pip_version_check(options) + + return SUCCESS diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/cmdoptions.py b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/cmdoptions.py new file mode 100644 index 0000000000000000000000000000000000000000..447f3191887dba6e1893c93a0c5ee77de88f1074 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/cmdoptions.py @@ -0,0 +1,957 @@ +""" +shared options and groups + +The principle here is to define options once, but *not* instantiate them +globally. One reason being that options with action='append' can carry state +between parses. pip parses general options twice internally, and shouldn't +pass on state. To be consistent, all options will follow this design. +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +from __future__ import absolute_import + +import logging +import os +import textwrap +import warnings +from distutils.util import strtobool +from functools import partial +from optparse import SUPPRESS_HELP, Option, OptionGroup +from textwrap import dedent + +from pip._internal.exceptions import CommandError +from pip._internal.locations import USER_CACHE_DIR, get_src_prefix +from pip._internal.models.format_control import FormatControl +from pip._internal.models.index import PyPI +from pip._internal.models.target_python import TargetPython +from pip._internal.utils.hashes import STRONG_HASHES +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.ui import BAR_TYPES + +if MYPY_CHECK_RUNNING: + from typing import Any, Callable, Dict, Optional, Tuple + from optparse import OptionParser, Values + from pip._internal.cli.parser import ConfigOptionParser + +logger = logging.getLogger(__name__) + + +def raise_option_error(parser, option, msg): + # type: (OptionParser, Option, str) -> None + """ + Raise an option parsing error using parser.error(). + + Args: + parser: an OptionParser instance. + option: an Option instance. + msg: the error text. 
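# A standalone sketch of the exception-to-exit-status dispatch that
# Command._main uses above. DemoError and run_demo are stand-in names, not
# pip classes; the numeric codes match pip._internal.cli.status_codes later
# in this diff (SUCCESS=0, ERROR=1, UNKNOWN_ERROR=2).
import logging
import sys

SUCCESS, ERROR, UNKNOWN_ERROR = 0, 1, 2
logger = logging.getLogger(__name__)

class DemoError(Exception):
    """Stands in for InstallationError, CommandError, etc."""

def run_demo(args):
    try:
        if not args:
            raise DemoError('you must give at least one argument')
        return SUCCESS                    # run() completed normally
    except DemoError as exc:
        # Known failure: log it and return a well-defined status code.
        logger.critical(str(exc))
        logger.debug('Exception information:', exc_info=True)
        return ERROR
    except KeyboardInterrupt:
        logger.critical('Operation cancelled by user')
        return ERROR
    except BaseException:
        # Anything unexpected becomes UNKNOWN_ERROR.
        logger.critical('Exception:', exc_info=True)
        return UNKNOWN_ERROR

if __name__ == '__main__':
    sys.exit(run_demo(sys.argv[1:]))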
+ """ + msg = '{} error: {}'.format(option, msg) + msg = textwrap.fill(' '.join(msg.split())) + parser.error(msg) + + +def make_option_group(group, parser): + # type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup + """ + Return an OptionGroup object + group -- assumed to be dict with 'name' and 'options' keys + parser -- an optparse Parser + """ + option_group = OptionGroup(parser, group['name']) + for option in group['options']: + option_group.add_option(option()) + return option_group + + +def check_install_build_global(options, check_options=None): + # type: (Values, Optional[Values]) -> None + """Disable wheels if per-setup.py call options are set. + + :param options: The OptionParser options to update. + :param check_options: The options to check, if not supplied defaults to + options. + """ + if check_options is None: + check_options = options + + def getname(n): + # type: (str) -> Optional[Any] + return getattr(check_options, n, None) + names = ["build_options", "global_options", "install_options"] + if any(map(getname, names)): + control = options.format_control + control.disallow_binaries() + warnings.warn( + 'Disabling all use of wheels due to the use of --build-option ' + '/ --global-option / --install-option.', stacklevel=2, + ) + + +def check_dist_restriction(options, check_target=False): + # type: (Values, bool) -> None + """Function for determining if custom platform options are allowed. + + :param options: The OptionParser options. + :param check_target: Whether or not to check if --target is being used. + """ + dist_restriction_set = any([ + options.python_version, + options.platform, + options.abi, + options.implementation, + ]) + + binary_only = FormatControl(set(), {':all:'}) + sdist_dependencies_allowed = ( + options.format_control != binary_only and + not options.ignore_dependencies + ) + + # Installations or downloads using dist restrictions must not combine + # source distributions and dist-specific wheels, as they are not + # guaranteed to be locally compatible. + if dist_restriction_set and sdist_dependencies_allowed: + raise CommandError( + "When restricting platform and interpreter constraints using " + "--python-version, --platform, --abi, or --implementation, " + "either --no-deps must be set, or --only-binary=:all: must be " + "set and --no-binary must not be set (or must be set to " + ":none:)." + ) + + if check_target: + if dist_restriction_set and not options.target_dir: + raise CommandError( + "Can not use any platform or abi specific options unless " + "installing via '--target'" + ) + + +def _path_option_check(option, opt, value): + # type: (Option, str, str) -> str + return os.path.expanduser(value) + + +class PipOption(Option): + TYPES = Option.TYPES + ("path",) + TYPE_CHECKER = Option.TYPE_CHECKER.copy() + TYPE_CHECKER["path"] = _path_option_check + + +########### +# options # +########### + +help_ = partial( + Option, + '-h', '--help', + dest='help', + action='help', + help='Show help.', +) # type: Callable[..., Option] + +isolated_mode = partial( + Option, + "--isolated", + dest="isolated_mode", + action="store_true", + default=False, + help=( + "Run pip in an isolated mode, ignoring environment variables and user " + "configuration." + ), +) # type: Callable[..., Option] + +require_virtualenv = partial( + Option, + # Run only if inside a virtualenv, bail if not. 
+ '--require-virtualenv', '--require-venv', + dest='require_venv', + action='store_true', + default=False, + help=SUPPRESS_HELP +) # type: Callable[..., Option] + +verbose = partial( + Option, + '-v', '--verbose', + dest='verbose', + action='count', + default=0, + help='Give more output. Option is additive, and can be used up to 3 times.' +) # type: Callable[..., Option] + +no_color = partial( + Option, + '--no-color', + dest='no_color', + action='store_true', + default=False, + help="Suppress colored output", +) # type: Callable[..., Option] + +version = partial( + Option, + '-V', '--version', + dest='version', + action='store_true', + help='Show version and exit.', +) # type: Callable[..., Option] + +quiet = partial( + Option, + '-q', '--quiet', + dest='quiet', + action='count', + default=0, + help=( + 'Give less output. Option is additive, and can be used up to 3' + ' times (corresponding to WARNING, ERROR, and CRITICAL logging' + ' levels).' + ), +) # type: Callable[..., Option] + +progress_bar = partial( + Option, + '--progress-bar', + dest='progress_bar', + type='choice', + choices=list(BAR_TYPES.keys()), + default='on', + help=( + 'Specify type of progress to be displayed [' + + '|'.join(BAR_TYPES.keys()) + '] (default: %default)' + ), +) # type: Callable[..., Option] + +log = partial( + PipOption, + "--log", "--log-file", "--local-log", + dest="log", + metavar="path", + type="path", + help="Path to a verbose appending log." +) # type: Callable[..., Option] + +no_input = partial( + Option, + # Don't ask for input + '--no-input', + dest='no_input', + action='store_true', + default=False, + help=SUPPRESS_HELP +) # type: Callable[..., Option] + +proxy = partial( + Option, + '--proxy', + dest='proxy', + type='str', + default='', + help="Specify a proxy in the form [user:passwd@]proxy.server:port." 
+) # type: Callable[..., Option] + +retries = partial( + Option, + '--retries', + dest='retries', + type='int', + default=5, + help="Maximum number of retries each connection should attempt " + "(default %default times).", +) # type: Callable[..., Option] + +timeout = partial( + Option, + '--timeout', '--default-timeout', + metavar='sec', + dest='timeout', + type='float', + default=15, + help='Set the socket timeout (default %default seconds).', +) # type: Callable[..., Option] + +skip_requirements_regex = partial( + Option, + # A regex to be used to skip requirements + '--skip-requirements-regex', + dest='skip_requirements_regex', + type='str', + default='', + help=SUPPRESS_HELP, +) # type: Callable[..., Option] + + +def exists_action(): + # type: () -> Option + return Option( + # Option when path already exist + '--exists-action', + dest='exists_action', + type='choice', + choices=['s', 'i', 'w', 'b', 'a'], + default=[], + action='append', + metavar='action', + help="Default action when a path already exists: " + "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.", + ) + + +cert = partial( + PipOption, + '--cert', + dest='cert', + type='path', + metavar='path', + help="Path to alternate CA bundle.", +) # type: Callable[..., Option] + +client_cert = partial( + PipOption, + '--client-cert', + dest='client_cert', + type='path', + default=None, + metavar='path', + help="Path to SSL client certificate, a single file containing the " + "private key and the certificate in PEM format.", +) # type: Callable[..., Option] + +index_url = partial( + Option, + '-i', '--index-url', '--pypi-url', + dest='index_url', + metavar='URL', + default=PyPI.simple_url, + help="Base URL of the Python Package Index (default %default). " + "This should point to a repository compliant with PEP 503 " + "(the simple repository API) or a local directory laid out " + "in the same format.", +) # type: Callable[..., Option] + + +def extra_index_url(): + # type: () -> Option + return Option( + '--extra-index-url', + dest='extra_index_urls', + metavar='URL', + action='append', + default=[], + help="Extra URLs of package indexes to use in addition to " + "--index-url. Should follow the same rules as " + "--index-url.", + ) + + +no_index = partial( + Option, + '--no-index', + dest='no_index', + action='store_true', + default=False, + help='Ignore package index (only looking at --find-links URLs instead).', +) # type: Callable[..., Option] + + +def find_links(): + # type: () -> Option + return Option( + '-f', '--find-links', + dest='find_links', + action='append', + default=[], + metavar='url', + help="If a url or path to an html file, then parse for links to " + "archives. If a local path or file:// url that's a directory, " + "then look for archives in the directory listing.", + ) + + +def trusted_host(): + # type: () -> Option + return Option( + "--trusted-host", + dest="trusted_hosts", + action="append", + metavar="HOSTNAME", + default=[], + help="Mark this host or host:port pair as trusted, even though it " + "does not have valid or any HTTPS.", + ) + + +def constraints(): + # type: () -> Option + return Option( + '-c', '--constraint', + dest='constraints', + action='append', + default=[], + metavar='file', + help='Constrain versions using the given constraints file. ' + 'This option can be used multiple times.' + ) + + +def requirements(): + # type: () -> Option + return Option( + '-r', '--requirement', + dest='requirements', + action='append', + default=[], + metavar='file', + help='Install from the given requirements file. 
' + 'This option can be used multiple times.' + ) + + +def editable(): + # type: () -> Option + return Option( + '-e', '--editable', + dest='editables', + action='append', + default=[], + metavar='path/url', + help=('Install a project in editable mode (i.e. setuptools ' + '"develop mode") from a local project path or a VCS url.'), + ) + + +def _handle_src(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + value = os.path.abspath(value) + setattr(parser.values, option.dest, value) + + +src = partial( + PipOption, + '--src', '--source', '--source-dir', '--source-directory', + dest='src_dir', + type='path', + metavar='dir', + default=get_src_prefix(), + action='callback', + callback=_handle_src, + help='Directory to check out editable projects into. ' + 'The default in a virtualenv is "<venv path>/src". ' + 'The default for global installs is "<current dir>/src".' +) # type: Callable[..., Option] + + +def _get_format_control(values, option): + # type: (Values, Option) -> Any + """Get a format_control object.""" + return getattr(values, option.dest) + + +def _handle_no_binary(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + existing = _get_format_control(parser.values, option) + FormatControl.handle_mutual_excludes( + value, existing.no_binary, existing.only_binary, + ) + + +def _handle_only_binary(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + existing = _get_format_control(parser.values, option) + FormatControl.handle_mutual_excludes( + value, existing.only_binary, existing.no_binary, + ) + + +def no_binary(): + # type: () -> Option + format_control = FormatControl(set(), set()) + return Option( + "--no-binary", dest="format_control", action="callback", + callback=_handle_no_binary, type="str", + default=format_control, + help="Do not use binary packages. Can be supplied multiple times, and " + "each time adds to the existing value. Accepts either :all: to " + "disable all binary packages, :none: to empty the set, or one or " + "more package names with commas between them (no colons). Note " + "that some packages are tricky to compile and may fail to " + "install when this option is used on them.", + ) + + +def only_binary(): + # type: () -> Option + format_control = FormatControl(set(), set()) + return Option( + "--only-binary", dest="format_control", action="callback", + callback=_handle_only_binary, type="str", + default=format_control, + help="Do not use source packages. Can be supplied multiple times, and " + "each time adds to the existing value. Accepts either :all: to " + "disable all source packages, :none: to empty the set, or one or " + "more package names with commas between them. Packages without " + "binary distributions will fail to install when this option is " + "used on them.", + ) + + +platform = partial( + Option, + '--platform', + dest='platform', + metavar='platform', + default=None, + help=("Only use wheels compatible with <platform>. " + "Defaults to the platform of the running system."), +) # type: Callable[..., Option] + + +# This was made a separate function for unit-testing purposes. +def _convert_python_version(value): + # type: (str) -> Tuple[Tuple[int, ...], Optional[str]] + """ + Convert a version string like "3", "37", or "3.7.3" into a tuple of ints. + + :return: A 2-tuple (version_info, error_msg), where `error_msg` is + non-None if and only if there was a parsing error. 
+ """ + if not value: + # The empty string is the same as not providing a value. + return (None, None) + + parts = value.split('.') + if len(parts) > 3: + return ((), 'at most three version parts are allowed') + + if len(parts) == 1: + # Then we are in the case of "3" or "37". + value = parts[0] + if len(value) > 1: + parts = [value[0], value[1:]] + + try: + version_info = tuple(int(part) for part in parts) + except ValueError: + return ((), 'each version part must be an integer') + + return (version_info, None) + + +def _handle_python_version(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + """ + Handle a provided --python-version value. + """ + version_info, error_msg = _convert_python_version(value) + if error_msg is not None: + msg = ( + 'invalid --python-version value: {!r}: {}'.format( + value, error_msg, + ) + ) + raise_option_error(parser, option=option, msg=msg) + + parser.values.python_version = version_info + + +python_version = partial( + Option, + '--python-version', + dest='python_version', + metavar='python_version', + action='callback', + callback=_handle_python_version, type='str', + default=None, + help=dedent("""\ + The Python interpreter version to use for wheel and "Requires-Python" + compatibility checks. Defaults to a version derived from the running + interpreter. The version can be specified using up to three dot-separated + integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor + version can also be given as a string without dots (e.g. "37" for 3.7.0). + """), +) # type: Callable[..., Option] + + +implementation = partial( + Option, + '--implementation', + dest='implementation', + metavar='implementation', + default=None, + help=("Only use wheels compatible with Python " + "implementation <implementation>, e.g. 'pp', 'jy', 'cp', " + " or 'ip'. If not specified, then the current " + "interpreter implementation is used. Use 'py' to force " + "implementation-agnostic wheels."), +) # type: Callable[..., Option] + + +abi = partial( + Option, + '--abi', + dest='abi', + metavar='abi', + default=None, + help=("Only use wheels compatible with Python " + "abi <abi>, e.g. 'pypy_41'. If not specified, then the " + "current interpreter abi tag is used. Generally " + "you will need to specify --implementation, " + "--platform, and --python-version when using " + "this option."), +) # type: Callable[..., Option] + + +def add_target_python_options(cmd_opts): + # type: (OptionGroup) -> None + cmd_opts.add_option(platform()) + cmd_opts.add_option(python_version()) + cmd_opts.add_option(implementation()) + cmd_opts.add_option(abi()) + + +def make_target_python(options): + # type: (Values) -> TargetPython + target_python = TargetPython( + platform=options.platform, + py_version_info=options.python_version, + abi=options.abi, + implementation=options.implementation, + ) + + return target_python + + +def prefer_binary(): + # type: () -> Option + return Option( + "--prefer-binary", + dest="prefer_binary", + action="store_true", + default=False, + help="Prefer older binary packages over newer source packages." + ) + + +cache_dir = partial( + PipOption, + "--cache-dir", + dest="cache_dir", + default=USER_CACHE_DIR, + metavar="dir", + type='path', + help="Store the cache data in <dir>." +) # type: Callable[..., Option] + + +def _handle_no_cache_dir(option, opt, value, parser): + # type: (Option, str, str, OptionParser) -> None + """ + Process a value provided for the --no-cache-dir option. 
+ + This is an optparse.Option callback for the --no-cache-dir option. + """ + # The value argument will be None if --no-cache-dir is passed via the + # command-line, since the option doesn't accept arguments. However, + # the value can be non-None if the option is triggered e.g. by an + # environment variable, like PIP_NO_CACHE_DIR=true. + if value is not None: + # Then parse the string value to get argument error-checking. + try: + strtobool(value) + except ValueError as exc: + raise_option_error(parser, option=option, msg=str(exc)) + + # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool() + # converted to 0 (like "false" or "no") caused cache_dir to be disabled + # rather than enabled (logic would say the latter). Thus, we disable + # the cache directory not just on values that parse to True, but (for + # backwards compatibility reasons) also on values that parse to False. + # In other words, always set it to False if the option is provided in + # some (valid) form. + parser.values.cache_dir = False + + +no_cache = partial( + Option, + "--no-cache-dir", + dest="cache_dir", + action="callback", + callback=_handle_no_cache_dir, + help="Disable the cache.", +) # type: Callable[..., Option] + +no_deps = partial( + Option, + '--no-deps', '--no-dependencies', + dest='ignore_dependencies', + action='store_true', + default=False, + help="Don't install package dependencies.", +) # type: Callable[..., Option] + + +def _handle_build_dir(option, opt, value, parser): + # type: (Option, str, str, OptionParser) -> None + if value: + value = os.path.abspath(value) + setattr(parser.values, option.dest, value) + + +build_dir = partial( + PipOption, + '-b', '--build', '--build-dir', '--build-directory', + dest='build_dir', + type='path', + metavar='dir', + action='callback', + callback=_handle_build_dir, + help='Directory to unpack packages into and build in. Note that ' + 'an initial build still takes place in a temporary directory. ' + 'The location of temporary directories can be controlled by setting ' + 'the TMPDIR environment variable (TEMP on Windows) appropriately. ' + 'When passed, build directories are not cleaned in case of failures.' +) # type: Callable[..., Option] + +ignore_requires_python = partial( + Option, + '--ignore-requires-python', + dest='ignore_requires_python', + action='store_true', + help='Ignore the Requires-Python information.' +) # type: Callable[..., Option] + +no_build_isolation = partial( + Option, + '--no-build-isolation', + dest='build_isolation', + action='store_false', + default=True, + help='Disable isolation when building a modern source distribution. ' + 'Build dependencies specified by PEP 518 must be already installed ' + 'if this option is used.' +) # type: Callable[..., Option] + + +def _handle_no_use_pep517(option, opt, value, parser): + # type: (Option, str, str, OptionParser) -> None + """ + Process a value provided for the --no-use-pep517 option. + + This is an optparse.Option callback for the no_use_pep517 option. + """ + # Since --no-use-pep517 doesn't accept arguments, the value argument + # will be None if --no-use-pep517 is passed via the command-line. + # However, the value can be non-None if the option is triggered e.g. + # by an environment variable, for example "PIP_NO_USE_PEP517=true". + if value is not None: + msg = """A value was passed for --no-use-pep517, + probably using either the PIP_NO_USE_PEP517 environment variable + or the "no-use-pep517" config file option. 
Use an appropriate value + of the PIP_USE_PEP517 environment variable or the "use-pep517" + config file option instead. + """ + raise_option_error(parser, option=option, msg=msg) + + # Otherwise, --no-use-pep517 was passed via the command-line. + parser.values.use_pep517 = False + + +use_pep517 = partial( + Option, + '--use-pep517', + dest='use_pep517', + action='store_true', + default=None, + help='Use PEP 517 for building source distributions ' + '(use --no-use-pep517 to force legacy behaviour).' +) # type: Any + +no_use_pep517 = partial( + Option, + '--no-use-pep517', + dest='use_pep517', + action='callback', + callback=_handle_no_use_pep517, + default=None, + help=SUPPRESS_HELP +) # type: Any + +install_options = partial( + Option, + '--install-option', + dest='install_options', + action='append', + metavar='options', + help="Extra arguments to be supplied to the setup.py install " + "command (use like --install-option=\"--install-scripts=/usr/local/" + "bin\"). Use multiple --install-option options to pass multiple " + "options to setup.py install. If you are using an option with a " + "directory path, be sure to use absolute path.", +) # type: Callable[..., Option] + +global_options = partial( + Option, + '--global-option', + dest='global_options', + action='append', + metavar='options', + help="Extra global options to be supplied to the setup.py " + "call before the install command.", +) # type: Callable[..., Option] + +no_clean = partial( + Option, + '--no-clean', + action='store_true', + default=False, + help="Don't clean up build directories." +) # type: Callable[..., Option] + +pre = partial( + Option, + '--pre', + action='store_true', + default=False, + help="Include pre-release and development versions. By default, " + "pip only finds stable versions.", +) # type: Callable[..., Option] + +disable_pip_version_check = partial( + Option, + "--disable-pip-version-check", + dest="disable_pip_version_check", + action="store_true", + default=True, + help="Don't periodically check PyPI to determine whether a new version " + "of pip is available for download. Implied with --no-index.", +) # type: Callable[..., Option] + + +# Deprecated, Remove later +always_unzip = partial( + Option, + '-Z', '--always-unzip', + dest='always_unzip', + action='store_true', + help=SUPPRESS_HELP, +) # type: Callable[..., Option] + + +def _handle_merge_hash(option, opt_str, value, parser): + # type: (Option, str, str, OptionParser) -> None + """Given a value spelled "algo:digest", append the digest to a list + pointed to in a dict by the algo name.""" + if not parser.values.hashes: + parser.values.hashes = {} + try: + algo, digest = value.split(':', 1) + except ValueError: + parser.error('Arguments to %s must be a hash name ' + 'followed by a value, like --hash=sha256:abcde...' % + opt_str) + if algo not in STRONG_HASHES: + parser.error('Allowed hash algorithms for %s are %s.' % + (opt_str, ', '.join(STRONG_HASHES))) + parser.values.hashes.setdefault(algo, []).append(digest) + + +hash = partial( + Option, + '--hash', + # Hash values eventually end up in InstallRequirement.hashes due to + # __dict__ copying in process_line(). + dest='hashes', + action='callback', + callback=_handle_merge_hash, + type='string', + help="Verify that the package's archive matches this " + 'hash before installing. 
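# Sketch of the "--hash=algo:digest" accumulation performed by
# _handle_merge_hash above: each value splits on the first ':' and digests
# are grouped per algorithm, so repeated --hash options build up a
# {algo: [digests]} dict. merge_hash() is a hypothetical standalone version
# of that callback; pip additionally restricts algo to STRONG_HASHES.
def merge_hash(hashes, value):
    try:
        algo, digest = value.split(':', 1)
    except ValueError:
        raise SystemExit('expected a hash name followed by a value, '
                         'like sha256:abcde...')
    hashes.setdefault(algo, []).append(digest)
    return hashes

hashes = {}
merge_hash(hashes, 'sha256:aaaa')
merge_hash(hashes, 'sha256:bbbb')
merge_hash(hashes, 'sha512:cccc')
assert hashes == {'sha256': ['aaaa', 'bbbb'], 'sha512': ['cccc']}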
Example: --hash=sha256:abcdef...', +) # type: Callable[..., Option] + + +require_hashes = partial( + Option, + '--require-hashes', + dest='require_hashes', + action='store_true', + default=False, + help='Require a hash to check each requirement against, for ' + 'repeatable installs. This option is implied when any package in a ' + 'requirements file has a --hash option.', +) # type: Callable[..., Option] + + +list_path = partial( + PipOption, + '--path', + dest='path', + type='path', + action='append', + help='Restrict to the specified installation path for listing ' + 'packages (can be used multiple times).' +) # type: Callable[..., Option] + + +def check_list_path_option(options): + # type: (Values) -> None + if options.path and (options.user or options.local): + raise CommandError( + "Cannot combine '--path' with '--user' or '--local'" + ) + + +no_python_version_warning = partial( + Option, + '--no-python-version-warning', + dest='no_python_version_warning', + action='store_true', + default=False, + help='Silence deprecation warnings for upcoming unsupported Pythons.', +) # type: Callable[..., Option] + + +########## +# groups # +########## + +general_group = { + 'name': 'General Options', + 'options': [ + help_, + isolated_mode, + require_virtualenv, + verbose, + version, + quiet, + log, + no_input, + proxy, + retries, + timeout, + skip_requirements_regex, + exists_action, + trusted_host, + cert, + client_cert, + cache_dir, + no_cache, + disable_pip_version_check, + no_color, + no_python_version_warning, + ] +} # type: Dict[str, Any] + +index_group = { + 'name': 'Package Index Options', + 'options': [ + index_url, + extra_index_url, + no_index, + find_links, + ] +} # type: Dict[str, Any] diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/command_context.py b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/command_context.py new file mode 100644 index 0000000000000000000000000000000000000000..d1a64a776062a95258d3331cdec9b987e433ddf9 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/command_context.py @@ -0,0 +1,36 @@ +from contextlib import contextmanager + +from pip._vendor.contextlib2 import ExitStack + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Iterator, ContextManager, TypeVar + + _T = TypeVar('_T', covariant=True) + + +class CommandContextMixIn(object): + def __init__(self): + # type: () -> None + super(CommandContextMixIn, self).__init__() + self._in_main_context = False + self._main_context = ExitStack() + + @contextmanager + def main_context(self): + # type: () -> Iterator[None] + assert not self._in_main_context + + self._in_main_context = True + try: + with self._main_context: + yield + finally: + self._in_main_context = False + + def enter_context(self, context_provider): + # type: (ContextManager[_T]) -> _T + assert self._in_main_context + + return self._main_context.enter_context(context_provider) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/main.py b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/main.py new file mode 100644 index 0000000000000000000000000000000000000000..5e97a5103f6af5baded5758f0ee41eb1aa641cc7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/main.py @@ -0,0 +1,75 @@ +"""Primary application entrypoint. 
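# How group dicts like general_group above become real optparse groups: each
# entry is a callable (typically a partial) that is invoked per parse, which
# is the make_option_group() pattern from earlier in this file. Shown end to
# end with a throwaway parser and a single illustrative option.
import optparse
from functools import partial

verbose = partial(optparse.Option, '-v', '--verbose', dest='verbose',
                  action='count', default=0)
demo_group = {'name': 'General Options', 'options': [verbose]}

parser = optparse.OptionParser()
group = optparse.OptionGroup(parser, demo_group['name'])
for option_maker in demo_group['options']:
    group.add_option(option_maker())   # each entry is called to build an Option
parser.add_option_group(group)

opts, _ = parser.parse_args(['-vv'])
assert opts.verbose == 2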
+""" +from __future__ import absolute_import + +import locale +import logging +import os +import sys + +from pip._internal.cli.autocompletion import autocomplete +from pip._internal.cli.main_parser import parse_command +from pip._internal.commands import create_command +from pip._internal.exceptions import PipError +from pip._internal.utils import deprecation +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + +logger = logging.getLogger(__name__) + + +# Do not import and use main() directly! Using it directly is actively +# discouraged by pip's maintainers. The name, location and behavior of +# this function is subject to change, so calling it directly is not +# portable across different pip versions. + +# In addition, running pip in-process is unsupported and unsafe. This is +# elaborated in detail at +# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program. +# That document also provides suggestions that should work for nearly +# all users that are considering importing and using main() directly. + +# However, we know that certain users will still want to invoke pip +# in-process. If you understand and accept the implications of using pip +# in an unsupported manner, the best approach is to use runpy to avoid +# depending on the exact location of this entry point. + +# The following example shows how to use runpy to invoke pip in that +# case: +# +# sys.argv = ["pip", your, args, here] +# runpy.run_module("pip", run_name="__main__") +# +# Note that this will exit the process after running, unlike a direct +# call to main. As it is not safe to do any processing after calling +# main, this should not be an issue in practice. + +def main(args=None): + # type: (Optional[List[str]]) -> int + if args is None: + args = sys.argv[1:] + + # Configure our deprecation warnings to be sent through loggers + deprecation.install_warning_logger() + + autocomplete() + + try: + cmd_name, cmd_args = parse_command(args) + except PipError as exc: + sys.stderr.write("ERROR: %s" % exc) + sys.stderr.write(os.linesep) + sys.exit(1) + + # Needed for locale.getpreferredencoding(False) to work + # in pip._internal.utils.encoding.auto_decode + try: + locale.setlocale(locale.LC_ALL, '') + except locale.Error as e: + # setlocale can apparently crash if locale are uninitialized + logger.debug("Ignoring error %s when setting locale", e) + command = create_command(cmd_name, isolated=("--isolated" in cmd_args)) + + return command.main(cmd_args) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/main_parser.py b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/main_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..a89821d44890ee9a89b186c66c9ce12d5ccc02dc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/main_parser.py @@ -0,0 +1,99 @@ +"""A single place for constructing and exposing the main parser +""" + +import os +import sys + +from pip._internal.cli import cmdoptions +from pip._internal.cli.parser import ( + ConfigOptionParser, + UpdatingDefaultsHelpFormatter, +) +from pip._internal.commands import commands_dict, get_similar_commands +from pip._internal.exceptions import CommandError +from pip._internal.utils.misc import get_pip_version, get_prog +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Tuple, List + + +__all__ = ["create_main_parser", "parse_command"] + + +def create_main_parser(): 
+ # type: () -> ConfigOptionParser + """Creates and returns the main parser for pip's CLI + """ + + parser_kw = { + 'usage': '\n%prog <command> [options]', + 'add_help_option': False, + 'formatter': UpdatingDefaultsHelpFormatter(), + 'name': 'global', + 'prog': get_prog(), + } + + parser = ConfigOptionParser(**parser_kw) + parser.disable_interspersed_args() + + parser.version = get_pip_version() + + # add the general options + gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) + parser.add_option_group(gen_opts) + + # so the help formatter knows + parser.main = True # type: ignore + + # create command listing for description + description = [''] + [ + '%-27s %s' % (name, command_info.summary) + for name, command_info in commands_dict.items() + ] + parser.description = '\n'.join(description) + + return parser + + +def parse_command(args): + # type: (List[str]) -> Tuple[str, List[str]] + parser = create_main_parser() + + # Note: parser calls disable_interspersed_args(), so the result of this + # call is to split the initial args into the general options before the + # subcommand and everything else. + # For example: + # args: ['--timeout=5', 'install', '--user', 'INITools'] + # general_options: ['--timeout==5'] + # args_else: ['install', '--user', 'INITools'] + general_options, args_else = parser.parse_args(args) + + # --version + if general_options.version: + sys.stdout.write(parser.version) # type: ignore + sys.stdout.write(os.linesep) + sys.exit() + + # pip || pip help -> print_help() + if not args_else or (args_else[0] == 'help' and len(args_else) == 1): + parser.print_help() + sys.exit() + + # the subcommand name + cmd_name = args_else[0] + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = ['unknown command "%s"' % cmd_name] + if guess: + msg.append('maybe you meant "%s"' % guess) + + raise CommandError(' - '.join(msg)) + + # all the args without the subcommand + cmd_args = args[:] + cmd_args.remove(cmd_name) + + return cmd_name, cmd_args diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/parser.py b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/parser.py new file mode 100644 index 0000000000000000000000000000000000000000..c99456bae88d0c73ab79a671b440993c8195568d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/parser.py @@ -0,0 +1,265 @@ +"""Base option parser setup""" + +# The following comment should be removed at some point in the future. 
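# A minimal reproduction of the split that parse_command above relies on:
# disable_interspersed_args() makes optparse stop at the first positional
# argument, so global options are consumed and everything from the
# subcommand onwards is left untouched. Parser and option are illustrative;
# the sample argv comes from the comments above.
import optparse

parser = optparse.OptionParser()
parser.add_option('--timeout', type='float')
parser.disable_interspersed_args()

args = ['--timeout=5', 'install', '--user', 'INITools']
opts, rest = parser.parse_args(args)
assert opts.timeout == 5.0
assert rest == ['install', '--user', 'INITools']

cmd_name = rest[0]          # 'install'
cmd_args = args[:]
cmd_args.remove(cmd_name)   # ['--timeout=5', '--user', 'INITools'], as in pip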
+# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import logging +import optparse +import sys +import textwrap +from distutils.util import strtobool + +from pip._vendor.six import string_types + +from pip._internal.cli.status_codes import UNKNOWN_ERROR +from pip._internal.configuration import Configuration, ConfigurationError +from pip._internal.utils.compat import get_terminal_size + +logger = logging.getLogger(__name__) + + +class PrettyHelpFormatter(optparse.IndentedHelpFormatter): + """A prettier/less verbose help formatter for optparse.""" + + def __init__(self, *args, **kwargs): + # help position must be aligned with __init__.parseopts.description + kwargs['max_help_position'] = 30 + kwargs['indent_increment'] = 1 + kwargs['width'] = get_terminal_size()[0] - 2 + optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs) + + def format_option_strings(self, option): + return self._format_option_strings(option, ' <%s>', ', ') + + def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '): + """ + Return a comma-separated list of option strings and metavars. + + :param option: tuple of (short opt, long opt), e.g: ('-f', '--format') + :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar + :param optsep: separator + """ + opts = [] + + if option._short_opts: + opts.append(option._short_opts[0]) + if option._long_opts: + opts.append(option._long_opts[0]) + if len(opts) > 1: + opts.insert(1, optsep) + + if option.takes_value(): + metavar = option.metavar or option.dest.lower() + opts.append(mvarfmt % metavar.lower()) + + return ''.join(opts) + + def format_heading(self, heading): + if heading == 'Options': + return '' + return heading + ':\n' + + def format_usage(self, usage): + """ + Ensure there is only one newline between usage and the first heading + if there is no description. + """ + msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ") + return msg + + def format_description(self, description): + # leave full control over description to us + if description: + if hasattr(self.parser, 'main'): + label = 'Commands' + else: + label = 'Description' + # some doc strings have initial newlines, some don't + description = description.lstrip('\n') + # some doc strings have final newlines and spaces, some don't + description = description.rstrip() + # dedent, then reindent + description = self.indent_lines(textwrap.dedent(description), " ") + description = '%s:\n%s\n' % (label, description) + return description + else: + return '' + + def format_epilog(self, epilog): + # leave full control over epilog to us + if epilog: + return epilog + else: + return '' + + def indent_lines(self, text, indent): + new_lines = [indent + line for line in text.split('\n')] + return "\n".join(new_lines) + + +class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter): + """Custom help formatter for use in ConfigOptionParser. + + This is updates the defaults before expanding them, allowing + them to show up correctly in the help listing. 
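# What _format_option_strings in PrettyHelpFormatter above renders for a
# typical option: only the first short and first long spelling, separated by
# ', ', with the lower-cased metavar appended as ' <metavar>' when the option
# takes a value. Recreated standalone for illustration:
import optparse

option = optparse.Option('-c', '--constraint', dest='constraints',
                         metavar='file', action='append')

opts = []
if option._short_opts:
    opts.append(option._short_opts[0])
if option._long_opts:
    opts.append(option._long_opts[0])
if len(opts) > 1:
    opts.insert(1, ', ')
if option.takes_value():
    metavar = option.metavar or option.dest.lower()
    opts.append(' <%s>' % metavar.lower())

assert ''.join(opts) == '-c, --constraint <file>'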
+ """ + + def expand_default(self, option): + if self.parser is not None: + self.parser._update_defaults(self.parser.defaults) + return optparse.IndentedHelpFormatter.expand_default(self, option) + + +class CustomOptionParser(optparse.OptionParser): + + def insert_option_group(self, idx, *args, **kwargs): + """Insert an OptionGroup at a given position.""" + group = self.add_option_group(*args, **kwargs) + + self.option_groups.pop() + self.option_groups.insert(idx, group) + + return group + + @property + def option_list_all(self): + """Get a list of all options, including those in option groups.""" + res = self.option_list[:] + for i in self.option_groups: + res.extend(i.option_list) + + return res + + +class ConfigOptionParser(CustomOptionParser): + """Custom option parser which updates its defaults by checking the + configuration files and environmental variables""" + + def __init__(self, *args, **kwargs): + self.name = kwargs.pop('name') + + isolated = kwargs.pop("isolated", False) + self.config = Configuration(isolated) + + assert self.name + optparse.OptionParser.__init__(self, *args, **kwargs) + + def check_default(self, option, key, val): + try: + return option.check_value(key, val) + except optparse.OptionValueError as exc: + print("An error occurred during configuration: %s" % exc) + sys.exit(3) + + def _get_ordered_configuration_items(self): + # Configuration gives keys in an unordered manner. Order them. + override_order = ["global", self.name, ":env:"] + + # Pool the options into different groups + section_items = {name: [] for name in override_order} + for section_key, val in self.config.items(): + # ignore empty values + if not val: + logger.debug( + "Ignoring configuration key '%s' as it's value is empty.", + section_key + ) + continue + + section, key = section_key.split(".", 1) + if section in override_order: + section_items[section].append((key, val)) + + # Yield each group in their override order + for section in override_order: + for key, val in section_items[section]: + yield key, val + + def _update_defaults(self, defaults): + """Updates the given defaults with values from the config files and + the environ. Does a little special handling for certain types of + options (lists).""" + + # Accumulate complex default state. + self.values = optparse.Values(self.defaults) + late_eval = set() + # Then set the options with those values + for key, val in self._get_ordered_configuration_items(): + # '--' because configuration supports only long names + option = self.get_option('--' + key) + + # Ignore options not present in this parser. E.g. non-globals put + # in [global] by users that want them to apply to all applicable + # commands. 
+ if option is None: + continue + + if option.action in ('store_true', 'store_false', 'count'): + try: + val = strtobool(val) + except ValueError: + error_msg = invalid_config_error_message( + option.action, key, val + ) + self.error(error_msg) + + elif option.action == 'append': + val = val.split() + val = [self.check_default(option, key, v) for v in val] + elif option.action == 'callback': + late_eval.add(option.dest) + opt_str = option.get_opt_string() + val = option.convert_value(opt_str, val) + # From take_action + args = option.callback_args or () + kwargs = option.callback_kwargs or {} + option.callback(option, opt_str, val, self, *args, **kwargs) + else: + val = self.check_default(option, key, val) + + defaults[option.dest] = val + + for key in late_eval: + defaults[key] = getattr(self.values, key) + self.values = None + return defaults + + def get_default_values(self): + """Overriding to make updating the defaults after instantiation of + the option parser possible, _update_defaults() does the dirty work.""" + if not self.process_default_values: + # Old, pre-Optik 1.5 behaviour. + return optparse.Values(self.defaults) + + # Load the configuration, or error out in case of an error + try: + self.config.load() + except ConfigurationError as err: + self.exit(UNKNOWN_ERROR, str(err)) + + defaults = self._update_defaults(self.defaults.copy()) # ours + for option in self._get_all_options(): + default = defaults.get(option.dest) + if isinstance(default, string_types): + opt_str = option.get_opt_string() + defaults[option.dest] = option.check_value(opt_str, default) + return optparse.Values(defaults) + + def error(self, msg): + self.print_usage(sys.stderr) + self.exit(UNKNOWN_ERROR, "%s\n" % msg) + + +def invalid_config_error_message(action, key, val): + """Returns a better error message when invalid configuration option + is provided.""" + if action in ('store_true', 'store_false'): + return ("{0} is not a valid value for {1} option, " + "please specify a boolean value like yes/no, " + "true/false or 1/0 instead.").format(val, key) + + return ("{0} is not a valid value for {1} option, " + "please specify a numerical value like 1/0 " + "instead.").format(val, key) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/req_command.py b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/req_command.py new file mode 100644 index 0000000000000000000000000000000000000000..9383b3b8dca756dea6a37b3f71cb3e556b60dfe9 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/req_command.py @@ -0,0 +1,333 @@ +"""Contains the Command base classes that depend on PipSession. + +The classes in this module are in a separate module so the commands not +needing download / PackageFinder capability don't unnecessarily import the +PackageFinder machinery and all its vendored dependencies, etc. 
+""" + +import logging +import os +from functools import partial + +from pip._internal.cli.base_command import Command +from pip._internal.cli.command_context import CommandContextMixIn +from pip._internal.exceptions import CommandError +from pip._internal.index.package_finder import PackageFinder +from pip._internal.legacy_resolve import Resolver +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.network.download import Downloader +from pip._internal.network.session import PipSession +from pip._internal.operations.prepare import RequirementPreparer +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, + install_req_from_req_string, +) +from pip._internal.req.req_file import parse_requirements +from pip._internal.self_outdated_check import ( + make_link_collector, + pip_self_version_check, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import List, Optional, Tuple + from pip._internal.cache import WheelCache + from pip._internal.models.target_python import TargetPython + from pip._internal.req.req_set import RequirementSet + from pip._internal.req.req_tracker import RequirementTracker + from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +class SessionCommandMixin(CommandContextMixIn): + + """ + A class mixin for command classes needing _build_session(). + """ + def __init__(self): + # type: () -> None + super(SessionCommandMixin, self).__init__() + self._session = None # Optional[PipSession] + + @classmethod + def _get_index_urls(cls, options): + # type: (Values) -> Optional[List[str]] + """Return a list of index urls from user-provided options.""" + index_urls = [] + if not getattr(options, "no_index", False): + url = getattr(options, "index_url", None) + if url: + index_urls.append(url) + urls = getattr(options, "extra_index_urls", None) + if urls: + index_urls.extend(urls) + # Return None rather than an empty list + return index_urls or None + + def get_default_session(self, options): + # type: (Values) -> PipSession + """Get a default-managed session.""" + if self._session is None: + self._session = self.enter_context(self._build_session(options)) + # there's no type annotation on requests.Session, so it's + # automatically ContextManager[Any] and self._session becomes Any, + # then https://github.com/python/mypy/issues/7696 kicks in + assert self._session is not None + return self._session + + def _build_session(self, options, retries=None, timeout=None): + # type: (Values, Optional[int], Optional[int]) -> PipSession + assert not options.cache_dir or os.path.isabs(options.cache_dir) + session = PipSession( + cache=( + os.path.join(options.cache_dir, "http") + if options.cache_dir else None + ), + retries=retries if retries is not None else options.retries, + trusted_hosts=options.trusted_hosts, + index_urls=self._get_index_urls(options), + ) + + # Handle custom ca-bundles from the user + if options.cert: + session.verify = options.cert + + # Handle SSL client certificate + if options.client_cert: + session.cert = options.client_cert + + # Handle timeouts + if options.timeout or timeout: + session.timeout = ( + timeout if timeout is not None else options.timeout + ) + + # Handle configured proxies + if options.proxy: + session.proxies = { + "http": options.proxy, + "https": options.proxy, + } + + # Determine if we can prompt the user for authentication or not + 
session.auth.prompting = not options.no_input + + return session + + +class IndexGroupCommand(Command, SessionCommandMixin): + + """ + Abstract base class for commands with the index_group options. + + This also corresponds to the commands that permit the pip version check. + """ + + def handle_pip_version_check(self, options): + # type: (Values) -> None + """ + Do the pip version check if not disabled. + + This overrides the default behavior of not doing the check. + """ + # Make sure the index_group options are present. + assert hasattr(options, 'no_index') + + if options.disable_pip_version_check or options.no_index: + return + + # Otherwise, check if we're using the latest version of pip available. + session = self._build_session( + options, + retries=0, + timeout=min(5, options.timeout) + ) + with session: + pip_self_version_check(session, options) + + +class RequirementCommand(IndexGroupCommand): + + @staticmethod + def make_requirement_preparer( + temp_build_dir, # type: TempDirectory + options, # type: Values + req_tracker, # type: RequirementTracker + session, # type: PipSession + finder, # type: PackageFinder + use_user_site, # type: bool + download_dir=None, # type: str + wheel_download_dir=None, # type: str + ): + # type: (...) -> RequirementPreparer + """ + Create a RequirementPreparer instance for the given parameters. + """ + downloader = Downloader(session, progress_bar=options.progress_bar) + + temp_build_dir_path = temp_build_dir.path + assert temp_build_dir_path is not None + + return RequirementPreparer( + build_dir=temp_build_dir_path, + src_dir=options.src_dir, + download_dir=download_dir, + wheel_download_dir=wheel_download_dir, + build_isolation=options.build_isolation, + req_tracker=req_tracker, + downloader=downloader, + finder=finder, + require_hashes=options.require_hashes, + use_user_site=use_user_site, + ) + + @staticmethod + def make_resolver( + preparer, # type: RequirementPreparer + finder, # type: PackageFinder + options, # type: Values + wheel_cache=None, # type: Optional[WheelCache] + use_user_site=False, # type: bool + ignore_installed=True, # type: bool + ignore_requires_python=False, # type: bool + force_reinstall=False, # type: bool + upgrade_strategy="to-satisfy-only", # type: str + use_pep517=None, # type: Optional[bool] + py_version_info=None # type: Optional[Tuple[int, ...]] + ): + # type: (...) -> Resolver + """ + Create a Resolver instance for the given parameters. + """ + make_install_req = partial( + install_req_from_req_string, + isolated=options.isolated_mode, + wheel_cache=wheel_cache, + use_pep517=use_pep517, + ) + return Resolver( + preparer=preparer, + finder=finder, + make_install_req=make_install_req, + use_user_site=use_user_site, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=ignore_installed, + ignore_requires_python=ignore_requires_python, + force_reinstall=force_reinstall, + upgrade_strategy=upgrade_strategy, + py_version_info=py_version_info, + ) + + def populate_requirement_set( + self, + requirement_set, # type: RequirementSet + args, # type: List[str] + options, # type: Values + finder, # type: PackageFinder + session, # type: PipSession + wheel_cache, # type: Optional[WheelCache] + ): + # type: (...) -> None + """ + Marshal cmd line args into a requirement set. 
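# The shape of the session wiring done by _build_session above, sketched on
# a plain requests.Session (PipSession subclasses it, so the same attributes
# apply; every path and host below is invented for illustration).
import requests

session = requests.Session()
session.verify = "/path/to/ca-bundle.pem"     # --cert: alternate CA bundle
session.cert = "/path/to/client.pem"          # --client-cert: key+cert PEM
session.proxies = {
    # a single --proxy value fans out to both schemes, as in the code above
    "http": "http://user:pass@proxy.example:3128",
    "https": "http://user:pass@proxy.example:3128",
}
# Note: a session-wide `timeout` attribute is PipSession-specific; stock
# requests takes a timeout per request instead.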
+ """ + for filename in options.constraints: + for req_to_add in parse_requirements( + filename, + constraint=True, finder=finder, options=options, + session=session, wheel_cache=wheel_cache): + req_to_add.is_direct = True + requirement_set.add_requirement(req_to_add) + + for req in args: + req_to_add = install_req_from_line( + req, None, isolated=options.isolated_mode, + use_pep517=options.use_pep517, + wheel_cache=wheel_cache + ) + req_to_add.is_direct = True + requirement_set.add_requirement(req_to_add) + + for req in options.editables: + req_to_add = install_req_from_editable( + req, + isolated=options.isolated_mode, + use_pep517=options.use_pep517, + wheel_cache=wheel_cache + ) + req_to_add.is_direct = True + requirement_set.add_requirement(req_to_add) + + # NOTE: options.require_hashes may be set if --require-hashes is True + for filename in options.requirements: + for req_to_add in parse_requirements( + filename, + finder=finder, options=options, session=session, + wheel_cache=wheel_cache, + use_pep517=options.use_pep517): + req_to_add.is_direct = True + requirement_set.add_requirement(req_to_add) + + # If any requirement has hash options, enable hash checking. + requirements = ( + requirement_set.unnamed_requirements + + list(requirement_set.requirements.values()) + ) + if any(req.has_hash_options for req in requirements): + options.require_hashes = True + + if not (args or options.editables or options.requirements): + opts = {'name': self.name} + if options.find_links: + raise CommandError( + 'You must give at least one requirement to %(name)s ' + '(maybe you meant "pip %(name)s %(links)s"?)' % + dict(opts, links=' '.join(options.find_links))) + else: + raise CommandError( + 'You must give at least one requirement to %(name)s ' + '(see "pip help %(name)s")' % opts) + + @staticmethod + def trace_basic_info(finder): + # type: (PackageFinder) -> None + """ + Trace basic information about the provided objects. + """ + # Display where finder is looking for packages + search_scope = finder.search_scope + locations = search_scope.get_formatted_locations() + if locations: + logger.info(locations) + + def _build_package_finder( + self, + options, # type: Values + session, # type: PipSession + target_python=None, # type: Optional[TargetPython] + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> PackageFinder + """ + Create a package finder appropriate to this requirement command. + + :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. Defaults to False. 
+ """ + link_collector = make_link_collector(session, options=options) + selection_prefs = SelectionPreferences( + allow_yanked=True, + format_control=options.format_control, + allow_all_prereleases=options.pre, + prefer_binary=options.prefer_binary, + ignore_requires_python=ignore_requires_python, + ) + + return PackageFinder.create( + link_collector=link_collector, + selection_prefs=selection_prefs, + target_python=target_python, + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/cli/status_codes.py b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/status_codes.py new file mode 100644 index 0000000000000000000000000000000000000000..275360a3175abaeab86148d61b735904f96d72f6 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/cli/status_codes.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import + +SUCCESS = 0 +ERROR = 1 +UNKNOWN_ERROR = 2 +VIRTUALENV_NOT_FOUND = 3 +PREVIOUS_BUILD_DIR_ERROR = 4 +NO_MATCHES_FOUND = 23 diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/__init__.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2a311f8fc8930735a39eee61cf701db1f2a35daa --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/__init__.py @@ -0,0 +1,114 @@ +""" +Package containing all pip commands +""" + +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import importlib +from collections import OrderedDict, namedtuple + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any + from pip._internal.cli.base_command import Command + + +CommandInfo = namedtuple('CommandInfo', 'module_path, class_name, summary') + +# The ordering matters for help display. +# Also, even though the module path starts with the same +# "pip._internal.commands" prefix in each case, we include the full path +# because it makes testing easier (specifically when modifying commands_dict +# in test setup / teardown by adding info for a FakeCommand class defined +# in a test-related module). +# Finally, we need to pass an iterable of pairs here rather than a dict +# so that the ordering won't be lost when using Python 2.7. 
+commands_dict = OrderedDict([ + ('install', CommandInfo( + 'pip._internal.commands.install', 'InstallCommand', + 'Install packages.', + )), + ('download', CommandInfo( + 'pip._internal.commands.download', 'DownloadCommand', + 'Download packages.', + )), + ('uninstall', CommandInfo( + 'pip._internal.commands.uninstall', 'UninstallCommand', + 'Uninstall packages.', + )), + ('freeze', CommandInfo( + 'pip._internal.commands.freeze', 'FreezeCommand', + 'Output installed packages in requirements format.', + )), + ('list', CommandInfo( + 'pip._internal.commands.list', 'ListCommand', + 'List installed packages.', + )), + ('show', CommandInfo( + 'pip._internal.commands.show', 'ShowCommand', + 'Show information about installed packages.', + )), + ('check', CommandInfo( + 'pip._internal.commands.check', 'CheckCommand', + 'Verify installed packages have compatible dependencies.', + )), + ('config', CommandInfo( + 'pip._internal.commands.configuration', 'ConfigurationCommand', + 'Manage local and global configuration.', + )), + ('search', CommandInfo( + 'pip._internal.commands.search', 'SearchCommand', + 'Search PyPI for packages.', + )), + ('wheel', CommandInfo( + 'pip._internal.commands.wheel', 'WheelCommand', + 'Build wheels from your requirements.', + )), + ('hash', CommandInfo( + 'pip._internal.commands.hash', 'HashCommand', + 'Compute hashes of package archives.', + )), + ('completion', CommandInfo( + 'pip._internal.commands.completion', 'CompletionCommand', + 'A helper command used for command completion.', + )), + ('debug', CommandInfo( + 'pip._internal.commands.debug', 'DebugCommand', + 'Show information useful for debugging.', + )), + ('help', CommandInfo( + 'pip._internal.commands.help', 'HelpCommand', + 'Show help for commands.', + )), +]) # type: OrderedDict[str, CommandInfo] + + +def create_command(name, **kwargs): + # type: (str, **Any) -> Command + """ + Create an instance of the Command class with the given name. 
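# Usage sketch for the two helpers above, assuming this pip-20.0-era layout
# where pip._internal.commands exposes create_command() (a private API, so
# for illustration only): commands are imported lazily by dotted module
# path, and near-miss names are corrected via difflib.get_close_matches.
from pip._internal.commands import create_command, get_similar_commands

cmd = create_command('install', isolated=False)
print(cmd.name, '-', cmd.summary)        # install - Install packages.

print(get_similar_commands('instal'))    # 'install'
print(get_similar_commands('xyzzy'))     # False (no close match)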
+    """
+    module_path, class_name, summary = commands_dict[name]
+    module = importlib.import_module(module_path)
+    command_class = getattr(module, class_name)
+    command = command_class(name=name, summary=summary, **kwargs)
+
+    return command
+
+
+def get_similar_commands(name):
+    """Command name auto-correct."""
+    from difflib import get_close_matches
+
+    name = name.lower()
+
+    close_commands = get_close_matches(name, commands_dict.keys())
+
+    if close_commands:
+        return close_commands[0]
+    else:
+        return False
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/check.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/check.py
new file mode 100644
index 0000000000000000000000000000000000000000..968944611ea7e284dac912c36d63816c7ca585b4
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/check.py
@@ -0,0 +1,45 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+import logging
+
+from pip._internal.cli.base_command import Command
+from pip._internal.operations.check import (
+    check_package_set,
+    create_package_set_from_installed,
+)
+from pip._internal.utils.misc import write_output
+
+logger = logging.getLogger(__name__)
+
+
+class CheckCommand(Command):
+    """Verify installed packages have compatible dependencies."""
+
+    usage = """
+      %prog [options]"""
+
+    def run(self, options, args):
+        package_set, parsing_probs = create_package_set_from_installed()
+        missing, conflicting = check_package_set(package_set)
+
+        for project_name in missing:
+            version = package_set[project_name].version
+            for dependency in missing[project_name]:
+                write_output(
+                    "%s %s requires %s, which is not installed.",
+                    project_name, version, dependency[0],
+                )
+
+        for project_name in conflicting:
+            version = package_set[project_name].version
+            for dep_name, dep_version, req in conflicting[project_name]:
+                write_output(
+                    "%s %s has requirement %s, but you have %s %s.",
+                    project_name, version, req, dep_name, dep_version,
+                )
+
+        if missing or conflicting or parsing_probs:
+            return 1
+        else:
+            write_output("No broken requirements found.")
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/completion.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/completion.py
new file mode 100644
index 0000000000000000000000000000000000000000..c532806e3866e652063c92226716c11edbf116b1
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/completion.py
@@ -0,0 +1,96 @@
+# The following comment should be removed at some point in the future.
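# A rough standalone sketch (not pip's implementation) of the idea behind
# CheckCommand above: walk the installed distributions and verify each
# declared requirement is present and satisfied. pkg_resources is the
# same machinery this vendored pip builds on; the output is simplified.
import pkg_resources

for dist in pkg_resources.working_set:
    for req in dist.requires():
        try:
            pkg_resources.require(str(req))
        except pkg_resources.DistributionNotFound:
            print("%s %s requires %s, which is not installed."
                  % (dist.project_name, dist.version, req.project_name))
        except pkg_resources.VersionConflict as exc:
            print("%s %s has requirement %s, but you have %s."
                  % (dist.project_name, dist.version, req, exc.dist))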
+# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import sys +import textwrap + +from pip._internal.cli.base_command import Command +from pip._internal.utils.misc import get_prog + +BASE_COMPLETION = """ +# pip %(shell)s completion start%(script)s# pip %(shell)s completion end +""" + +COMPLETION_SCRIPTS = { + 'bash': """ + _pip_completion() + { + COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\ + COMP_CWORD=$COMP_CWORD \\ + PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) ) + } + complete -o default -F _pip_completion %(prog)s + """, + 'zsh': """ + function _pip_completion { + local words cword + read -Ac words + read -cn cword + reply=( $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$(( cword-1 )) \\ + PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )) + } + compctl -K _pip_completion %(prog)s + """, + 'fish': """ + function __fish_complete_pip + set -lx COMP_WORDS (commandline -o) "" + set -lx COMP_CWORD ( \\ + math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\ + ) + set -lx PIP_AUTO_COMPLETE 1 + string split \\ -- (eval $COMP_WORDS[1]) + end + complete -fa "(__fish_complete_pip)" -c %(prog)s + """, +} + + +class CompletionCommand(Command): + """A helper command to be used for command completion.""" + + ignore_require_venv = True + + def __init__(self, *args, **kw): + super(CompletionCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option( + '--bash', '-b', + action='store_const', + const='bash', + dest='shell', + help='Emit completion code for bash') + cmd_opts.add_option( + '--zsh', '-z', + action='store_const', + const='zsh', + dest='shell', + help='Emit completion code for zsh') + cmd_opts.add_option( + '--fish', '-f', + action='store_const', + const='fish', + dest='shell', + help='Emit completion code for fish') + + self.parser.insert_option_group(0, cmd_opts) + + def run(self, options, args): + """Prints the completion code of the given shell""" + shells = COMPLETION_SCRIPTS.keys() + shell_options = ['--' + shell for shell in sorted(shells)] + if options.shell in shells: + script = textwrap.dedent( + COMPLETION_SCRIPTS.get(options.shell, '') % { + 'prog': get_prog(), + } + ) + print(BASE_COMPLETION % {'script': script, 'shell': options.shell}) + else: + sys.stderr.write( + 'ERROR: You must pass %s\n' % ' or '.join(shell_options) + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/configuration.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/configuration.py new file mode 100644 index 0000000000000000000000000000000000000000..efcf5bb3699f9e9c3111adb6975dabaa8887b08f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/configuration.py @@ -0,0 +1,233 @@ +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +import logging +import os +import subprocess + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.configuration import ( + Configuration, + get_configuration_files, + kinds, +) +from pip._internal.exceptions import PipError +from pip._internal.utils.misc import get_prog, write_output + +logger = logging.getLogger(__name__) + + +class ConfigurationCommand(Command): + """Manage local and global configuration. 
+ + Subcommands: + + list: List the active configuration (or from the file specified) + edit: Edit the configuration file in an editor + get: Get the value associated with name + set: Set the name=value + unset: Unset the value associated with name + + If none of --user, --global and --site are passed, a virtual + environment configuration file is used if one is active and the file + exists. Otherwise, all modifications happen on the to the user file by + default. + """ + + ignore_require_venv = True + usage = """ + %prog [<file-option>] list + %prog [<file-option>] [--editor <editor-path>] edit + + %prog [<file-option>] get name + %prog [<file-option>] set name value + %prog [<file-option>] unset name + """ + + def __init__(self, *args, **kwargs): + super(ConfigurationCommand, self).__init__(*args, **kwargs) + + self.configuration = None + + self.cmd_opts.add_option( + '--editor', + dest='editor', + action='store', + default=None, + help=( + 'Editor to use to edit the file. Uses VISUAL or EDITOR ' + 'environment variables if not provided.' + ) + ) + + self.cmd_opts.add_option( + '--global', + dest='global_file', + action='store_true', + default=False, + help='Use the system-wide configuration file only' + ) + + self.cmd_opts.add_option( + '--user', + dest='user_file', + action='store_true', + default=False, + help='Use the user configuration file only' + ) + + self.cmd_opts.add_option( + '--site', + dest='site_file', + action='store_true', + default=False, + help='Use the current environment configuration file only' + ) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + handlers = { + "list": self.list_values, + "edit": self.open_in_editor, + "get": self.get_name, + "set": self.set_name_value, + "unset": self.unset_name + } + + # Determine action + if not args or args[0] not in handlers: + logger.error("Need an action ({}) to perform.".format( + ", ".join(sorted(handlers))) + ) + return ERROR + + action = args[0] + + # Determine which configuration files are to be loaded + # Depends on whether the command is modifying. + try: + load_only = self._determine_file( + options, need_value=(action in ["get", "set", "unset", "edit"]) + ) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + # Load a new configuration + self.configuration = Configuration( + isolated=options.isolated_mode, load_only=load_only + ) + self.configuration.load() + + # Error handling happens here, not in the action-handlers. + try: + handlers[action](options, args[1:]) + except PipError as e: + logger.error(e.args[0]) + return ERROR + + return SUCCESS + + def _determine_file(self, options, need_value): + file_options = [key for key, value in ( + (kinds.USER, options.user_file), + (kinds.GLOBAL, options.global_file), + (kinds.SITE, options.site_file), + ) if value] + + if not file_options: + if not need_value: + return None + # Default to user, unless there's a site file. + elif any( + os.path.exists(site_config_file) + for site_config_file in get_configuration_files()[kinds.SITE] + ): + return kinds.SITE + else: + return kinds.USER + elif len(file_options) == 1: + return file_options[0] + + raise PipError( + "Need exactly one file to operate upon " + "(--user, --site, --global) to perform." 
+ ) + + def list_values(self, options, args): + self._get_n_args(args, "list", n=0) + + for key, value in sorted(self.configuration.items()): + write_output("%s=%r", key, value) + + def get_name(self, options, args): + key = self._get_n_args(args, "get [name]", n=1) + value = self.configuration.get_value(key) + + write_output("%s", value) + + def set_name_value(self, options, args): + key, value = self._get_n_args(args, "set [name] [value]", n=2) + self.configuration.set_value(key, value) + + self._save_configuration() + + def unset_name(self, options, args): + key = self._get_n_args(args, "unset [name]", n=1) + self.configuration.unset_value(key) + + self._save_configuration() + + def open_in_editor(self, options, args): + editor = self._determine_editor(options) + + fname = self.configuration.get_file_to_edit() + if fname is None: + raise PipError("Could not determine appropriate file.") + + try: + subprocess.check_call([editor, fname]) + except subprocess.CalledProcessError as e: + raise PipError( + "Editor Subprocess exited with exit code {}" + .format(e.returncode) + ) + + def _get_n_args(self, args, example, n): + """Helper to make sure the command got the right number of arguments + """ + if len(args) != n: + msg = ( + 'Got unexpected number of arguments, expected {}. ' + '(example: "{} config {}")' + ).format(n, get_prog(), example) + raise PipError(msg) + + if n == 1: + return args[0] + else: + return args + + def _save_configuration(self): + # We successfully ran a modifying command. Need to save the + # configuration. + try: + self.configuration.save() + except Exception: + logger.error( + "Unable to save configuration. Please report this as a bug.", + exc_info=1 + ) + raise PipError("Internal Error.") + + def _determine_editor(self, options): + if options.editor is not None: + return options.editor + elif "VISUAL" in os.environ: + return os.environ["VISUAL"] + elif "EDITOR" in os.environ: + return os.environ["EDITOR"] + else: + raise PipError("Could not determine editor to use.") diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/debug.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/debug.py new file mode 100644 index 0000000000000000000000000000000000000000..fe93b3a3926653c481c77830a4775585d2c488bd --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/debug.py @@ -0,0 +1,142 @@ +# The following comment should be removed at some point in the future. 
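# ConfigurationCommand.run() above dispatches on the first positional
# argument through a dict of bound methods. The same pattern in a
# stripped-down, standalone form (handler names here are illustrative):
def do_list(args):
    print("listing", args)

def do_get(args):
    print("getting", args)

handlers = {"list": do_list, "get": do_get}

def run(args):
    if not args or args[0] not in handlers:
        print("Need an action ({}) to perform.".format(
            ", ".join(sorted(handlers))))
        return 1  # mirrors pip's ERROR status code
    handlers[args[0]](args[1:])
    return 0  # mirrors SUCCESS

run(["get", "install.timeout"])  # -> getting ['install.timeout']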
+# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import locale +import logging +import os +import sys + +from pip._vendor.certifi import where + +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import get_pip_version +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any, List, Optional + from optparse import Values + +logger = logging.getLogger(__name__) + + +def show_value(name, value): + # type: (str, Optional[str]) -> None + logger.info('{}: {}'.format(name, value)) + + +def show_sys_implementation(): + # type: () -> None + logger.info('sys.implementation:') + if hasattr(sys, 'implementation'): + implementation = sys.implementation # type: ignore + implementation_name = implementation.name + else: + implementation_name = '' + + with indent_log(): + show_value('name', implementation_name) + + +def show_tags(options): + # type: (Values) -> None + tag_limit = 10 + + target_python = make_target_python(options) + tags = target_python.get_tags() + + # Display the target options that were explicitly provided. + formatted_target = target_python.format_given() + suffix = '' + if formatted_target: + suffix = ' (target: {})'.format(formatted_target) + + msg = 'Compatible tags: {}{}'.format(len(tags), suffix) + logger.info(msg) + + if options.verbose < 1 and len(tags) > tag_limit: + tags_limited = True + tags = tags[:tag_limit] + else: + tags_limited = False + + with indent_log(): + for tag in tags: + logger.info(str(tag)) + + if tags_limited: + msg = ( + '...\n' + '[First {tag_limit} tags shown. Pass --verbose to show all.]' + ).format(tag_limit=tag_limit) + logger.info(msg) + + +def ca_bundle_info(config): + levels = set() + for key, value in config.items(): + levels.add(key.split('.')[0]) + + if not levels: + return "Not specified" + + levels_that_override_global = ['install', 'wheel', 'download'] + global_overriding_level = [ + level for level in levels if level in levels_that_override_global + ] + if not global_overriding_level: + return 'global' + + levels.remove('global') + return ", ".join(levels) + + +class DebugCommand(Command): + """ + Display debug information. + """ + + usage = """ + %prog <options>""" + ignore_require_venv = True + + def __init__(self, *args, **kw): + super(DebugCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + cmdoptions.add_target_python_options(cmd_opts) + self.parser.insert_option_group(0, cmd_opts) + self.parser.config.load() + + def run(self, options, args): + # type: (Values, List[Any]) -> int + logger.warning( + "This command is only meant for debugging. " + "Do not use this with automation for parsing and getting these " + "details, since the output and options of this command may " + "change without notice." 
+ ) + show_value('pip version', get_pip_version()) + show_value('sys.version', sys.version) + show_value('sys.executable', sys.executable) + show_value('sys.getdefaultencoding', sys.getdefaultencoding()) + show_value('sys.getfilesystemencoding', sys.getfilesystemencoding()) + show_value( + 'locale.getpreferredencoding', locale.getpreferredencoding(), + ) + show_value('sys.platform', sys.platform) + show_sys_implementation() + + show_value("'cert' config value", ca_bundle_info(self.parser.config)) + show_value("REQUESTS_CA_BUNDLE", os.environ.get('REQUESTS_CA_BUNDLE')) + show_value("CURL_CA_BUNDLE", os.environ.get('CURL_CA_BUNDLE')) + show_value("pip._vendor.certifi.where()", where()) + + show_tags(options) + + return SUCCESS diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/download.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/download.py new file mode 100644 index 0000000000000000000000000000000000000000..24da3eb2a263217e068d815e73d0f78f64a89398 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/download.py @@ -0,0 +1,147 @@ +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import logging +import os + +from pip._internal.cli import cmdoptions +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import RequirementCommand +from pip._internal.req import RequirementSet +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.misc import ensure_dir, normalize_path, write_output +from pip._internal.utils.temp_dir import TempDirectory + +logger = logging.getLogger(__name__) + + +class DownloadCommand(RequirementCommand): + """ + Download packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports downloading from "requirements files", which provide + an easy way to specify a whole environment to be downloaded. + """ + + usage = """ + %prog [options] <requirement specifier> [package-index-options] ... + %prog [options] -r <requirements file> [package-index-options] ... + %prog [options] <vcs project url> ... + %prog [options] <local project path> ... 
+ %prog [options] <archive url/path> ...""" + + def __init__(self, *args, **kw): + super(DownloadCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option(cmdoptions.constraints()) + cmd_opts.add_option(cmdoptions.requirements()) + cmd_opts.add_option(cmdoptions.build_dir()) + cmd_opts.add_option(cmdoptions.no_deps()) + cmd_opts.add_option(cmdoptions.global_options()) + cmd_opts.add_option(cmdoptions.no_binary()) + cmd_opts.add_option(cmdoptions.only_binary()) + cmd_opts.add_option(cmdoptions.prefer_binary()) + cmd_opts.add_option(cmdoptions.src()) + cmd_opts.add_option(cmdoptions.pre()) + cmd_opts.add_option(cmdoptions.no_clean()) + cmd_opts.add_option(cmdoptions.require_hashes()) + cmd_opts.add_option(cmdoptions.progress_bar()) + cmd_opts.add_option(cmdoptions.no_build_isolation()) + cmd_opts.add_option(cmdoptions.use_pep517()) + cmd_opts.add_option(cmdoptions.no_use_pep517()) + + cmd_opts.add_option( + '-d', '--dest', '--destination-dir', '--destination-directory', + dest='download_dir', + metavar='dir', + default=os.curdir, + help=("Download packages into <dir>."), + ) + + cmdoptions.add_target_python_options(cmd_opts) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def run(self, options, args): + options.ignore_installed = True + # editable doesn't really make sense for `pip download`, but the bowels + # of the RequirementSet code require that property. + options.editables = [] + + cmdoptions.check_dist_restriction(options) + + options.download_dir = normalize_path(options.download_dir) + + ensure_dir(options.download_dir) + + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ) + build_delete = (not (options.no_clean or options.build_dir)) + + with get_requirement_tracker() as req_tracker, TempDirectory( + options.build_dir, delete=build_delete, kind="download" + ) as directory: + + requirement_set = RequirementSet() + self.populate_requirement_set( + requirement_set, + args, + options, + finder, + session, + None + ) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + download_dir=options.download_dir, + use_user_site=False, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + py_version_info=options.python_version, + ) + + self.trace_basic_info(finder) + + resolver.resolve(requirement_set) + + downloaded = ' '.join([ + req.name for req in requirement_set.successfully_downloaded + ]) + if downloaded: + write_output('Successfully downloaded %s', downloaded) + + # Clean up + if not options.no_clean: + requirement_set.cleanup_files() + + return requirement_set diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/freeze.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/freeze.py new file mode 100644 index 0000000000000000000000000000000000000000..e96c0833f5f1e7f643a588cf6bf30e9ca2282bc5 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/freeze.py @@ -0,0 +1,103 @@ +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import sys + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.base_command import Command +from pip._internal.models.format_control import FormatControl +from pip._internal.operations.freeze import freeze +from pip._internal.utils.compat import stdlib_pkgs + +DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel', 'pkg-resources'} + + +class FreezeCommand(Command): + """ + Output installed packages in requirements format. + + packages are listed in a case-insensitive sorted order. + """ + + usage = """ + %prog [options]""" + log_streams = ("ext://sys.stderr", "ext://sys.stderr") + + def __init__(self, *args, **kw): + super(FreezeCommand, self).__init__(*args, **kw) + + self.cmd_opts.add_option( + '-r', '--requirement', + dest='requirements', + action='append', + default=[], + metavar='file', + help="Use the order in the given requirements file and its " + "comments when generating output. This option can be " + "used multiple times.") + self.cmd_opts.add_option( + '-f', '--find-links', + dest='find_links', + action='append', + default=[], + metavar='URL', + help='URL for finding packages, which will be added to the ' + 'output.') + self.cmd_opts.add_option( + '-l', '--local', + dest='local', + action='store_true', + default=False, + help='If in a virtualenv that has global access, do not output ' + 'globally-installed packages.') + self.cmd_opts.add_option( + '--user', + dest='user', + action='store_true', + default=False, + help='Only output packages installed in user-site.') + self.cmd_opts.add_option(cmdoptions.list_path()) + self.cmd_opts.add_option( + '--all', + dest='freeze_all', + action='store_true', + help='Do not skip these packages in the output:' + ' %s' % ', '.join(DEV_PKGS)) + self.cmd_opts.add_option( + '--exclude-editable', + dest='exclude_editable', + action='store_true', + help='Exclude editable package from output.') + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + format_control = FormatControl(set(), set()) + wheel_cache = WheelCache(options.cache_dir, format_control) + skip = set(stdlib_pkgs) + if not options.freeze_all: + skip.update(DEV_PKGS) + + cmdoptions.check_list_path_option(options) + + freeze_kwargs = dict( + requirement=options.requirements, + find_links=options.find_links, + local_only=options.local, + user_only=options.user, + paths=options.path, + skip_regex=options.skip_requirements_regex, + isolated=options.isolated_mode, + wheel_cache=wheel_cache, + skip=skip, + exclude_editable=options.exclude_editable, + ) + + try: + for line in freeze(**freeze_kwargs): + sys.stdout.write(line + '\n') + finally: + wheel_cache.cleanup() diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/hash.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/hash.py new file mode 100644 index 0000000000000000000000000000000000000000..1dc7fb0eac936b625c79d20a7ec8267179a15a29 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/hash.py @@ -0,0 +1,58 @@ +# The following comment should be removed at some point in the future. 
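# FreezeCommand above writes one requirement per line to stdout and routes
# its logging to stderr (the log_streams attribute), so the output pipes
# cleanly. Since pip's internals are private, a stable way to consume it
# from code is the CLI; a sketch (the package names in the comment are
# only examples):
import subprocess
import sys

output = subprocess.run(
    [sys.executable, "-m", "pip", "freeze"],
    check=True, capture_output=True, text=True,
).stdout
requirements = output.splitlines()  # e.g. ['Flask==1.1.1', 'pymongo==3.10.1']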
+# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import hashlib +import logging +import sys + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR +from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES +from pip._internal.utils.misc import read_chunks, write_output + +logger = logging.getLogger(__name__) + + +class HashCommand(Command): + """ + Compute a hash of a local package archive. + + These can be used with --hash in a requirements file to do repeatable + installs. + """ + + usage = '%prog [options] <file> ...' + ignore_require_venv = True + + def __init__(self, *args, **kw): + super(HashCommand, self).__init__(*args, **kw) + self.cmd_opts.add_option( + '-a', '--algorithm', + dest='algorithm', + choices=STRONG_HASHES, + action='store', + default=FAVORITE_HASH, + help='The hash algorithm to use: one of %s' % + ', '.join(STRONG_HASHES)) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + if not args: + self.parser.print_usage(sys.stderr) + return ERROR + + algorithm = options.algorithm + for path in args: + write_output('%s:\n--hash=%s:%s', + path, algorithm, _hash_of_file(path, algorithm)) + + +def _hash_of_file(path, algorithm): + """Return the hash digest of a file.""" + with open(path, 'rb') as archive: + hash = hashlib.new(algorithm) + for chunk in read_chunks(archive): + hash.update(chunk) + return hash.hexdigest() diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/help.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/help.py new file mode 100644 index 0000000000000000000000000000000000000000..75af999b41e676f3abca4e2278b06aa404a95479 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/help.py @@ -0,0 +1,41 @@ +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import SUCCESS +from pip._internal.exceptions import CommandError + + +class HelpCommand(Command): + """Show help for commands""" + + usage = """ + %prog <command>""" + ignore_require_venv = True + + def run(self, options, args): + from pip._internal.commands import ( + commands_dict, create_command, get_similar_commands, + ) + + try: + # 'pip help' with no args is handled by pip.__init__.parseopt() + cmd_name = args[0] # the command we need help for + except IndexError: + return SUCCESS + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = ['unknown command "%s"' % cmd_name] + if guess: + msg.append('maybe you meant "%s"' % guess) + + raise CommandError(' - '.join(msg)) + + command = create_command(cmd_name) + command.parser.print_help() + + return SUCCESS diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/install.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/install.py new file mode 100644 index 0000000000000000000000000000000000000000..cb2fb280c986a60fd57ec34af5185e423169722e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/install.py @@ -0,0 +1,727 @@ +# The following comment should be removed at some point in the future. +# It's included for now because without it InstallCommand.run() has a +# couple errors where we have to know req.name is str rather than +# Optional[str] for the InstallRequirement req. 
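# _hash_of_file above hashes archives in chunks so large files never sit
# wholly in memory; read_chunks is pip's helper, but a fixed-size read
# loop is equivalent. A standalone version of the same pattern:
import hashlib

def hash_of_file(path, algorithm="sha256", chunk_size=8192):
    digest = hashlib.new(algorithm)
    with open(path, "rb") as archive:
        for chunk in iter(lambda: archive.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()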
+# mypy: strict-optional=False +# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import errno +import logging +import operator +import os +import shutil +import site +from optparse import SUPPRESS_HELP + +from pip._vendor import pkg_resources +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.cmdoptions import make_target_python +from pip._internal.cli.req_command import RequirementCommand +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.exceptions import ( + CommandError, + InstallationError, + PreviousBuildDirError, +) +from pip._internal.locations import distutils_scheme +from pip._internal.operations.check import check_install_conflicts +from pip._internal.req import RequirementSet, install_given_reqs +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.distutils_args import parse_distutils_args +from pip._internal.utils.filesystem import test_writable_dir +from pip._internal.utils.misc import ( + ensure_dir, + get_installed_version, + protect_pip_from_modification_on_windows, + write_output, +) +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.virtualenv import virtualenv_no_global +from pip._internal.wheel_builder import build, should_build_for_install_command + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import Any, Iterable, List, Optional + + from pip._internal.models.format_control import FormatControl + from pip._internal.req.req_install import InstallRequirement + from pip._internal.wheel_builder import BinaryAllowedPredicate + +from pip._internal.locations import running_under_virtualenv + +logger = logging.getLogger(__name__) + + +def get_check_binary_allowed(format_control): + # type: (FormatControl) -> BinaryAllowedPredicate + def check_binary_allowed(req): + # type: (InstallRequirement) -> bool + if req.use_pep517: + return True + canonical_name = canonicalize_name(req.name) + allowed_formats = format_control.get_allowed_formats(canonical_name) + return "binary" in allowed_formats + + return check_binary_allowed + + +class InstallCommand(RequirementCommand): + """ + Install packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports installing from "requirements files", which provide + an easy way to specify a whole environment to be installed. + """ + + usage = """ + %prog [options] <requirement specifier> [package-index-options] ... + %prog [options] -r <requirements file> [package-index-options] ... + %prog [options] [-e] <vcs project url> ... + %prog [options] [-e] <local project path> ... + %prog [options] <archive url/path> ...""" + + def __init__(self, *args, **kw): + super(InstallCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option(cmdoptions.requirements()) + cmd_opts.add_option(cmdoptions.constraints()) + cmd_opts.add_option(cmdoptions.no_deps()) + cmd_opts.add_option(cmdoptions.pre()) + + cmd_opts.add_option(cmdoptions.editable()) + cmd_opts.add_option( + '-t', '--target', + dest='target_dir', + metavar='dir', + default=None, + help='Install packages into <dir>. 
' + 'By default this will not replace existing files/folders in ' + '<dir>. Use --upgrade to replace existing packages in <dir> ' + 'with new versions.' + ) + cmdoptions.add_target_python_options(cmd_opts) + + cmd_opts.add_option( + '--user', + dest='use_user_site', + action='store_true', + help="Install to the Python user install directory for your " + "platform. Typically ~/.local/, or %APPDATA%\\Python on " + "Windows. (See the Python documentation for site.USER_BASE " + "for full details.) On Debian systems, this is the " + "default when running outside of a virtual environment " + "and not as root.") + + cmd_opts.add_option( + '--no-user', + dest='use_system_location', + action='store_true', + help=SUPPRESS_HELP) + cmd_opts.add_option( + '--root', + dest='root_path', + metavar='dir', + default=None, + help="Install everything relative to this alternate root " + "directory.") + cmd_opts.add_option( + '--prefix', + dest='prefix_path', + metavar='dir', + default=None, + help="Installation prefix where lib, bin and other top-level " + "folders are placed") + + cmd_opts.add_option( + '--system', + dest='use_system_location', + action='store_true', + help="Install using the system scheme (overrides --user on " + "Debian systems)") + + cmd_opts.add_option(cmdoptions.build_dir()) + + cmd_opts.add_option(cmdoptions.src()) + + cmd_opts.add_option( + '-U', '--upgrade', + dest='upgrade', + action='store_true', + help='Upgrade all specified packages to the newest available ' + 'version. The handling of dependencies depends on the ' + 'upgrade-strategy used.' + ) + + cmd_opts.add_option( + '--upgrade-strategy', + dest='upgrade_strategy', + default='only-if-needed', + choices=['only-if-needed', 'eager'], + help='Determines how dependency upgrading should be handled ' + '[default: %default]. ' + '"eager" - dependencies are upgraded regardless of ' + 'whether the currently installed version satisfies the ' + 'requirements of the upgraded package(s). ' + '"only-if-needed" - are upgraded only when they do not ' + 'satisfy the requirements of the upgraded package(s).' + ) + + cmd_opts.add_option( + '--force-reinstall', + dest='force_reinstall', + action='store_true', + help='Reinstall all packages even if they are already ' + 'up-to-date.') + + cmd_opts.add_option( + '-I', '--ignore-installed', + dest='ignore_installed', + action='store_true', + help='Ignore the installed packages, overwriting them. ' + 'This can break your system if the existing package ' + 'is of a different version or was installed ' + 'with a different package manager!' 
+ ) + + cmd_opts.add_option(cmdoptions.ignore_requires_python()) + cmd_opts.add_option(cmdoptions.no_build_isolation()) + cmd_opts.add_option(cmdoptions.use_pep517()) + cmd_opts.add_option(cmdoptions.no_use_pep517()) + + cmd_opts.add_option(cmdoptions.install_options()) + cmd_opts.add_option(cmdoptions.global_options()) + + cmd_opts.add_option( + "--compile", + action="store_true", + dest="compile", + default=True, + help="Compile Python source files to bytecode", + ) + + cmd_opts.add_option( + "--no-compile", + action="store_false", + dest="compile", + help="Do not compile Python source files to bytecode", + ) + + cmd_opts.add_option( + "--no-warn-script-location", + action="store_false", + dest="warn_script_location", + default=True, + help="Do not warn when installing scripts outside PATH", + ) + cmd_opts.add_option( + "--no-warn-conflicts", + action="store_false", + dest="warn_about_conflicts", + default=True, + help="Do not warn about broken dependencies", + ) + + cmd_opts.add_option(cmdoptions.no_binary()) + cmd_opts.add_option(cmdoptions.only_binary()) + cmd_opts.add_option(cmdoptions.prefer_binary()) + cmd_opts.add_option(cmdoptions.no_clean()) + cmd_opts.add_option(cmdoptions.require_hashes()) + cmd_opts.add_option(cmdoptions.progress_bar()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def run(self, options, args): + # type: (Values, List[Any]) -> int + cmdoptions.check_install_build_global(options) + upgrade_strategy = "to-satisfy-only" + if options.upgrade: + upgrade_strategy = options.upgrade_strategy + + cmdoptions.check_dist_restriction(options, check_target=True) + + if options.python_version: + python_versions = [options.python_version] + else: + python_versions = None + + # compute install location defaults + if (not options.use_user_site and not options.prefix_path and not + options.target_dir and not options.use_system_location): + if not running_under_virtualenv() and os.geteuid() != 0: + options.use_user_site = True + + if options.use_system_location: + options.use_user_site = False + + options.src_dir = os.path.abspath(options.src_dir) + install_options = options.install_options or [] + + options.use_user_site = decide_user_install( + options.use_user_site, + prefix_path=options.prefix_path, + target_dir=options.target_dir, + root_path=options.root_path, + isolated_mode=options.isolated_mode, + ) + + target_temp_dir = None # type: Optional[TempDirectory] + target_temp_dir_path = None # type: Optional[str] + if options.target_dir: + options.ignore_installed = True + options.target_dir = os.path.abspath(options.target_dir) + if (os.path.exists(options.target_dir) and not + os.path.isdir(options.target_dir)): + raise CommandError( + "Target path exists but is not a directory, will not " + "continue." 
+ ) + + # Create a target directory for using with the target option + target_temp_dir = TempDirectory(kind="target") + target_temp_dir_path = target_temp_dir.path + + global_options = options.global_options or [] + + session = self.get_default_session(options) + + target_python = make_target_python(options) + finder = self._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + build_delete = (not (options.no_clean or options.build_dir)) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + + with get_requirement_tracker() as req_tracker, TempDirectory( + options.build_dir, delete=build_delete, kind="install" + ) as directory: + requirement_set = RequirementSet( + check_supported_wheels=not options.target_dir, + ) + + try: + self.populate_requirement_set( + requirement_set, args, options, finder, session, + wheel_cache + ) + + warn_deprecated_install_options( + requirement_set, options.install_options + ) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + use_user_site=options.use_user_site, + ) + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + use_user_site=options.use_user_site, + ignore_installed=options.ignore_installed, + ignore_requires_python=options.ignore_requires_python, + force_reinstall=options.force_reinstall, + upgrade_strategy=upgrade_strategy, + use_pep517=options.use_pep517, + ) + + self.trace_basic_info(finder) + + resolver.resolve(requirement_set) + + try: + pip_req = requirement_set.get_requirement("pip") + except KeyError: + modifying_pip = None + else: + # If we're not replacing an already installed pip, + # we're not modifying it. + modifying_pip = pip_req.satisfied_by is None + protect_pip_from_modification_on_windows( + modifying_pip=modifying_pip + ) + + check_binary_allowed = get_check_binary_allowed( + finder.format_control + ) + + reqs_to_build = [ + r for r in requirement_set.requirements.values() + if should_build_for_install_command( + r, check_binary_allowed + ) + ] + + _, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + build_options=[], + global_options=[], + ) + + # If we're using PEP 517, we cannot do a direct install + # so we fail here. + # We don't care about failures building legacy + # requirements, as we'll fall through to a direct + # install for those. + pep517_build_failures = [ + r for r in build_failures if r.use_pep517 + ] + if pep517_build_failures: + raise InstallationError( + "Could not build wheels for {} which use" + " PEP 517 and cannot be installed directly".format( + ", ".join(r.name for r in pep517_build_failures))) + + to_install = resolver.get_installation_order( + requirement_set + ) + + # Consistency Checking of the package set we're installing. 
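+            # (This mirrors `pip check`, but runs against the set that is
+            # about to be installed; it is skipped under --no-deps or
+            # --no-warn-conflicts.)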
+ should_warn_about_conflicts = ( + not options.ignore_dependencies and + options.warn_about_conflicts + ) + if should_warn_about_conflicts: + self._warn_about_conflicts(to_install) + + # Don't warn about script install locations if + # --target has been specified + warn_script_location = options.warn_script_location + if options.target_dir: + warn_script_location = False + + installed = install_given_reqs( + to_install, + install_options, + global_options, + root=options.root_path, + home=target_temp_dir_path, + prefix=options.prefix_path, + pycompile=options.compile, + warn_script_location=warn_script_location, + use_user_site=options.use_user_site, + ) + + lib_locations = get_lib_location_guesses( + user=options.use_user_site, + home=target_temp_dir_path, + root=options.root_path, + prefix=options.prefix_path, + isolated=options.isolated_mode, + ) + working_set = pkg_resources.WorkingSet(lib_locations) + + installed.sort(key=operator.attrgetter('name')) + items = [] + for result in installed: + item = result.name + try: + installed_version = get_installed_version( + result.name, working_set=working_set + ) + if installed_version: + item += '-' + installed_version + except Exception: + pass + items.append(item) + installed_desc = ' '.join(items) + if installed_desc: + write_output( + 'Successfully installed %s', installed_desc, + ) + except EnvironmentError as error: + show_traceback = (self.verbosity >= 1) + + message = create_env_error_message( + error, show_traceback, options.use_user_site, + ) + logger.error(message, exc_info=show_traceback) + + return ERROR + except PreviousBuildDirError: + options.no_clean = True + raise + finally: + # Clean up + if not options.no_clean: + requirement_set.cleanup_files() + wheel_cache.cleanup() + + if options.target_dir: + self._handle_target_dir( + options.target_dir, target_temp_dir, options.upgrade + ) + + return SUCCESS + + def _handle_target_dir(self, target_dir, target_temp_dir, upgrade): + ensure_dir(target_dir) + + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + lib_dir_list = [] + + with target_temp_dir: + # Checking both purelib and platlib directories for installed + # packages to be moved to target directory + scheme = distutils_scheme('', home=target_temp_dir.path) + purelib_dir = scheme['purelib'] + platlib_dir = scheme['platlib'] + data_dir = scheme['data'] + + if os.path.exists(purelib_dir): + lib_dir_list.append(purelib_dir) + if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: + lib_dir_list.append(platlib_dir) + if os.path.exists(data_dir): + lib_dir_list.append(data_dir) + + for lib_dir in lib_dir_list: + for item in os.listdir(lib_dir): + if lib_dir == data_dir: + ddir = os.path.join(data_dir, item) + if any(s.startswith(ddir) for s in lib_dir_list[:-1]): + continue + target_item_dir = os.path.join(target_dir, item) + if os.path.exists(target_item_dir): + if not upgrade: + logger.warning( + 'Target directory %s already exists. Specify ' + '--upgrade to force replacement.', + target_item_dir + ) + continue + if os.path.islink(target_item_dir): + logger.warning( + 'Target directory %s already exists and is ' + 'a link. 
Pip will not automatically replace ' + 'links, please remove if replacement is ' + 'desired.', + target_item_dir + ) + continue + if os.path.isdir(target_item_dir): + shutil.rmtree(target_item_dir) + else: + os.remove(target_item_dir) + + shutil.move( + os.path.join(lib_dir, item), + target_item_dir + ) + + def _warn_about_conflicts(self, to_install): + try: + package_set, _dep_info = check_install_conflicts(to_install) + except Exception: + logger.error("Error checking for conflicts.", exc_info=True) + return + missing, conflicting = _dep_info + + # NOTE: There is some duplication here from pip check + for project_name in missing: + version = package_set[project_name][0] + for dependency in missing[project_name]: + logger.critical( + "%s %s requires %s, which is not installed.", + project_name, version, dependency[1], + ) + + for project_name in conflicting: + version = package_set[project_name][0] + for dep_name, dep_version, req in conflicting[project_name]: + logger.critical( + "%s %s has requirement %s, but you'll have %s %s which is " + "incompatible.", + project_name, version, req, dep_name, dep_version, + ) + + +def get_lib_location_guesses(*args, **kwargs): + scheme = distutils_scheme('', *args, **kwargs) + return [scheme['purelib'], scheme['platlib']] + + +def site_packages_writable(**kwargs): + return all( + test_writable_dir(d) for d in set(get_lib_location_guesses(**kwargs)) + ) + + +def decide_user_install( + use_user_site, # type: Optional[bool] + prefix_path=None, # type: Optional[str] + target_dir=None, # type: Optional[str] + root_path=None, # type: Optional[str] + isolated_mode=False, # type: bool +): + # type: (...) -> bool + """Determine whether to do a user install based on the input options. + + If use_user_site is False, no additional checks are done. + If use_user_site is True, it is checked for compatibility with other + options. + If use_user_site is None, the default behaviour depends on the environment, + which is provided by the other arguments. + """ + # In some cases (config from tox), use_user_site can be set to an integer + # rather than a bool, which 'use_user_site is False' wouldn't catch. + if (use_user_site is not None) and (not use_user_site): + logger.debug("Non-user install by explicit request") + return False + + if use_user_site: + if prefix_path: + raise CommandError( + "Can not combine '--user' and '--prefix' as they imply " + "different installation locations" + ) + if virtualenv_no_global(): + raise InstallationError( + "Can not perform a '--user' install. User site-packages " + "are not visible in this virtualenv." + ) + logger.debug("User install by explicit request") + return True + + # If we are here, user installs have not been explicitly requested/avoided + assert use_user_site is None + + # user install incompatible with --prefix/--target + if prefix_path or target_dir: + logger.debug("Non-user install due to --prefix or --target option") + return False + + # If user installs are not enabled, choose a non-user install + if not site.ENABLE_USER_SITE: + logger.debug("Non-user install because user site-packages disabled") + return False + + # If we have permission for a non-user install, do that, + # otherwise do a user install. 
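+    # (Order of precedence above: an explicit --user/--no-user wins, then
+    # --prefix/--target force a global-style layout, then the interpreter
+    # must allow user site-packages at all; only then does writability of
+    # site-packages decide.)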
+ if site_packages_writable(root=root_path, isolated=isolated_mode): + logger.debug("Non-user install because site-packages writeable") + return False + + logger.info("Defaulting to user installation because normal site-packages " + "is not writeable") + return True + + +def warn_deprecated_install_options(requirement_set, options): + # type: (RequirementSet, Optional[List[str]]) -> None + """If any location-changing --install-option arguments were passed for + requirements or on the command-line, then show a deprecation warning. + """ + def format_options(option_names): + # type: (Iterable[str]) -> List[str] + return ["--{}".format(name.replace("_", "-")) for name in option_names] + + requirements = ( + requirement_set.unnamed_requirements + + list(requirement_set.requirements.values()) + ) + + offenders = [] + + for requirement in requirements: + install_options = requirement.options.get("install_options", []) + location_options = parse_distutils_args(install_options) + if location_options: + offenders.append( + "{!r} from {}".format( + format_options(location_options.keys()), requirement + ) + ) + + if options: + location_options = parse_distutils_args(options) + if location_options: + offenders.append( + "{!r} from command line".format( + format_options(location_options.keys()) + ) + ) + + if not offenders: + return + + deprecated( + reason=( + "Location-changing options found in --install-option: {}. " + "This configuration may cause unexpected behavior and is " + "unsupported.".format( + "; ".join(offenders) + ) + ), + replacement=( + "using pip-level options like --user, --prefix, --root, and " + "--target" + ), + gone_in="20.2", + issue=7309, + ) + + +def create_env_error_message(error, show_traceback, using_user_site): + """Format an error message for an EnvironmentError + + It may occur anytime during the execution of the install command. + """ + parts = [] + + # Mention the error if we are not going to show a traceback + parts.append("Could not install packages due to an EnvironmentError") + if not show_traceback: + parts.append(": ") + parts.append(str(error)) + else: + parts.append(".") + + # Spilt the error indication from a helper message (if any) + parts[-1] += "\n" + + # Suggest useful actions to the user: + # (1) using user site-packages or (2) verifying the permissions + if error.errno == errno.EACCES: + user_option_part = "Consider using the `--user` option" + permissions_part = "Check the permissions" + + if not using_user_site: + parts.extend([ + user_option_part, " or ", + permissions_part.lower(), + ]) + else: + parts.append(permissions_part) + parts.append(".\n") + + return "".join(parts).strip() + "\n" diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/list.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/list.py new file mode 100644 index 0000000000000000000000000000000000000000..d0062063e7be3e3fa1f09452e793eceafc1b2343 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/list.py @@ -0,0 +1,315 @@ +# The following comment should be removed at some point in the future. 
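# For reference, create_env_error_message above renders a permission
# failure (traceback suppressed, user site not in use) roughly as:
#
#   Could not install packages due to an EnvironmentError: [Errno 13] Permission denied: ...
#   Consider using the `--user` option or check the permissions.
#
# (The path after the errno is whatever str(error) carries.)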
+# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import json +import logging + +from pip._vendor import six +from pip._vendor.six.moves import zip_longest + +from pip._internal.cli import cmdoptions +from pip._internal.cli.req_command import IndexGroupCommand +from pip._internal.exceptions import CommandError +from pip._internal.index.package_finder import PackageFinder +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.self_outdated_check import make_link_collector +from pip._internal.utils.misc import ( + dist_is_editable, + get_installed_distributions, + write_output, +) +from pip._internal.utils.packaging import get_installer + +from pip._vendor.packaging.version import parse + +logger = logging.getLogger(__name__) + + +class ListCommand(IndexGroupCommand): + """ + List installed packages, including editables. + + Packages are listed in a case-insensitive sorted order. + """ + + usage = """ + %prog [options]""" + + def __init__(self, *args, **kw): + super(ListCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option( + '-o', '--outdated', + action='store_true', + default=False, + help='List outdated packages') + cmd_opts.add_option( + '-u', '--uptodate', + action='store_true', + default=False, + help='List uptodate packages') + cmd_opts.add_option( + '-e', '--editable', + action='store_true', + default=False, + help='List editable projects.') + cmd_opts.add_option( + '-l', '--local', + action='store_true', + default=False, + help=('If in a virtualenv that has global access, do not list ' + 'globally-installed packages.'), + ) + self.cmd_opts.add_option( + '--user', + dest='user', + action='store_true', + default=False, + help='Only output packages installed in user-site.') + cmd_opts.add_option(cmdoptions.list_path()) + cmd_opts.add_option( + '--pre', + action='store_true', + default=False, + help=("Include pre-release and development versions. By default, " + "pip only finds stable versions."), + ) + + cmd_opts.add_option( + '--format', + action='store', + dest='list_format', + default="columns", + choices=('columns', 'freeze', 'json'), + help="Select the output format among: columns (default), freeze, " + "or json", + ) + + cmd_opts.add_option( + '--not-required', + action='store_true', + dest='not_required', + help="List packages that are not dependencies of " + "installed packages.", + ) + + cmd_opts.add_option( + '--exclude-editable', + action='store_false', + dest='include_editable', + help='Exclude editable package from output.', + ) + cmd_opts.add_option( + '--include-editable', + action='store_true', + dest='include_editable', + help='Include editable package from output.', + default=True, + ) + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, self.parser + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def _build_package_finder(self, options, session): + """ + Create a package finder appropriate to this list command. + """ + link_collector = make_link_collector(session, options=options) + + # Pass allow_yanked=False to ignore yanked versions. 
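+        # (Yanked releases, defined by PEP 592, are files the uploader has
+        # marked as broken; hiding them keeps `pip list --outdated` from
+        # suggesting an upgrade to a withdrawn version.)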
+        selection_prefs = SelectionPreferences(
+            allow_yanked=False,
+            allow_all_prereleases=options.pre,
+        )
+
+        return PackageFinder.create(
+            link_collector=link_collector,
+            selection_prefs=selection_prefs,
+        )
+
+    def run(self, options, args):
+        if options.outdated and options.uptodate:
+            raise CommandError(
+                "Options --outdated and --uptodate cannot be combined.")
+
+        cmdoptions.check_list_path_option(options)
+
+        packages = get_installed_distributions(
+            local_only=options.local,
+            user_only=options.user,
+            editables_only=options.editable,
+            include_editables=options.include_editable,
+            paths=options.path,
+        )
+
+        # get_not_required must be called first in order to find and
+        # filter out all dependencies correctly. Otherwise a package
+        # can't be identified as a requirement because some parent
+        # packages could have been filtered out earlier.
+        if options.not_required:
+            packages = self.get_not_required(packages, options)
+
+        if options.outdated:
+            packages = self.get_outdated(packages, options)
+        elif options.uptodate:
+            packages = self.get_uptodate(packages, options)
+
+        self.output_package_listing(packages, options)
+
+    def get_outdated(self, packages, options):
+        return [
+            dist for dist in self.iter_packages_latest_infos(packages, options)
+            if parse(str(dist.latest_version)) > parse(str(dist.parsed_version))
+        ]
+
+    def get_uptodate(self, packages, options):
+        return [
+            dist for dist in self.iter_packages_latest_infos(packages, options)
+            if parse(str(dist.latest_version)) == parse(str(dist.parsed_version))
+        ]
+
+    def get_not_required(self, packages, options):
+        dep_keys = set()
+        for dist in packages:
+            dep_keys.update(requirement.key for requirement in dist.requires())
+        return {pkg for pkg in packages if pkg.key not in dep_keys}
+
+    def iter_packages_latest_infos(self, packages, options):
+        with self._build_session(options) as session:
+            finder = self._build_package_finder(options, session)
+
+            for dist in packages:
+                typ = 'unknown'
+                all_candidates = finder.find_all_candidates(dist.key)
+                if not options.pre:
+                    # Remove prereleases
+                    all_candidates = [candidate for candidate in all_candidates
+                                      if not candidate.version.is_prerelease]
+
+                evaluator = finder.make_candidate_evaluator(
+                    project_name=dist.project_name,
+                )
+                best_candidate = evaluator.sort_best_candidate(all_candidates)
+                if best_candidate is None:
+                    continue
+
+                remote_version = best_candidate.version
+                if best_candidate.link.is_wheel:
+                    typ = 'wheel'
+                else:
+                    typ = 'sdist'
+                # This is dirty but makes the rest of the code much cleaner
+                dist.latest_version = remote_version
+                dist.latest_filetype = typ
+                yield dist
+
+    def output_package_listing(self, packages, options):
+        packages = sorted(
+            packages,
+            key=lambda dist: dist.project_name.lower(),
+        )
+        if options.list_format == 'columns' and packages:
+            data, header = format_for_columns(packages, options)
+            self.output_package_listing_columns(data, header)
+        elif options.list_format == 'freeze':
+            for dist in packages:
+                if options.verbose >= 1:
+                    write_output("%s==%s (%s)", dist.project_name,
+                                 dist.version, dist.location)
+                else:
+                    write_output("%s==%s", dist.project_name, dist.version)
+        elif options.list_format == 'json':
+            write_output(format_for_json(packages, options))
+
+    def output_package_listing_columns(self, data, header):
+        # insert the header first: we need to know the size of column names
+        if len(data) > 0:
+            data.insert(0, header)
+
+        pkg_strings, sizes = tabulate(data)
+
+        # Create and add a separator.
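+        # (One run of dashes per column, each as wide as that column, e.g.
+        # "---------- -------" for a two-column listing.)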
+ if len(data) > 0: + pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes))) + + for val in pkg_strings: + write_output(val) + + +def tabulate(vals): + # From pfmoore on GitHub: + # https://github.com/pypa/pip/issues/3651#issuecomment-216932564 + assert len(vals) > 0 + + sizes = [0] * max(len(x) for x in vals) + for row in vals: + sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)] + + result = [] + for row in vals: + display = " ".join([str(c).ljust(s) if c is not None else '' + for s, c in zip_longest(sizes, row)]) + result.append(display) + + return result, sizes + + +def format_for_columns(pkgs, options): + """ + Convert the package data into something usable + by output_package_listing_columns. + """ + running_outdated = options.outdated + # Adjust the header for the `pip list --outdated` case. + if running_outdated: + header = ["Package", "Version", "Latest", "Type"] + else: + header = ["Package", "Version"] + + data = [] + if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs): + header.append("Location") + if options.verbose >= 1: + header.append("Installer") + + for proj in pkgs: + # if we're working on the 'outdated' list, separate out the + # latest_version and type + row = [proj.project_name, proj.version] + + if running_outdated: + row.append(proj.latest_version) + row.append(proj.latest_filetype) + + if options.verbose >= 1 or dist_is_editable(proj): + row.append(proj.location) + if options.verbose >= 1: + row.append(get_installer(proj)) + + data.append(row) + + return data, header + + +def format_for_json(packages, options): + data = [] + for dist in packages: + info = { + 'name': dist.project_name, + 'version': six.text_type(dist.version), + } + if options.verbose >= 1: + info['location'] = dist.location + info['installer'] = get_installer(dist) + if options.outdated: + info['latest_version'] = six.text_type(dist.latest_version) + info['latest_filetype'] = dist.latest_filetype + data.append(info) + return json.dumps(data) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/search.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/search.py new file mode 100644 index 0000000000000000000000000000000000000000..2e880eec2242a0aec7a2a6b53cde16fa106fd683 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/search.py @@ -0,0 +1,145 @@ +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import logging +import sys +import textwrap +from collections import OrderedDict + +from pip._vendor import pkg_resources +from pip._vendor.packaging.version import parse as parse_version +# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is +# why we ignore the type on this import +from pip._vendor.six.moves import xmlrpc_client # type: ignore + +from pip._internal.cli.base_command import Command +from pip._internal.cli.req_command import SessionCommandMixin +from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS +from pip._internal.exceptions import CommandError +from pip._internal.models.index import PyPI +from pip._internal.network.xmlrpc import PipXmlrpcTransport +from pip._internal.utils.compat import get_terminal_size +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import write_output + +logger = logging.getLogger(__name__) + + +class SearchCommand(Command, SessionCommandMixin): + """Search for PyPI packages whose name or summary contains <query>.""" + + usage = """ + %prog [options] <query>""" + ignore_require_venv = True + + def __init__(self, *args, **kw): + super(SearchCommand, self).__init__(*args, **kw) + self.cmd_opts.add_option( + '-i', '--index', + dest='index', + metavar='URL', + default=PyPI.pypi_url, + help='Base URL of Python Package Index (default %default)') + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + if not args: + raise CommandError('Missing required argument (search query).') + query = args + pypi_hits = self.search(query, options) + hits = transform_hits(pypi_hits) + + terminal_width = None + if sys.stdout.isatty(): + terminal_width = get_terminal_size()[0] + + print_results(hits, terminal_width=terminal_width) + if pypi_hits: + return SUCCESS + return NO_MATCHES_FOUND + + def search(self, query, options): + index_url = options.index + + session = self.get_default_session(options) + + transport = PipXmlrpcTransport(index_url, session) + pypi = xmlrpc_client.ServerProxy(index_url, transport) + hits = pypi.search({'name': query, 'summary': query}, 'or') + return hits + + +def transform_hits(hits): + """ + The list from pypi is really a list of versions. We want a list of + packages with the list of versions stored inline. This converts the + list from pypi into one we can use. 
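+
+    For example (illustrative data, not live PyPI output), three
+    version-level hits::
+
+        [{'name': 'foo', 'summary': 'Foo!', 'version': '1.0'},
+         {'name': 'foo', 'summary': 'Foo!', 'version': '2.0'},
+         {'name': 'bar', 'summary': 'Bar.', 'version': '0.3'}]
+
+    collapse into two package entries, each carrying a 'versions' list and
+    the summary of its highest version.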
+ """ + packages = OrderedDict() + for hit in hits: + name = hit['name'] + summary = hit['summary'] + version = hit['version'] + + if name not in packages.keys(): + packages[name] = { + 'name': name, + 'summary': summary, + 'versions': [version], + } + else: + packages[name]['versions'].append(version) + + # if this is the highest version, replace summary and score + if version == highest_version(packages[name]['versions']): + packages[name]['summary'] = summary + + return list(packages.values()) + + +def print_results(hits, name_column_width=None, terminal_width=None): + if not hits: + return + if name_column_width is None: + name_column_width = max([ + len(hit['name']) + len(highest_version(hit.get('versions', ['-']))) + for hit in hits + ]) + 4 + + installed_packages = [p.project_name for p in pkg_resources.working_set] + for hit in hits: + name = hit['name'] + summary = hit['summary'] or '' + latest = highest_version(hit.get('versions', ['-'])) + if terminal_width is not None: + target_width = terminal_width - name_column_width - 5 + if target_width > 10: + # wrap and indent summary to fit terminal + summary = textwrap.wrap(summary, target_width) + summary = ('\n' + ' ' * (name_column_width + 3)).join(summary) + + line = '%-*s - %s' % (name_column_width, + '%s (%s)' % (name, latest), summary) + try: + write_output(line) + if name in installed_packages: + dist = pkg_resources.get_distribution(name) + with indent_log(): + if dist.version == latest: + write_output('INSTALLED: %s (latest)', dist.version) + else: + write_output('INSTALLED: %s', dist.version) + if parse_version(latest).pre: + write_output('LATEST: %s (pre-release; install' + ' with "pip install --pre")', latest) + else: + write_output('LATEST: %s', latest) + except UnicodeEncodeError: + pass + + +def highest_version(versions): + return max(versions, key=parse_version) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/show.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/show.py new file mode 100644 index 0000000000000000000000000000000000000000..a46b08eeb3d22852421b2b6c8b1b15ce12aaa9a3 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/show.py @@ -0,0 +1,180 @@ +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import logging +import os +from email.parser import FeedParser + +from pip._vendor import pkg_resources +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.cli.base_command import Command +from pip._internal.cli.status_codes import ERROR, SUCCESS +from pip._internal.utils.misc import write_output + +logger = logging.getLogger(__name__) + + +class ShowCommand(Command): + """ + Show information about one or more installed packages. + + The output is in RFC-compliant mail header format. 
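+
+    For example, ``pip show requests`` prints fields such as (illustrative
+    values; the exact versions depend on the environment)::
+
+        Name: requests
+        Version: 2.22.0
+        Summary: Python HTTP for Humans.
+        Requires: certifi, chardet, idna, urllib3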
+ """ + + usage = """ + %prog [options] <package> ...""" + ignore_require_venv = True + + def __init__(self, *args, **kw): + super(ShowCommand, self).__init__(*args, **kw) + self.cmd_opts.add_option( + '-f', '--files', + dest='files', + action='store_true', + default=False, + help='Show the full list of installed files for each package.') + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + if not args: + logger.warning('ERROR: Please provide a package name or names.') + return ERROR + query = args + + results = search_packages_info(query) + if not print_results( + results, list_files=options.files, verbose=options.verbose): + return ERROR + return SUCCESS + + +def search_packages_info(query): + """ + Gather details from installed distributions. Print distribution name, + version, location, and installed files. Installed files requires a + pip generated 'installed-files.txt' in the distributions '.egg-info' + directory. + """ + installed = {} + for p in pkg_resources.working_set: + installed[canonicalize_name(p.project_name)] = p + + query_names = [canonicalize_name(name) for name in query] + missing = sorted( + [name for name, pkg in zip(query, query_names) if pkg not in installed] + ) + if missing: + logger.warning('Package(s) not found: %s', ', '.join(missing)) + + def get_requiring_packages(package_name): + canonical_name = canonicalize_name(package_name) + return [ + pkg.project_name for pkg in pkg_resources.working_set + if canonical_name in + [canonicalize_name(required.name) for required in + pkg.requires()] + ] + + for dist in [installed[pkg] for pkg in query_names if pkg in installed]: + package = { + 'name': dist.project_name, + 'version': dist.version, + 'location': dist.location, + 'requires': [dep.project_name for dep in dist.requires()], + 'required_by': get_requiring_packages(dist.project_name) + } + file_list = None + metadata = None + if isinstance(dist, pkg_resources.DistInfoDistribution): + # RECORDs should be part of .dist-info metadatas + if dist.has_metadata('RECORD'): + lines = dist.get_metadata_lines('RECORD') + paths = [l.split(',')[0] for l in lines] + paths = [os.path.join(dist.location, p) for p in paths] + file_list = [os.path.relpath(p, dist.location) for p in paths] + + if dist.has_metadata('METADATA'): + metadata = dist.get_metadata('METADATA') + else: + # Otherwise use pip's log for .egg-info's + if dist.has_metadata('installed-files.txt'): + paths = dist.get_metadata_lines('installed-files.txt') + paths = [os.path.join(dist.egg_info, p) for p in paths] + file_list = [os.path.relpath(p, dist.location) for p in paths] + + if dist.has_metadata('PKG-INFO'): + metadata = dist.get_metadata('PKG-INFO') + + if dist.has_metadata('entry_points.txt'): + entry_points = dist.get_metadata_lines('entry_points.txt') + package['entry_points'] = entry_points + + if dist.has_metadata('INSTALLER'): + for line in dist.get_metadata_lines('INSTALLER'): + if line.strip(): + package['installer'] = line.strip() + break + + # @todo: Should pkg_resources.Distribution have a + # `get_pkg_info` method? 
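+        # METADATA / PKG-INFO is an RFC 822-style header block, so the
+        # stdlib email FeedParser can parse it into "Header: value" fields
+        # (except for repeated headers such as Classifier, handled below).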
+        feed_parser = FeedParser()
+        feed_parser.feed(metadata)
+        pkg_info_dict = feed_parser.close()
+        for key in ('metadata-version', 'summary',
+                    'home-page', 'author', 'author-email', 'license'):
+            package[key] = pkg_info_dict.get(key)
+
+        # It looks like FeedParser cannot deal with repeated headers
+        classifiers = []
+        for line in metadata.splitlines():
+            if line.startswith('Classifier: '):
+                classifiers.append(line[len('Classifier: '):])
+        package['classifiers'] = classifiers
+
+        if file_list:
+            package['files'] = sorted(file_list)
+        yield package
+
+
+def print_results(distributions, list_files=False, verbose=False):
+    """
+    Print the information from the installed distributions found.
+    """
+    results_printed = False
+    for i, dist in enumerate(distributions):
+        results_printed = True
+        if i > 0:
+            write_output("---")
+
+        write_output("Name: %s", dist.get('name', ''))
+        write_output("Version: %s", dist.get('version', ''))
+        write_output("Summary: %s", dist.get('summary', ''))
+        write_output("Home-page: %s", dist.get('home-page', ''))
+        write_output("Author: %s", dist.get('author', ''))
+        write_output("Author-email: %s", dist.get('author-email', ''))
+        write_output("License: %s", dist.get('license', ''))
+        write_output("Location: %s", dist.get('location', ''))
+        write_output("Requires: %s", ', '.join(dist.get('requires', [])))
+        write_output("Required-by: %s", ', '.join(dist.get('required_by', [])))
+
+        if verbose:
+            write_output("Metadata-Version: %s",
+                         dist.get('metadata-version', ''))
+            write_output("Installer: %s", dist.get('installer', ''))
+            write_output("Classifiers:")
+            for classifier in dist.get('classifiers', []):
+                write_output("  %s", classifier)
+            write_output("Entry-points:")
+            for entry in dist.get('entry_points', []):
+                write_output("  %s", entry.strip())
+        if list_files:
+            write_output("Files:")
+            for line in dist.get('files', []):
+                write_output("  %s", line.strip())
+            if "files" not in dist:
+                write_output("Cannot locate installed-files.txt")
+    return results_printed
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/uninstall.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/uninstall.py
new file mode 100644
index 0000000000000000000000000000000000000000..1bde414a6c1a5f4b9ef5b990f22b334a9b0a71b6
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/uninstall.py
@@ -0,0 +1,82 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.cli.base_command import Command
+from pip._internal.cli.req_command import SessionCommandMixin
+from pip._internal.exceptions import InstallationError
+from pip._internal.req import parse_requirements
+from pip._internal.req.constructors import install_req_from_line
+from pip._internal.utils.misc import protect_pip_from_modification_on_windows
+
+
+class UninstallCommand(Command, SessionCommandMixin):
+    """
+    Uninstall packages.
+
+    pip is able to uninstall most installed packages. Known exceptions are:
+
+    - Pure distutils packages installed with ``python setup.py install``, which
+      leave behind no metadata to determine what files were installed.
+    - Script wrappers installed by ``python setup.py develop``.
+    """
+
+    usage = """
+      %prog [options] <package> ...
+ %prog [options] -r <requirements file> ...""" + + def __init__(self, *args, **kw): + super(UninstallCommand, self).__init__(*args, **kw) + self.cmd_opts.add_option( + '-r', '--requirement', + dest='requirements', + action='append', + default=[], + metavar='file', + help='Uninstall all the packages listed in the given requirements ' + 'file. This option can be used multiple times.', + ) + self.cmd_opts.add_option( + '-y', '--yes', + dest='yes', + action='store_true', + help="Don't ask for confirmation of uninstall deletions.") + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + session = self.get_default_session(options) + + reqs_to_uninstall = {} + for name in args: + req = install_req_from_line( + name, isolated=options.isolated_mode, + ) + if req.name: + reqs_to_uninstall[canonicalize_name(req.name)] = req + for filename in options.requirements: + for req in parse_requirements( + filename, + options=options, + session=session): + if req.name: + reqs_to_uninstall[canonicalize_name(req.name)] = req + if not reqs_to_uninstall: + raise InstallationError( + 'You must give at least one requirement to %(name)s (see ' + '"pip help %(name)s")' % dict(name=self.name) + ) + + protect_pip_from_modification_on_windows( + modifying_pip="pip" in reqs_to_uninstall + ) + + for req in reqs_to_uninstall.values(): + uninstall_pathset = req.uninstall( + auto_confirm=options.yes, verbose=self.verbosity > 0, + ) + if uninstall_pathset: + uninstall_pathset.commit() diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/commands/wheel.py b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/wheel.py new file mode 100644 index 0000000000000000000000000000000000000000..eb44bcee45930d10654d5bdc6af5658aef01bc41 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/commands/wheel.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- + +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import logging +import os +import shutil + +from pip._internal.cache import WheelCache +from pip._internal.cli import cmdoptions +from pip._internal.cli.req_command import RequirementCommand +from pip._internal.exceptions import CommandError, PreviousBuildDirError +from pip._internal.req import RequirementSet +from pip._internal.req.req_tracker import get_requirement_tracker +from pip._internal.utils.misc import ensure_dir, normalize_path +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.wheel_builder import build, should_build_for_wheel_command + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import Any, List + + +logger = logging.getLogger(__name__) + + +class WheelCommand(RequirementCommand): + """ + Build Wheel archives for your requirements and dependencies. + + Wheel is a built-package format, and offers the advantage of not + recompiling your software during every install. For more details, see the + wheel docs: https://wheel.readthedocs.io/en/latest/ + + Requirements: setuptools>=0.8, and wheel. + + 'pip wheel' uses the bdist_wheel setuptools extension from the wheel + package to build individual wheels. + + """ + + usage = """ + %prog [options] <requirement specifier> ... + %prog [options] -r <requirements file> ... + %prog [options] [-e] <vcs project url> ... + %prog [options] [-e] <local project path> ... 
+ %prog [options] <archive url/path> ...""" + + def __init__(self, *args, **kw): + super(WheelCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option( + '-w', '--wheel-dir', + dest='wheel_dir', + metavar='dir', + default=os.curdir, + help=("Build wheels into <dir>, where the default is the " + "current working directory."), + ) + cmd_opts.add_option(cmdoptions.no_binary()) + cmd_opts.add_option(cmdoptions.only_binary()) + cmd_opts.add_option(cmdoptions.prefer_binary()) + cmd_opts.add_option( + '--build-option', + dest='build_options', + metavar='options', + action='append', + help="Extra arguments to be supplied to 'setup.py bdist_wheel'.", + ) + cmd_opts.add_option(cmdoptions.no_build_isolation()) + cmd_opts.add_option(cmdoptions.use_pep517()) + cmd_opts.add_option(cmdoptions.no_use_pep517()) + cmd_opts.add_option(cmdoptions.constraints()) + cmd_opts.add_option(cmdoptions.editable()) + cmd_opts.add_option(cmdoptions.requirements()) + cmd_opts.add_option(cmdoptions.src()) + cmd_opts.add_option(cmdoptions.ignore_requires_python()) + cmd_opts.add_option(cmdoptions.no_deps()) + cmd_opts.add_option(cmdoptions.build_dir()) + cmd_opts.add_option(cmdoptions.progress_bar()) + + cmd_opts.add_option( + '--global-option', + dest='global_options', + action='append', + metavar='options', + help="Extra global options to be supplied to the setup.py " + "call before the 'bdist_wheel' command.") + + cmd_opts.add_option( + '--pre', + action='store_true', + default=False, + help=("Include pre-release and development versions. By default, " + "pip only finds stable versions."), + ) + + cmd_opts.add_option(cmdoptions.no_clean()) + cmd_opts.add_option(cmdoptions.require_hashes()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def run(self, options, args): + # type: (Values, List[Any]) -> None + cmdoptions.check_install_build_global(options) + + session = self.get_default_session(options) + + finder = self._build_package_finder(options, session) + build_delete = (not (options.no_clean or options.build_dir)) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + + options.wheel_dir = normalize_path(options.wheel_dir) + ensure_dir(options.wheel_dir) + + with get_requirement_tracker() as req_tracker, TempDirectory( + options.build_dir, delete=build_delete, kind="wheel" + ) as directory: + + requirement_set = RequirementSet() + + try: + self.populate_requirement_set( + requirement_set, args, options, finder, session, + wheel_cache + ) + + preparer = self.make_requirement_preparer( + temp_build_dir=directory, + options=options, + req_tracker=req_tracker, + session=session, + finder=finder, + wheel_download_dir=options.wheel_dir, + use_user_site=False, + ) + + resolver = self.make_resolver( + preparer=preparer, + finder=finder, + options=options, + wheel_cache=wheel_cache, + ignore_requires_python=options.ignore_requires_python, + use_pep517=options.use_pep517, + ) + + self.trace_basic_info(finder) + + resolver.resolve(requirement_set) + + reqs_to_build = [ + r for r in requirement_set.requirements.values() + if should_build_for_wheel_command(r) + ] + + # build wheels + build_successes, build_failures = build( + reqs_to_build, + wheel_cache=wheel_cache, + build_options=options.build_options or [], + global_options=options.global_options or [], + ) + for req in build_successes: + assert req.link and req.link.is_wheel + 
+                    assert req.local_file_path
+                    # copy from cache to target directory
+                    try:
+                        shutil.copy(req.local_file_path, options.wheel_dir)
+                    except OSError as e:
+                        logger.warning(
+                            "Building wheel for %s failed: %s",
+                            req.name, e,
+                        )
+                        build_failures.append(req)
+                if len(build_failures) != 0:
+                    raise CommandError(
+                        "Failed to build one or more wheels"
+                    )
+            except PreviousBuildDirError:
+                options.no_clean = True
+                raise
+            finally:
+                if not options.no_clean:
+                    requirement_set.cleanup_files()
+                    wheel_cache.cleanup()
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/configuration.py b/backend/test/lib/python3.8/site-packages/pip/_internal/configuration.py
new file mode 100644
index 0000000000000000000000000000000000000000..f09a1ae25c2b58ad5d15040efc4cbd99658e54b6
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/pip/_internal/configuration.py
@@ -0,0 +1,422 @@
+"""Configuration management setup
+
+Some terminology:
+- name
+  As written in config files.
+- value
+  Value associated with a name
+- key
+  Name combined with its section (section.name)
+- variant
+  A single word describing where the configuration key-value pair came from
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import locale
+import logging
+import os
+import sys
+
+from pip._vendor.six.moves import configparser
+
+from pip._internal.exceptions import (
+    ConfigurationError,
+    ConfigurationFileCouldNotBeLoaded,
+)
+from pip._internal.utils import appdirs
+from pip._internal.utils.compat import WINDOWS, expanduser
+from pip._internal.utils.misc import ensure_dir, enum
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+    from typing import (
+        Any, Dict, Iterable, List, NewType, Optional, Tuple
+    )
+
+    RawConfigParser = configparser.RawConfigParser  # Shorthand
+    Kind = NewType("Kind", str)
+
+logger = logging.getLogger(__name__)
+
+
+# NOTE: Maybe use the optionx attribute to normalize keynames.
+def _normalize_name(name):
+    # type: (str) -> str
+    """Make a name consistent regardless of source (environment or file)
+    """
+    name = name.lower().replace('_', '-')
+    if name.startswith('--'):
+        name = name[2:]  # only prefer long opts
+    return name
+
+
+def _disassemble_key(name):
+    # type: (str) -> List[str]
+    if "." not in name:
+        error_message = (
+            "Key does not contain dot separated section and key. "
+            "Perhaps you wanted to use 'global.{}' instead?"
+        ).format(name)
+        raise ConfigurationError(error_message)
+    return name.split(".", 1)
+
+
+# The kinds of configurations there are.
+kinds = enum(
+    USER="user",        # User Specific
+    GLOBAL="global",    # System Wide
+    SITE="site",        # [Virtual] Environment Specific
+    ENV="env",          # from PIP_CONFIG_FILE
+    ENV_VAR="env-var",  # from Environment Variables
+)
+
+
+CONFIG_BASENAME = 'pip.ini' if WINDOWS else 'pip.conf'
+
+
+def get_configuration_files():
+    # type: () -> Dict[Kind, List[str]]
+    global_config_files = [
+        os.path.join(path, CONFIG_BASENAME)
+        for path in appdirs.site_config_dirs('pip')
+    ]
+
+    site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
+    legacy_config_file = os.path.join(
+        expanduser('~'),
+        'pip' if WINDOWS else '.pip',
+        CONFIG_BASENAME,
+    )
+    new_config_file = os.path.join(
+        appdirs.user_config_dir("pip"), CONFIG_BASENAME
+    )
+    return {
+        kinds.GLOBAL: global_config_files,
+        kinds.SITE: [site_config_file],
+        kinds.USER: [legacy_config_file, new_config_file],
+    }
+
+
+class Configuration(object):
+    """Handles management of configuration.
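+
+    For example, the key "global.timeout" names the "timeout" entry in the
+    [global] section of a pip.ini / pip.conf file.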
+
+    Provides an interface to accessing and managing configuration files.
+
+    This class provides an API that takes "section.key-name" style keys and
+    stores the value associated with it as "key-name" under the section
+    "section".
+
+    This allows for a clean interface wherein both the section and the
+    key-name are preserved in an easily managed form in the configuration
+    files, and the stored data remains easy to work with.
+    """
+
+    def __init__(self, isolated, load_only=None):
+        # type: (bool, Kind) -> None
+        super(Configuration, self).__init__()
+
+        _valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.SITE, None]
+        if load_only not in _valid_load_only:
+            raise ConfigurationError(
+                "Got invalid value for load_only - should be one of {}".format(
+                    ", ".join(map(repr, _valid_load_only[:-1]))
+                )
+            )
+        self.isolated = isolated  # type: bool
+        self.load_only = load_only  # type: Optional[Kind]
+
+        # The order here determines the override order.
+        self._override_order = [
+            kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
+        ]
+
+        self._ignore_env_names = ["version", "help"]
+
+        # Because we keep track of where we got the data from
+        self._parsers = {
+            variant: [] for variant in self._override_order
+        }  # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
+        self._config = {
+            variant: {} for variant in self._override_order
+        }  # type: Dict[Kind, Dict[str, Any]]
+        self._modified_parsers = []  # type: List[Tuple[str, RawConfigParser]]
+
+    def load(self):
+        # type: () -> None
+        """Loads configuration from configuration files and environment
+        """
+        self._load_config_files()
+        if not self.isolated:
+            self._load_environment_vars()
+
+    def get_file_to_edit(self):
+        # type: () -> Optional[str]
+        """Returns the file with the highest priority in the configuration
+        """
+        assert self.load_only is not None, \
+            "A file to edit must be specified via load_only"
+
+        try:
+            return self._get_parser_to_modify()[0]
+        except IndexError:
+            return None
+
+    def items(self):
+        # type: () -> Iterable[Tuple[str, Any]]
+        """Returns key-value pairs like dict.items() representing the loaded
+        configuration
+        """
+        return self._dictionary.items()
+
+    def get_value(self, key):
+        # type: (str) -> Any
+        """Get a value from the configuration.
+        """
+        try:
+            return self._dictionary[key]
+        except KeyError:
+            raise ConfigurationError("No such key - {}".format(key))
+
+    def set_value(self, key, value):
+        # type: (str, Any) -> None
+        """Modify a value in the configuration.
+        """
+        self._ensure_have_load_only()
+
+        fname, parser = self._get_parser_to_modify()
+
+        if parser is not None:
+            section, name = _disassemble_key(key)
+
+            # Modify the parser and the configuration
+            if not parser.has_section(section):
+                parser.add_section(section)
+            parser.set(section, name, value)
+
+        self._config[self.load_only][key] = value
+        self._mark_as_modified(fname, parser)
+
+    def unset_value(self, key):
+        # type: (str) -> None
+        """Unset a value in the configuration.
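+
+        Raises ConfigurationError if the key is not present in the
+        currently loaded variant.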
+ """ + self._ensure_have_load_only() + + if key not in self._config[self.load_only]: + raise ConfigurationError("No such key - {}".format(key)) + + fname, parser = self._get_parser_to_modify() + + if parser is not None: + section, name = _disassemble_key(key) + + # Remove the key in the parser + modified_something = False + if parser.has_section(section): + # Returns whether the option was removed or not + modified_something = parser.remove_option(section, name) + + if modified_something: + # name removed from parser, section may now be empty + section_iter = iter(parser.items(section)) + try: + val = next(section_iter) + except StopIteration: + val = None + + if val is None: + parser.remove_section(section) + + self._mark_as_modified(fname, parser) + else: + raise ConfigurationError( + "Fatal Internal error [id=1]. Please report as a bug." + ) + + del self._config[self.load_only][key] + + def save(self): + # type: () -> None + """Save the current in-memory state. + """ + self._ensure_have_load_only() + + for fname, parser in self._modified_parsers: + logger.info("Writing to %s", fname) + + # Ensure directory exists. + ensure_dir(os.path.dirname(fname)) + + with open(fname, "w") as f: + parser.write(f) + + # + # Private routines + # + + def _ensure_have_load_only(self): + # type: () -> None + if self.load_only is None: + raise ConfigurationError("Needed a specific file to be modifying.") + logger.debug("Will be working with %s variant only", self.load_only) + + @property + def _dictionary(self): + # type: () -> Dict[str, Any] + """A dictionary representing the loaded configuration. + """ + # NOTE: Dictionaries are not populated if not loaded. So, conditionals + # are not needed here. + retval = {} + + for variant in self._override_order: + retval.update(self._config[variant]) + + return retval + + def _load_config_files(self): + # type: () -> None + """Loads configuration from configuration files + """ + config_files = dict(self._iter_config_files()) + if config_files[kinds.ENV][0:1] == [os.devnull]: + logger.debug( + "Skipping loading configuration files due to " + "environment's PIP_CONFIG_FILE being os.devnull" + ) + return + + for variant, files in config_files.items(): + for fname in files: + # If there's specific variant set in `load_only`, load only + # that variant, not the others. + if self.load_only is not None and variant != self.load_only: + logger.debug( + "Skipping file '%s' (variant: %s)", fname, variant + ) + continue + + parser = self._load_file(variant, fname) + + # Keeping track of the parsers used + self._parsers[variant].append((fname, parser)) + + def _load_file(self, variant, fname): + # type: (Kind, str) -> RawConfigParser + logger.debug("For variant '%s', will try loading '%s'", variant, fname) + parser = self._construct_parser(fname) + + for section in parser.sections(): + items = parser.items(section) + self._config[variant].update(self._normalized_keys(section, items)) + + return parser + + def _construct_parser(self, fname): + # type: (str) -> RawConfigParser + parser = configparser.RawConfigParser() + # If there is no such file, don't bother reading it but create the + # parser anyway, to hold the data. + # Doing this is useful when modifying and saving files, where we don't + # need to construct a parser. 
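+        # (Read or decode failures below are re-raised as
+        # ConfigurationFileCouldNotBeLoaded so that the offending file can
+        # be reported, rather than surfacing a raw configparser traceback.)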
+ if os.path.exists(fname): + try: + parser.read(fname) + except UnicodeDecodeError: + # See https://github.com/pypa/pip/issues/4963 + raise ConfigurationFileCouldNotBeLoaded( + reason="contains invalid {} characters".format( + locale.getpreferredencoding(False) + ), + fname=fname, + ) + except configparser.Error as error: + # See https://github.com/pypa/pip/issues/4893 + raise ConfigurationFileCouldNotBeLoaded(error=error) + return parser + + def _load_environment_vars(self): + # type: () -> None + """Loads configuration from environment variables + """ + self._config[kinds.ENV_VAR].update( + self._normalized_keys(":env:", self._get_environ_vars()) + ) + + def _normalized_keys(self, section, items): + # type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any] + """Normalizes items to construct a dictionary with normalized keys. + + This routine is where the names become keys and are made the same + regardless of source - configuration files or environment. + """ + normalized = {} + for name, val in items: + key = section + "." + _normalize_name(name) + normalized[key] = val + return normalized + + def _get_environ_vars(self): + # type: () -> Iterable[Tuple[str, str]] + """Returns a generator with all environmental vars with prefix PIP_""" + for key, val in os.environ.items(): + should_be_yielded = ( + key.startswith("PIP_") and + key[4:].lower() not in self._ignore_env_names + ) + if should_be_yielded: + yield key[4:].lower(), val + + # XXX: This is patched in the tests. + def _iter_config_files(self): + # type: () -> Iterable[Tuple[Kind, List[str]]] + """Yields variant and configuration files associated with it. + + This should be treated like items of a dictionary. + """ + # SMELL: Move the conditions out of this function + + # environment variables have the lowest priority + config_file = os.environ.get('PIP_CONFIG_FILE', None) + if config_file is not None: + yield kinds.ENV, [config_file] + else: + yield kinds.ENV, [] + + config_files = get_configuration_files() + + # at the base we have any global configuration + yield kinds.GLOBAL, config_files[kinds.GLOBAL] + + # per-user configuration next + should_load_user_config = not self.isolated and not ( + config_file and os.path.exists(config_file) + ) + if should_load_user_config: + # The legacy config file is overridden by the new config file + yield kinds.USER, config_files[kinds.USER] + + # finally virtualenv configuration first trumping others + yield kinds.SITE, config_files[kinds.SITE] + + def _get_parser_to_modify(self): + # type: () -> Tuple[str, RawConfigParser] + # Determine which parser to modify + parsers = self._parsers[self.load_only] + if not parsers: + # This should not happen if everything works correctly. + raise ConfigurationError( + "Fatal Internal error [id=2]. Please report as a bug." + ) + + # Use the highest priority parser. + return parsers[-1] + + # XXX: This is patched in the tests. 
+ def _mark_as_modified(self, fname, parser): + # type: (str, RawConfigParser) -> None + file_parser_tuple = (fname, parser) + if file_parser_tuple not in self._modified_parsers: + self._modified_parsers.append(file_parser_tuple) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__init__.py b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d5c1afc5bc1ffd09c0ce46f4f2f700a1b996fe47 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__init__.py @@ -0,0 +1,24 @@ +from pip._internal.distributions.sdist import SourceDistribution +from pip._internal.distributions.wheel import WheelDistribution +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from pip._internal.distributions.base import AbstractDistribution + from pip._internal.req.req_install import InstallRequirement + + +def make_distribution_for_install_requirement(install_req): + # type: (InstallRequirement) -> AbstractDistribution + """Returns a Distribution for the given InstallRequirement + """ + # Editable requirements will always be source distributions. They use the + # legacy logic until we create a modern standard for them. + if install_req.editable: + return SourceDistribution(install_req) + + # If it's a wheel, it's a WheelDistribution + if install_req.is_wheel: + return WheelDistribution(install_req) + + # Otherwise, a SourceDistribution + return SourceDistribution(install_req) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..30279204ad3152a66d6dfa50369bdfbdb9ed8b36 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/base.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/base.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..faa3c1a03f1eed6a3eb45279ae17bab07aadd8ab Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/base.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9d0a3533a2d013fa185c4ca23fa5eefb6e3c6b44 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9aeb4fa5d1b97e48877eea0b97350d2bd401529d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-38.pyc differ diff --git 
a/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ac09789482ab1afc30676b317f6dd951fd8a86e5 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/base.py b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/base.py new file mode 100644 index 0000000000000000000000000000000000000000..b836b98d162abda775f4b0c2b132eac4cf58a22d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/base.py @@ -0,0 +1,45 @@ +import abc + +from pip._vendor.six import add_metaclass + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional + + from pip._vendor.pkg_resources import Distribution + from pip._internal.req import InstallRequirement + from pip._internal.index.package_finder import PackageFinder + + +@add_metaclass(abc.ABCMeta) +class AbstractDistribution(object): + """A base class for handling installable artifacts. + + The requirements for anything installable are as follows: + + - we must be able to determine the requirement name + (or we can't correctly handle the non-upgrade case). + + - for packages with setup requirements, we must also be able + to determine their requirements without installing additional + packages (for the same reason as run-time dependencies) + + - we must be able to create a Distribution object exposing the + above metadata. + """ + + def __init__(self, req): + # type: (InstallRequirement) -> None + super(AbstractDistribution, self).__init__() + self.req = req + + @abc.abstractmethod + def get_pkg_resources_distribution(self): + # type: () -> Optional[Distribution] + raise NotImplementedError() + + @abc.abstractmethod + def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None + raise NotImplementedError() diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/installed.py b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/installed.py new file mode 100644 index 0000000000000000000000000000000000000000..0d15bf42405e541b5154de307c54df47b9b7e2ec --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/installed.py @@ -0,0 +1,24 @@ +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional + + from pip._vendor.pkg_resources import Distribution + from pip._internal.index.package_finder import PackageFinder + + +class InstalledDistribution(AbstractDistribution): + """Represents an installed package. + + This does not need any preparation as the required information has already + been computed. 
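+    (The metadata is read from ``req.satisfied_by``, which the resolver
+    sets when it finds the requirement already installed.)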
+    """
+
+    def get_pkg_resources_distribution(self):
+        # type: () -> Optional[Distribution]
+        return self.req.satisfied_by
+
+    def prepare_distribution_metadata(self, finder, build_isolation):
+        # type: (PackageFinder, bool) -> None
+        pass
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/sdist.py b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/sdist.py
new file mode 100644
index 0000000000000000000000000000000000000000..be3d7d97a1cfe877ce549603ebe1e17c65d68803
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/sdist.py
@@ -0,0 +1,104 @@
+import logging
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.exceptions import InstallationError
+from pip._internal.utils.subprocess import runner_with_spinner_message
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+    from typing import Set, Tuple
+
+    from pip._vendor.pkg_resources import Distribution
+    from pip._internal.index.package_finder import PackageFinder
+
+
+logger = logging.getLogger(__name__)
+
+
+class SourceDistribution(AbstractDistribution):
+    """Represents a source distribution.
+
+    The preparation step for these needs metadata for the packages to be
+    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
+    """
+
+    def get_pkg_resources_distribution(self):
+        # type: () -> Distribution
+        return self.req.get_dist()
+
+    def prepare_distribution_metadata(self, finder, build_isolation):
+        # type: (PackageFinder, bool) -> None
+        # Load pyproject.toml, to determine whether PEP 517 is to be used
+        self.req.load_pyproject_toml()
+
+        # Set up the build isolation, if this requirement should be isolated
+        should_isolate = self.req.use_pep517 and build_isolation
+        if should_isolate:
+            self._setup_isolation(finder)
+
+        self.req.prepare_metadata()
+
+    def _setup_isolation(self, finder):
+        # type: (PackageFinder) -> None
+        def _raise_conflicts(conflicting_with, conflicting_reqs):
+            # type: (str, Set[Tuple[str, str]]) -> None
+            format_string = (
+                "Some build dependencies for {requirement} "
+                "conflict with {conflicting_with}: {description}."
+            )
+            error_message = format_string.format(
+                requirement=self.req,
+                conflicting_with=conflicting_with,
+                description=', '.join(
+                    '{} is incompatible with {}'.format(installed, wanted)
+                    for installed, wanted in sorted(conflicting_reqs)
+                )
+            )
+            raise InstallationError(error_message)
+
+        # Isolate in a BuildEnvironment and install the build-time
+        # requirements.
+        pyproject_requires = self.req.pyproject_requires
+        assert pyproject_requires is not None
+
+        self.req.build_env = BuildEnvironment()
+        self.req.build_env.install_requirements(
+            finder, pyproject_requires, 'overlay',
+            "Installing build dependencies"
+        )
+        conflicting, missing = self.req.build_env.check_requirements(
+            self.req.requirements_to_check
+        )
+        if conflicting:
+            _raise_conflicts("PEP 517/518 supported requirements",
+                             conflicting)
+        if missing:
+            logger.warning(
+                "Missing build requirements in pyproject.toml for %s.",
+                self.req,
+            )
+            logger.warning(
+                "The project does not specify a build backend, and "
+                "pip cannot fall back to setuptools without %s.",
+                " and ".join(map(repr, sorted(missing)))
+            )
+        # Install any extra build dependencies that the backend requests.
+        # This must be done in a second pass, as the pyproject.toml
+        # dependencies must be installed before we can call the backend.
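+        # (get_requires_for_build_wheel() is the PEP 517 hook through which
+        # a backend declares dynamic build requirements; it must run inside
+        # the freshly provisioned build environment.)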
+ with self.req.build_env: + runner = runner_with_spinner_message( + "Getting requirements to build wheel" + ) + backend = self.req.pep517_backend + assert backend is not None + with backend.subprocess_runner(runner): + reqs = backend.get_requires_for_build_wheel() + + conflicting, missing = self.req.build_env.check_requirements(reqs) + if conflicting: + _raise_conflicts("the backend dependencies", conflicting) + self.req.build_env.install_requirements( + finder, missing, 'normal', + "Installing backend dependencies" + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/wheel.py b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/wheel.py new file mode 100644 index 0000000000000000000000000000000000000000..bf3482b151f08196c82e719a9c194dfc20e4182e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/distributions/wheel.py @@ -0,0 +1,36 @@ +from zipfile import ZipFile + +from pip._internal.distributions.base import AbstractDistribution +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel + +if MYPY_CHECK_RUNNING: + from pip._vendor.pkg_resources import Distribution + from pip._internal.index.package_finder import PackageFinder + + +class WheelDistribution(AbstractDistribution): + """Represents a wheel distribution. + + This does not need any preparation as wheels can be directly unpacked. + """ + + def get_pkg_resources_distribution(self): + # type: () -> Distribution + """Loads the metadata from the wheel file into memory and returns a + Distribution that uses it, not relying on the wheel file or + requirement. + """ + # Set as part of preparation during download. + assert self.req.local_file_path + # Wheels are never unnamed. + assert self.req.name + + with ZipFile(self.req.local_file_path, allowZip64=True) as z: + return pkg_resources_distribution_for_wheel( + z, self.req.name, self.req.local_file_path + ) + + def prepare_distribution_metadata(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None + pass diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/exceptions.py b/backend/test/lib/python3.8/site-packages/pip/_internal/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..dddec789ef40daff2e23a8da45e1042fd210bdc7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/exceptions.py @@ -0,0 +1,308 @@ +"""Exceptions used throughout package""" + +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +from itertools import chain, groupby, repeat + +from pip._vendor.six import iteritems + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional + from pip._vendor.pkg_resources import Distribution + from pip._internal.req.req_install import InstallRequirement + + +class PipError(Exception): + """Base pip exception""" + + +class ConfigurationError(PipError): + """General exception in configuration""" + + +class InstallationError(PipError): + """General exception during installation""" + + +class UninstallationError(PipError): + """General exception during uninstallation""" + + +class NoneMetadataError(PipError): + """ + Raised when accessing "METADATA" or "PKG-INFO" metadata for a + pip._vendor.pkg_resources.Distribution object and + `dist.has_metadata('METADATA')` returns True but + `dist.get_metadata('METADATA')` returns None (and similarly for + "PKG-INFO"). + """ + + def __init__(self, dist, metadata_name): + # type: (Distribution, str) -> None + """ + :param dist: A Distribution object. + :param metadata_name: The name of the metadata being accessed + (can be "METADATA" or "PKG-INFO"). + """ + self.dist = dist + self.metadata_name = metadata_name + + def __str__(self): + # type: () -> str + # Use `dist` in the error message because its stringification + # includes more information, like the version and location. + return ( + 'None {} metadata found for distribution: {}'.format( + self.metadata_name, self.dist, + ) + ) + + +class DistributionNotFound(InstallationError): + """Raised when a distribution cannot be found to satisfy a requirement""" + + +class RequirementsFileParseError(InstallationError): + """Raised when a general error occurs parsing a requirements file line.""" + + +class BestVersionAlreadyInstalled(PipError): + """Raised when the most up-to-date version of a package is already + installed.""" + + +class BadCommand(PipError): + """Raised when virtualenv or a command is not found""" + + +class CommandError(PipError): + """Raised when there is an error in command-line arguments""" + + +class PreviousBuildDirError(PipError): + """Raised when there's a previous conflicting build directory""" + + +class InvalidWheelFilename(InstallationError): + """Invalid wheel filename.""" + + +class UnsupportedWheel(InstallationError): + """Unsupported wheel.""" + + +class HashErrors(InstallationError): + """Multiple HashError instances rolled into one for reporting""" + + def __init__(self): + self.errors = [] + + def append(self, error): + self.errors.append(error) + + def __str__(self): + lines = [] + self.errors.sort(key=lambda e: e.order) + for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): + lines.append(cls.head) + lines.extend(e.body() for e in errors_of_cls) + if lines: + return '\n'.join(lines) + + def __nonzero__(self): + return bool(self.errors) + + def __bool__(self): + return self.__nonzero__() + + +class HashError(InstallationError): + """ + A failure to verify a package against known-good hashes + + :cvar order: An int sorting hash exception classes by difficulty of + recovery (lower being harder), so the user doesn't bother fretting + about unpinned packages when he has deeper issues, like VCS + dependencies, to deal with. Also keeps error reports in a + deterministic order. 
+ :cvar head: A section heading for display above potentially many + exceptions of this kind + :ivar req: The InstallRequirement that triggered this error. This is + pasted on after the exception is instantiated, because it's not + typically available earlier. + + """ + req = None # type: Optional[InstallRequirement] + head = '' + + def body(self): + """Return a summary of me for display under the heading. + + This default implementation simply prints a description of the + triggering requirement. + + :param req: The InstallRequirement that provoked this error, with + populate_link() having already been called + + """ + return ' %s' % self._requirement_name() + + def __str__(self): + return '%s\n%s' % (self.head, self.body()) + + def _requirement_name(self): + """Return a description of the requirement that triggered me. + + This default implementation returns long description of the req, with + line numbers + + """ + return str(self.req) if self.req else 'unknown package' + + +class VcsHashUnsupported(HashError): + """A hash was provided for a version-control-system-based requirement, but + we don't have a method for hashing those.""" + + order = 0 + head = ("Can't verify hashes for these requirements because we don't " + "have a way to hash version control repositories:") + + +class DirectoryUrlHashUnsupported(HashError): + """A hash was provided for a version-control-system-based requirement, but + we don't have a method for hashing those.""" + + order = 1 + head = ("Can't verify hashes for these file:// requirements because they " + "point to directories:") + + +class HashMissing(HashError): + """A hash was needed for a requirement but is absent.""" + + order = 2 + head = ('Hashes are required in --require-hashes mode, but they are ' + 'missing from some requirements. Here is a list of those ' + 'requirements along with the hashes their downloaded archives ' + 'actually had. Add lines like these to your requirements files to ' + 'prevent tampering. (If you did not enable --require-hashes ' + 'manually, note that it turns on automatically when any package ' + 'has a hash.)') + + def __init__(self, gotten_hash): + """ + :param gotten_hash: The hash of the (possibly malicious) archive we + just downloaded + """ + self.gotten_hash = gotten_hash + + def body(self): + # Dodge circular import. + from pip._internal.utils.hashes import FAVORITE_HASH + + package = None + if self.req: + # In the case of URL-based requirements, display the original URL + # seen in the requirements file rather than the package name, + # so the output can be directly copied into the requirements file. + package = (self.req.original_link if self.req.original_link + # In case someone feeds something downright stupid + # to InstallRequirement's constructor. + else getattr(self.req, 'req', None)) + return ' %s --hash=%s:%s' % (package or 'unknown package', + FAVORITE_HASH, + self.gotten_hash) + + +class HashUnpinned(HashError): + """A requirement had a hash specified but was not pinned to a specific + version.""" + + order = 3 + head = ('In --require-hashes mode, all requirements must have their ' + 'versions pinned with ==. These do not:') + + +class HashMismatch(HashError): + """ + Distribution file hash values don't match. + + :ivar package_name: The name of the package that triggered the hash + mismatch. Feel free to write to this after the exception is raise to + improve its error message. + + """ + order = 4 + head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS ' + 'FILE. 
If you have updated the package versions, please update ' + 'the hashes. Otherwise, examine the package contents carefully; ' + 'someone may have tampered with them.') + + def __init__(self, allowed, gots): + """ + :param allowed: A dict of algorithm names pointing to lists of allowed + hex digests + :param gots: A dict of algorithm names pointing to hashes we + actually got from the files under suspicion + """ + self.allowed = allowed + self.gots = gots + + def body(self): + return ' %s:\n%s' % (self._requirement_name(), + self._hash_comparison()) + + def _hash_comparison(self): + """ + Return a comparison of actual and expected hash values. + + Example:: + + Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde + or 123451234512345123451234512345123451234512345 + Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef + + """ + def hash_then_or(hash_name): + # For now, all the decent hashes have 6-char names, so we can get + # away with hard-coding space literals. + return chain([hash_name], repeat(' or')) + + lines = [] + for hash_name, expecteds in iteritems(self.allowed): + prefix = hash_then_or(hash_name) + lines.extend((' Expected %s %s' % (next(prefix), e)) + for e in expecteds) + lines.append(' Got %s\n' % + self.gots[hash_name].hexdigest()) + return '\n'.join(lines) + + +class UnsupportedPythonVersion(InstallationError): + """Unsupported python version according to Requires-Python package + metadata.""" + + +class ConfigurationFileCouldNotBeLoaded(ConfigurationError): + """When there are errors while loading a configuration file + """ + + def __init__(self, reason="could not be loaded", fname=None, error=None): + super(ConfigurationFileCouldNotBeLoaded, self).__init__(error) + self.reason = reason + self.fname = fname + self.error = error + + def __str__(self): + if self.fname is not None: + message_part = " in {}.".format(self.fname) + else: + assert self.error is not None + message_part = ".\n{}\n".format(self.error.message) + return "Configuration file {}{}".format(self.reason, message_part) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/index/__init__.py b/backend/test/lib/python3.8/site-packages/pip/_internal/index/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7a17b7b3b6ad49157ee41f3da304fec3d32342d3 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/index/__init__.py @@ -0,0 +1,2 @@ +"""Index interaction code +""" diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/index/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/index/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..64b930df2bb26519d614524a104fb3d2bd3144e5 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/index/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/index/__pycache__/collector.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/index/__pycache__/collector.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..636cf1be0b4268816106354cebddfb0fa06b1bd5 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/index/__pycache__/collector.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-38.pyc 
b/backend/test/lib/python3.8/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..37148f9757440dfc20614ee3bc3104f254c4dacf Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/index/collector.py b/backend/test/lib/python3.8/site-packages/pip/_internal/index/collector.py new file mode 100644 index 0000000000000000000000000000000000000000..8330793171a3946d2bdba22c81784711acfa5e75 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/index/collector.py @@ -0,0 +1,544 @@ +""" +The main purpose of this module is to expose LinkCollector.collect_links(). +""" + +import cgi +import itertools +import logging +import mimetypes +import os +from collections import OrderedDict + +from pip._vendor import html5lib, requests +from pip._vendor.distlib.compat import unescape +from pip._vendor.requests.exceptions import HTTPError, RetryError, SSLError +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request + +from pip._internal.models.link import Link +from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS +from pip._internal.utils.misc import redact_auth_from_url +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url, url_to_path +from pip._internal.vcs import is_url, vcs + +if MYPY_CHECK_RUNNING: + from typing import ( + Callable, Iterable, List, MutableMapping, Optional, Sequence, Tuple, + Union, + ) + import xml.etree.ElementTree + + from pip._vendor.requests import Response + + from pip._internal.models.search_scope import SearchScope + from pip._internal.network.session import PipSession + + HTMLElement = xml.etree.ElementTree.Element + ResponseHeaders = MutableMapping[str, str] + + +logger = logging.getLogger(__name__) + + +def _match_vcs_scheme(url): + # type: (str) -> Optional[str] + """Look for VCS schemes in the URL. + + Returns the matched VCS scheme, or None if there's no match. + """ + for scheme in vcs.schemes: + if url.lower().startswith(scheme) and url[len(scheme)] in '+:': + return scheme + return None + + +def _is_url_like_archive(url): + # type: (str) -> bool + """Return whether the URL looks like an archive. + """ + filename = Link(url).filename + for bad_ext in ARCHIVE_EXTENSIONS: + if filename.endswith(bad_ext): + return True + return False + + +class _NotHTML(Exception): + def __init__(self, content_type, request_desc): + # type: (str, str) -> None + super(_NotHTML, self).__init__(content_type, request_desc) + self.content_type = content_type + self.request_desc = request_desc + + +def _ensure_html_header(response): + # type: (Response) -> None + """Check the Content-Type header to ensure the response contains HTML. + + Raises `_NotHTML` if the content type is not text/html. + """ + content_type = response.headers.get("Content-Type", "") + if not content_type.lower().startswith("text/html"): + raise _NotHTML(content_type, response.request.method) + + +class _NotHTTP(Exception): + pass + + +def _ensure_html_response(url, session): + # type: (str, PipSession) -> None + """Send a HEAD request to the URL, and ensure the response contains HTML. + + Raises `_NotHTTP` if the URL is not available for a HEAD request, or + `_NotHTML` if the content type is not text/html. 
+ """ + scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url) + if scheme not in {'http', 'https'}: + raise _NotHTTP() + + resp = session.head(url, allow_redirects=True) + resp.raise_for_status() + + _ensure_html_header(resp) + + +def _get_html_response(url, session): + # type: (str, PipSession) -> Response + """Access an HTML page with GET, and return the response. + + This consists of three parts: + + 1. If the URL looks suspiciously like an archive, send a HEAD first to + check the Content-Type is HTML, to avoid downloading a large file. + Raise `_NotHTTP` if the content type cannot be determined, or + `_NotHTML` if it is not HTML. + 2. Actually perform the request. Raise HTTP exceptions on network failures. + 3. Check the Content-Type header to make sure we got HTML, and raise + `_NotHTML` otherwise. + """ + if _is_url_like_archive(url): + _ensure_html_response(url, session=session) + + logger.debug('Getting page %s', redact_auth_from_url(url)) + + resp = session.get( + url, + headers={ + "Accept": "text/html", + # We don't want to blindly returned cached data for + # /simple/, because authors generally expecting that + # twine upload && pip install will function, but if + # they've done a pip install in the last ~10 minutes + # it won't. Thus by setting this to zero we will not + # blindly use any cached data, however the benefit of + # using max-age=0 instead of no-cache, is that we will + # still support conditional requests, so we will still + # minimize traffic sent in cases where the page hasn't + # changed at all, we will just always incur the round + # trip for the conditional GET now instead of only + # once per 10 minutes. + # For more information, please see pypa/pip#5670. + "Cache-Control": "max-age=0", + }, + ) + resp.raise_for_status() + + # The check for archives above only works if the url ends with + # something that looks like an archive. However that is not a + # requirement of an url. Unless we issue a HEAD request on every + # url we cannot know ahead of time for sure if something is HTML + # or not. However we can check after we've downloaded it. + _ensure_html_header(resp) + + return resp + + +def _get_encoding_from_headers(headers): + # type: (ResponseHeaders) -> Optional[str] + """Determine if we have any encoding information in our headers. + """ + if headers and "Content-Type" in headers: + content_type, params = cgi.parse_header(headers["Content-Type"]) + if "charset" in params: + return params['charset'] + return None + + +def _determine_base_url(document, page_url): + # type: (HTMLElement, str) -> str + """Determine the HTML document's base URL. + + This looks for a ``<base>`` tag in the HTML document. If present, its href + attribute denotes the base URL of anchor tags in the document. If there is + no such tag (or if it does not have a valid href attribute), the HTML + file's URL is used as the base URL. + + :param document: An HTML document representation. The current + implementation expects the result of ``html5lib.parse()``. + :param page_url: The URL of the HTML document. + """ + for base in document.findall(".//base"): + href = base.get("href") + if href is not None: + return href + return page_url + + +def _clean_link(url): + # type: (str) -> str + """Makes sure a link is fully encoded. That is, if a ' ' shows up in + the link, it will be rewritten to %20 (while not over-quoting + % or other characters).""" + # Split the URL into parts according to the general structure + # `scheme://netloc/path;parameters?query#fragment`. 
Note that the + # `netloc` can be empty and the URI will then refer to a local + # filesystem path. + result = urllib_parse.urlparse(url) + # In both cases below we unquote prior to quoting to make sure + # nothing is double quoted. + if result.netloc == "": + # On Windows the path part might contain a drive letter which + # should not be quoted. On Linux where drive letters do not + # exist, the colon should be quoted. We rely on urllib.request + # to do the right thing here. + path = urllib_request.pathname2url( + urllib_request.url2pathname(result.path)) + else: + # In addition to the `/` character we protect `@` so that + # revision strings in VCS URLs are properly parsed. + path = urllib_parse.quote(urllib_parse.unquote(result.path), safe="/@") + return urllib_parse.urlunparse(result._replace(path=path)) + + +def _create_link_from_element( + anchor, # type: HTMLElement + page_url, # type: str + base_url, # type: str +): + # type: (...) -> Optional[Link] + """ + Convert an anchor element in a simple repository page to a Link. + """ + href = anchor.get("href") + if not href: + return None + + url = _clean_link(urllib_parse.urljoin(base_url, href)) + pyrequire = anchor.get('data-requires-python') + pyrequire = unescape(pyrequire) if pyrequire else None + + yanked_reason = anchor.get('data-yanked') + if yanked_reason: + # This is a unicode string in Python 2 (and 3). + yanked_reason = unescape(yanked_reason) + + link = Link( + url, + comes_from=page_url, + requires_python=pyrequire, + yanked_reason=yanked_reason, + ) + + return link + + +def parse_links(page): + # type: (HTMLPage) -> Iterable[Link] + """ + Parse an HTML document, and yield its anchor elements as Link objects. + """ + document = html5lib.parse( + page.content, + transport_encoding=page.encoding, + namespaceHTMLElements=False, + ) + + url = page.url + base_url = _determine_base_url(document, url) + for anchor in document.findall(".//a"): + link = _create_link_from_element( + anchor, + page_url=url, + base_url=base_url, + ) + if link is None: + continue + yield link + + +class HTMLPage(object): + """Represents one page, along with its URL""" + + def __init__( + self, + content, # type: bytes + encoding, # type: Optional[str] + url, # type: str + ): + # type: (...) -> None + """ + :param encoding: the encoding to decode the given content. + :param url: the URL from which the HTML was downloaded. + """ + self.content = content + self.encoding = encoding + self.url = url + + def __str__(self): + # type: () -> str + return redact_auth_from_url(self.url) + + +def _handle_get_page_fail( + link, # type: Link + reason, # type: Union[str, Exception] + meth=None # type: Optional[Callable[..., None]] +): + # type: (...) -> None + if meth is None: + meth = logger.debug + meth("Could not fetch URL %s: %s - skipping", link, reason) + + +def _make_html_page(response): + # type: (Response) -> HTMLPage + encoding = _get_encoding_from_headers(response.headers) + return HTMLPage(response.content, encoding=encoding, url=response.url) + + +def _get_html_page(link, session=None): + # type: (Link, Optional[PipSession]) -> Optional[HTMLPage] + if session is None: + raise TypeError( + "_get_html_page() missing 1 required keyword argument: 'session'" + ) + + url = link.url.split('#', 1)[0] + + # Check for VCS schemes that do not support lookup as web pages. 
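+    # Illustrative note (not an upstream comment): given the matching rule
+    # in _match_vcs_scheme() above, a hypothetical link such as
+    #     git+https://github.com/example/project.git@v1.0
+    # starts with a known VCS scheme followed by "+" or ":", so it is
+    # skipped here instead of being fetched as an HTML page.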
+ vcs_scheme = _match_vcs_scheme(url) + if vcs_scheme: + logger.debug('Cannot look at %s URL %s', vcs_scheme, link) + return None + + # Tack index.html onto file:// URLs that point to directories + scheme, _, path, _, _, _ = urllib_parse.urlparse(url) + if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))): + # add trailing slash if not present so urljoin doesn't trim + # final segment + if not url.endswith('/'): + url += '/' + url = urllib_parse.urljoin(url, 'index.html') + logger.debug(' file: URL is directory, getting %s', url) + + try: + resp = _get_html_response(url, session=session) + except _NotHTTP: + logger.debug( + 'Skipping page %s because it looks like an archive, and cannot ' + 'be checked by HEAD.', link, + ) + except _NotHTML as exc: + logger.debug( + 'Skipping page %s because the %s request got Content-Type: %s', + link, exc.request_desc, exc.content_type, + ) + except HTTPError as exc: + _handle_get_page_fail(link, exc) + except RetryError as exc: + _handle_get_page_fail(link, exc) + except SSLError as exc: + reason = "There was a problem confirming the ssl certificate: " + reason += str(exc) + _handle_get_page_fail(link, reason, meth=logger.info) + except requests.ConnectionError as exc: + _handle_get_page_fail(link, "connection error: %s" % exc) + except requests.Timeout: + _handle_get_page_fail(link, "timed out") + else: + return _make_html_page(resp) + return None + + +def _remove_duplicate_links(links): + # type: (Iterable[Link]) -> List[Link] + """ + Return a list of links, with duplicates removed and ordering preserved. + """ + # We preserve the ordering when removing duplicates because we can. + return list(OrderedDict.fromkeys(links)) + + +def group_locations(locations, expand_dir=False): + # type: (Sequence[str], bool) -> Tuple[List[str], List[str]] + """ + Divide a list of locations into two groups: "files" (archives) and "urls." + + :return: A pair of lists (files, urls). + """ + files = [] + urls = [] + + # puts the url for the given file path into the appropriate list + def sort_path(path): + # type: (str) -> None + url = path_to_url(path) + if mimetypes.guess_type(url, strict=False)[0] == 'text/html': + urls.append(url) + else: + files.append(url) + + for url in locations: + + is_local_path = os.path.exists(url) + is_file_url = url.startswith('file:') + + if is_local_path or is_file_url: + if is_local_path: + path = url + else: + path = url_to_path(url) + if os.path.isdir(path): + if expand_dir: + path = os.path.realpath(path) + for item in os.listdir(path): + sort_path(os.path.join(path, item)) + elif is_file_url: + urls.append(url) + else: + logger.warning( + "Path '{0}' is ignored: " + "it is a directory.".format(path), + ) + elif os.path.isfile(path): + sort_path(path) + else: + logger.warning( + "Url '%s' is ignored: it is neither a file " + "nor a directory.", url, + ) + elif is_url(url): + # Only add url with clear scheme + urls.append(url) + else: + logger.warning( + "Url '%s' is ignored. It is either a non-existing " + "path or lacks a specific scheme.", url, + ) + + return files, urls + + +class CollectedLinks(object): + + """ + Encapsulates the return value of a call to LinkCollector.collect_links(). + + The return value includes both URLs to project pages containing package + links, as well as individual package Link objects collected from other + sources. 
+ + This info is stored separately as: + + (1) links from the configured file locations, + (2) links from the configured find_links, and + (3) urls to HTML project pages, as described by the PEP 503 simple + repository API. + """ + + def __init__( + self, + files, # type: List[Link] + find_links, # type: List[Link] + project_urls, # type: List[Link] + ): + # type: (...) -> None + """ + :param files: Links from file locations. + :param find_links: Links from find_links. + :param project_urls: URLs to HTML project pages, as described by + the PEP 503 simple repository API. + """ + self.files = files + self.find_links = find_links + self.project_urls = project_urls + + +class LinkCollector(object): + + """ + Responsible for collecting Link objects from all configured locations, + making network requests as needed. + + The class's main method is its collect_links() method. + """ + + def __init__( + self, + session, # type: PipSession + search_scope, # type: SearchScope + ): + # type: (...) -> None + self.search_scope = search_scope + self.session = session + + @property + def find_links(self): + # type: () -> List[str] + return self.search_scope.find_links + + def fetch_page(self, location): + # type: (Link) -> Optional[HTMLPage] + """ + Fetch an HTML page containing package links. + """ + return _get_html_page(location, session=self.session) + + def collect_links(self, project_name): + # type: (str) -> CollectedLinks + """Find all available links for the given project name. + + :return: All the Link objects (unfiltered), as a CollectedLinks object. + """ + search_scope = self.search_scope + index_locations = search_scope.get_index_urls_locations(project_name) + index_file_loc, index_url_loc = group_locations(index_locations) + fl_file_loc, fl_url_loc = group_locations( + self.find_links, expand_dir=True, + ) + + file_links = [ + Link(url) for url in itertools.chain(index_file_loc, fl_file_loc) + ] + + # We trust every directly linked archive in find_links + find_link_links = [Link(url, '-f') for url in self.find_links] + + # We trust every url that the user has given us whether it was given + # via --index-url or --find-links. + # We want to filter out anything that does not have a secure origin. + url_locations = [ + link for link in itertools.chain( + (Link(url) for url in index_url_loc), + (Link(url) for url in fl_url_loc), + ) + if self.session.is_secure_origin(link) + ] + + url_locations = _remove_duplicate_links(url_locations) + lines = [ + '{} location(s) to search for versions of {}:'.format( + len(url_locations), project_name, + ), + ] + for link in url_locations: + lines.append('* {}'.format(link)) + logger.debug('\n'.join(lines)) + + return CollectedLinks( + files=file_links, + find_links=find_link_links, + project_urls=url_locations, + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/index/package_finder.py b/backend/test/lib/python3.8/site-packages/pip/_internal/index/package_finder.py new file mode 100644 index 0000000000000000000000000000000000000000..a74d78db5a6c1738d54e784840f97daddff18776 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/index/package_finder.py @@ -0,0 +1,1013 @@ +"""Routines related to PyPI, indexes""" + +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + +from __future__ import absolute_import + +import logging +import re + +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + InvalidWheelFilename, + UnsupportedWheel, +) +from pip._internal.index.collector import parse_links +from pip._internal.models.candidate import InstallationCandidate +from pip._internal.models.format_control import FormatControl +from pip._internal.models.link import Link +from pip._internal.models.selection_prefs import SelectionPreferences +from pip._internal.models.target_python import TargetPython +from pip._internal.models.wheel import Wheel +from pip._internal.utils.filetypes import WHEEL_EXTENSION +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import build_netloc +from pip._internal.utils.packaging import check_requires_python +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS +from pip._internal.utils.urls import url_to_path + +if MYPY_CHECK_RUNNING: + from typing import ( + FrozenSet, Iterable, List, Optional, Set, Text, Tuple, Union, + ) + + from pip._vendor.packaging.tags import Tag + from pip._vendor.packaging.version import _BaseVersion + + from pip._internal.index.collector import LinkCollector + from pip._internal.models.search_scope import SearchScope + from pip._internal.req import InstallRequirement + from pip._internal.utils.hashes import Hashes + + BuildTag = Union[Tuple[()], Tuple[int, str]] + CandidateSortingKey = ( + Tuple[int, int, int, _BaseVersion, BuildTag, Optional[int]] + ) + + +__all__ = ['FormatControl', 'BestCandidateResult', 'PackageFinder'] + + +logger = logging.getLogger(__name__) + + +def _check_link_requires_python( + link, # type: Link + version_info, # type: Tuple[int, int, int] + ignore_requires_python=False, # type: bool +): + # type: (...) -> bool + """ + Return whether the given Python version is compatible with a link's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + """ + try: + is_compatible = check_requires_python( + link.requires_python, version_info=version_info, + ) + except specifiers.InvalidSpecifier: + logger.debug( + "Ignoring invalid Requires-Python (%r) for link: %s", + link.requires_python, link, + ) + else: + if not is_compatible: + version = '.'.join(map(str, version_info)) + if not ignore_requires_python: + logger.debug( + 'Link requires a different Python (%s not in: %r): %s', + version, link.requires_python, link, + ) + return False + + logger.debug( + 'Ignoring failed Requires-Python check (%s not in: %r) ' + 'for link: %s', + version, link.requires_python, link, + ) + + return True + + +class LinkEvaluator(object): + + """ + Responsible for evaluating links for a particular project. + """ + + _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. 
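+    # A construction sketch (illustrative, not upstream code; argument
+    # values are hypothetical):
+    #     LinkEvaluator(project_name="pip", canonical_name="pip",
+    #                   formats=frozenset({"binary", "source"}),
+    #                   target_python=TargetPython(),
+    #                   allow_yanked=False)
+    # Passing allow_yanked explicitly keeps the PEP 592 yanking decision
+    # visible at the call site, as described above.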
+ def __init__( + self, + project_name, # type: str + canonical_name, # type: str + formats, # type: FrozenSet[str] + target_python, # type: TargetPython + allow_yanked, # type: bool + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> None + """ + :param project_name: The user supplied package name. + :param canonical_name: The canonical package name. + :param formats: The formats allowed for this package. Should be a set + with 'binary' or 'source' or both in it. + :param target_python: The target Python interpreter to use when + evaluating link compatibility. This is used, for example, to + check wheel compatibility, as well as when checking the Python + version, e.g. the Python version embedded in a link filename + (or egg fragment) and against an HTML link's optional PEP 503 + "data-requires-python" attribute. + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param ignore_requires_python: Whether to ignore incompatible + PEP 503 "data-requires-python" values in HTML links. Defaults + to False. + """ + if ignore_requires_python is None: + ignore_requires_python = False + + self._allow_yanked = allow_yanked + self._canonical_name = canonical_name + self._ignore_requires_python = ignore_requires_python + self._formats = formats + self._target_python = target_python + + self.project_name = project_name + + def evaluate_link(self, link): + # type: (Link) -> Tuple[bool, Optional[Text]] + """ + Determine whether a link is a candidate for installation. + + :return: A tuple (is_candidate, result), where `result` is (1) a + version string if `is_candidate` is True, and (2) if + `is_candidate` is False, an optional string to log the reason + the link fails to qualify. + """ + version = None + if link.is_yanked and not self._allow_yanked: + reason = link.yanked_reason or '<none given>' + # Mark this as a unicode string to prevent "UnicodeEncodeError: + # 'ascii' codec can't encode character" in Python 2 when + # the reason contains non-ascii characters. + return (False, u'yanked for reason: {}'.format(reason)) + + if link.egg_fragment: + egg_info = link.egg_fragment + ext = link.ext + else: + egg_info, ext = link.splitext() + if not ext: + return (False, 'not a file') + if ext not in SUPPORTED_EXTENSIONS: + return (False, 'unsupported archive format: %s' % ext) + if "binary" not in self._formats and ext == WHEEL_EXTENSION: + reason = 'No binaries permitted for %s' % self.project_name + return (False, reason) + if "macosx10" in link.path and ext == '.zip': + return (False, 'macosx10 one') + if ext == WHEEL_EXTENSION: + try: + wheel = Wheel(link.filename) + except InvalidWheelFilename: + return (False, 'invalid wheel filename') + if canonicalize_name(wheel.name) != self._canonical_name: + reason = 'wrong project name (not %s)' % self.project_name + return (False, reason) + + supported_tags = self._target_python.get_tags() + if not wheel.supported(supported_tags): + # Include the wheel's tags in the reason string to + # simplify troubleshooting compatibility issues. + file_tags = wheel.get_formatted_file_tags() + reason = ( + "none of the wheel's tags match: {}".format( + ', '.join(file_tags) + ) + ) + return (False, reason) + + version = wheel.version + + # This should be up by the self.ok_binary check, but see issue 2700. 
+ if "source" not in self._formats and ext != WHEEL_EXTENSION: + return (False, 'No sources permitted for %s' % self.project_name) + + if not version: + version = _extract_version_from_fragment( + egg_info, self._canonical_name, + ) + if not version: + return ( + False, 'Missing project version for %s' % self.project_name, + ) + + match = self._py_version_re.search(version) + if match: + version = version[:match.start()] + py_version = match.group(1) + if py_version != self._target_python.py_version: + return (False, 'Python version is incorrect') + + supports_python = _check_link_requires_python( + link, version_info=self._target_python.py_version_info, + ignore_requires_python=self._ignore_requires_python, + ) + if not supports_python: + # Return None for the reason text to suppress calling + # _log_skipped_link(). + return (False, None) + + logger.debug('Found link %s, version: %s', link, version) + + return (True, version) + + +def filter_unallowed_hashes( + candidates, # type: List[InstallationCandidate] + hashes, # type: Hashes + project_name, # type: str +): + # type: (...) -> List[InstallationCandidate] + """ + Filter out candidates whose hashes aren't allowed, and return a new + list of candidates. + + If at least one candidate has an allowed hash, then all candidates with + either an allowed hash or no hash specified are returned. Otherwise, + the given candidates are returned. + + Including the candidates with no hash specified when there is a match + allows a warning to be logged if there is a more preferred candidate + with no hash specified. Returning all candidates in the case of no + matches lets pip report the hash of the candidate that would otherwise + have been installed (e.g. permitting the user to more easily update + their requirements file with the desired hash). + """ + if not hashes: + logger.debug( + 'Given no hashes to check %s links for project %r: ' + 'discarding no candidates', + len(candidates), + project_name, + ) + # Make sure we're not returning back the given value. + return list(candidates) + + matches_or_no_digest = [] + # Collect the non-matches for logging purposes. + non_matches = [] + match_count = 0 + for candidate in candidates: + link = candidate.link + if not link.has_hash: + pass + elif link.is_hash_allowed(hashes=hashes): + match_count += 1 + else: + non_matches.append(candidate) + continue + + matches_or_no_digest.append(candidate) + + if match_count: + filtered = matches_or_no_digest + else: + # Make sure we're not returning back the given value. + filtered = list(candidates) + + if len(filtered) == len(candidates): + discard_message = 'discarding no candidates' + else: + discard_message = 'discarding {} non-matches:\n {}'.format( + len(non_matches), + '\n '.join(str(candidate.link) for candidate in non_matches) + ) + + logger.debug( + 'Checked %s links for project %r against %s hashes ' + '(%s matches, %s no digest): %s', + len(candidates), + project_name, + hashes.digest_count, + match_count, + len(matches_or_no_digest) - match_count, + discard_message + ) + + return filtered + + +class CandidatePreferences(object): + + """ + Encapsulates some of the preferences for filtering and sorting + InstallationCandidate objects. + """ + + def __init__( + self, + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + ): + # type: (...) -> None + """ + :param allow_all_prereleases: Whether to allow all pre-releases. 
+ """ + self.allow_all_prereleases = allow_all_prereleases + self.prefer_binary = prefer_binary + + +class BestCandidateResult(object): + """A collection of candidates, returned by `PackageFinder.find_best_candidate`. + + This class is only intended to be instantiated by CandidateEvaluator's + `compute_best_candidate()` method. + """ + + def __init__( + self, + candidates, # type: List[InstallationCandidate] + applicable_candidates, # type: List[InstallationCandidate] + best_candidate, # type: Optional[InstallationCandidate] + ): + # type: (...) -> None + """ + :param candidates: A sequence of all available candidates found. + :param applicable_candidates: The applicable candidates. + :param best_candidate: The most preferred candidate found, or None + if no applicable candidates were found. + """ + assert set(applicable_candidates) <= set(candidates) + + if best_candidate is None: + assert not applicable_candidates + else: + assert best_candidate in applicable_candidates + + self._applicable_candidates = applicable_candidates + self._candidates = candidates + + self.best_candidate = best_candidate + + def iter_all(self): + # type: () -> Iterable[InstallationCandidate] + """Iterate through all candidates. + """ + return iter(self._candidates) + + def iter_applicable(self): + # type: () -> Iterable[InstallationCandidate] + """Iterate through the applicable candidates. + """ + return iter(self._applicable_candidates) + + +class CandidateEvaluator(object): + + """ + Responsible for filtering and sorting candidates for installation based + on what tags are valid. + """ + + @classmethod + def create( + cls, + project_name, # type: str + target_python=None, # type: Optional[TargetPython] + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> CandidateEvaluator + """Create a CandidateEvaluator object. + + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. + :param hashes: An optional collection of allowed hashes. + """ + if target_python is None: + target_python = TargetPython() + if specifier is None: + specifier = specifiers.SpecifierSet() + + supported_tags = target_python.get_tags() + + return cls( + project_name=project_name, + supported_tags=supported_tags, + specifier=specifier, + prefer_binary=prefer_binary, + allow_all_prereleases=allow_all_prereleases, + hashes=hashes, + ) + + def __init__( + self, + project_name, # type: str + supported_tags, # type: List[Tag] + specifier, # type: specifiers.BaseSpecifier + prefer_binary=False, # type: bool + allow_all_prereleases=False, # type: bool + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> None + """ + :param supported_tags: The PEP 425 tags supported by the target + Python in order of preference (most preferred first). + """ + self._allow_all_prereleases = allow_all_prereleases + self._hashes = hashes + self._prefer_binary = prefer_binary + self._project_name = project_name + self._specifier = specifier + self._supported_tags = supported_tags + + def get_applicable_candidates( + self, + candidates, # type: List[InstallationCandidate] + ): + # type: (...) 
-> List[InstallationCandidate] + """ + Return the applicable candidates from a list of candidates. + """ + # Using None infers from the specifier instead. + allow_prereleases = self._allow_all_prereleases or None + specifier = self._specifier + versions = { + str(v) for v in specifier.filter( + # We turn the version object into a str here because otherwise + # when we're debundled but setuptools isn't, Python will see + # packaging.version.Version and + # pkg_resources._vendor.packaging.version.Version as different + # types. This way we'll use a str as a common data interchange + # format. If we stop using the pkg_resources provided specifier + # and start using our own, we can drop the cast to str(). + (str(c.version) for c in candidates), + prereleases=allow_prereleases, + ) + } + + # Again, converting version to str to deal with debundling. + applicable_candidates = [ + c for c in candidates if str(c.version) in versions + ] + + filtered_applicable_candidates = filter_unallowed_hashes( + candidates=applicable_candidates, + hashes=self._hashes, + project_name=self._project_name, + ) + + return sorted(filtered_applicable_candidates, key=self._sort_key) + + def _sort_key(self, candidate): + # type: (InstallationCandidate) -> CandidateSortingKey + """ + Function to pass as the `key` argument to a call to sorted() to sort + InstallationCandidates by preference. + + Returns a tuple such that tuples sorting as greater using Python's + default comparison operator are more preferred. + + The preference is as follows: + + First and foremost, candidates with allowed (matching) hashes are + always preferred over candidates without matching hashes. This is + because e.g. if the only candidate with an allowed hash is yanked, + we still want to use that candidate. + + Second, excepting hash considerations, candidates that have been + yanked (in the sense of PEP 592) are always less preferred than + candidates that haven't been yanked. Then: + + If not finding wheels, they are sorted by version only. + If finding wheels, then the sort order is by version, then: + 1. existing installs + 2. wheels ordered via Wheel.support_index_min(self._supported_tags) + 3. source archives + If prefer_binary was set, then all wheels are sorted above sources. + + Note: it was considered to embed this logic into the Link + comparison operators, but then different sdist links + with the same version, would have to be considered equal + """ + valid_tags = self._supported_tags + support_num = len(valid_tags) + build_tag = () # type: BuildTag + binary_preference = 0 + link = candidate.link + if link.is_wheel: + # can raise InvalidWheelFilename + wheel = Wheel(link.filename) + if not wheel.supported(valid_tags): + raise UnsupportedWheel( + "%s is not a supported wheel for this platform. It " + "can't be sorted." % wheel.filename + ) + if self._prefer_binary: + binary_preference = 1 + pri = -(wheel.support_index_min(valid_tags)) + if wheel.build_tag is not None: + match = re.match(r'^(\d+)(.*)$', wheel.build_tag) + build_tag_groups = match.groups() + build_tag = (int(build_tag_groups[0]), build_tag_groups[1]) + else: # sdist + pri = -(support_num) + has_allowed_hash = int(link.is_hash_allowed(self._hashes)) + yank_value = -1 * int(link.is_yanked) # -1 for yanked. + return ( + has_allowed_hash, yank_value, binary_preference, candidate.version, + build_tag, pri, + ) + + def sort_best_candidate( + self, + candidates, # type: List[InstallationCandidate] + ): + # type: (...) 
-> Optional[InstallationCandidate] + """ + Return the best candidate per the instance's sort order, or None if + no candidate is acceptable. + """ + if not candidates: + return None + + best_candidate = max(candidates, key=self._sort_key) + + # Log a warning per PEP 592 if necessary before returning. + link = best_candidate.link + if link.is_yanked: + reason = link.yanked_reason or '<none given>' + msg = ( + # Mark this as a unicode string to prevent + # "UnicodeEncodeError: 'ascii' codec can't encode character" + # in Python 2 when the reason contains non-ascii characters. + u'The candidate selected for download or install is a ' + 'yanked version: {candidate}\n' + 'Reason for being yanked: {reason}' + ).format(candidate=best_candidate, reason=reason) + logger.warning(msg) + + return best_candidate + + def compute_best_candidate( + self, + candidates, # type: List[InstallationCandidate] + ): + # type: (...) -> BestCandidateResult + """ + Compute and return a `BestCandidateResult` instance. + """ + applicable_candidates = self.get_applicable_candidates(candidates) + + best_candidate = self.sort_best_candidate(applicable_candidates) + + return BestCandidateResult( + candidates, + applicable_candidates=applicable_candidates, + best_candidate=best_candidate, + ) + + +class PackageFinder(object): + """This finds packages. + + This is meant to match easy_install's technique for looking for + packages, by reading pages and looking for appropriate links. + """ + + def __init__( + self, + link_collector, # type: LinkCollector + target_python, # type: TargetPython + allow_yanked, # type: bool + format_control=None, # type: Optional[FormatControl] + candidate_prefs=None, # type: CandidatePreferences + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> None + """ + This constructor is primarily meant to be used by the create() class + method and from tests. + + :param format_control: A FormatControl object, used to control + the selection of source packages / binary packages when consulting + the index and links. + :param candidate_prefs: Options to use when creating a + CandidateEvaluator object. + """ + if candidate_prefs is None: + candidate_prefs = CandidatePreferences() + + format_control = format_control or FormatControl(set(), set()) + + self._allow_yanked = allow_yanked + self._candidate_prefs = candidate_prefs + self._ignore_requires_python = ignore_requires_python + self._link_collector = link_collector + self._target_python = target_python + + self.format_control = format_control + + # These are boring links that have already been logged somehow. + self._logged_links = set() # type: Set[Link] + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + @classmethod + def create( + cls, + link_collector, # type: LinkCollector + selection_prefs, # type: SelectionPreferences + target_python=None, # type: Optional[TargetPython] + ): + # type: (...) -> PackageFinder + """Create a PackageFinder. + + :param selection_prefs: The candidate selection preferences, as a + SelectionPreferences object. + :param target_python: The target Python interpreter to use when + checking compatibility. If None (the default), a TargetPython + object will be constructed from the running Python. 
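+
+        A minimal usage sketch (illustrative, not from upstream docs;
+        assumes a ``link_collector`` built elsewhere):
+
+            finder = PackageFinder.create(
+                link_collector=link_collector,
+                selection_prefs=SelectionPreferences(allow_yanked=False),
+            )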
+ """ + if target_python is None: + target_python = TargetPython() + + candidate_prefs = CandidatePreferences( + prefer_binary=selection_prefs.prefer_binary, + allow_all_prereleases=selection_prefs.allow_all_prereleases, + ) + + return cls( + candidate_prefs=candidate_prefs, + link_collector=link_collector, + target_python=target_python, + allow_yanked=selection_prefs.allow_yanked, + format_control=selection_prefs.format_control, + ignore_requires_python=selection_prefs.ignore_requires_python, + ) + + @property + def search_scope(self): + # type: () -> SearchScope + return self._link_collector.search_scope + + @search_scope.setter + def search_scope(self, search_scope): + # type: (SearchScope) -> None + self._link_collector.search_scope = search_scope + + @property + def find_links(self): + # type: () -> List[str] + return self._link_collector.find_links + + @property + def index_urls(self): + # type: () -> List[str] + return self.search_scope.index_urls + + @property + def trusted_hosts(self): + # type: () -> Iterable[str] + for host_port in self._link_collector.session.pip_trusted_origins: + yield build_netloc(*host_port) + + @property + def allow_all_prereleases(self): + # type: () -> bool + return self._candidate_prefs.allow_all_prereleases + + def set_allow_all_prereleases(self): + # type: () -> None + self._candidate_prefs.allow_all_prereleases = True + + def make_link_evaluator(self, project_name): + # type: (str) -> LinkEvaluator + canonical_name = canonicalize_name(project_name) + formats = self.format_control.get_allowed_formats(canonical_name) + + return LinkEvaluator( + project_name=project_name, + canonical_name=canonical_name, + formats=formats, + target_python=self._target_python, + allow_yanked=self._allow_yanked, + ignore_requires_python=self._ignore_requires_python, + ) + + def _sort_links(self, links): + # type: (Iterable[Link]) -> List[Link] + """ + Returns elements of links in order, non-egg links first, egg links + second, while eliminating duplicates + """ + eggs, no_eggs = [], [] + seen = set() # type: Set[Link] + for link in links: + if link not in seen: + seen.add(link) + if link.egg_fragment: + eggs.append(link) + else: + no_eggs.append(link) + return no_eggs + eggs + + def _log_skipped_link(self, link, reason): + # type: (Link, Text) -> None + if link not in self._logged_links: + # Mark this as a unicode string to prevent "UnicodeEncodeError: + # 'ascii' codec can't encode character" in Python 2 when + # the reason contains non-ascii characters. + # Also, put the link at the end so the reason is more visible + # and because the link string is usually very long. + logger.debug(u'Skipping link: %s: %s', reason, link) + self._logged_links.add(link) + + def get_install_candidate(self, link_evaluator, link): + # type: (LinkEvaluator, Link) -> Optional[InstallationCandidate] + """ + If the link is a candidate for install, convert it to an + InstallationCandidate and return it. Otherwise, return None. + """ + is_candidate, result = link_evaluator.evaluate_link(link) + if not is_candidate: + if result: + self._log_skipped_link(link, reason=result) + return None + + return InstallationCandidate( + name=link_evaluator.project_name, + link=link, + # Convert the Text result to str since InstallationCandidate + # accepts str. + version=str(result), + ) + + def evaluate_links(self, link_evaluator, links): + # type: (LinkEvaluator, Iterable[Link]) -> List[InstallationCandidate] + """ + Convert links that are candidates to InstallationCandidate objects. 
+ """ + candidates = [] + for link in self._sort_links(links): + candidate = self.get_install_candidate(link_evaluator, link) + if candidate is not None: + candidates.append(candidate) + + return candidates + + def process_project_url(self, project_url, link_evaluator): + # type: (Link, LinkEvaluator) -> List[InstallationCandidate] + logger.debug( + 'Fetching project page and analyzing links: %s', project_url, + ) + html_page = self._link_collector.fetch_page(project_url) + if html_page is None: + return [] + + page_links = list(parse_links(html_page)) + + with indent_log(): + package_links = self.evaluate_links( + link_evaluator, + links=page_links, + ) + + return package_links + + def find_all_candidates(self, project_name): + # type: (str) -> List[InstallationCandidate] + """Find all available InstallationCandidate for project_name + + This checks index_urls and find_links. + All versions found are returned as an InstallationCandidate list. + + See LinkEvaluator.evaluate_link() for details on which files + are accepted. + """ + collected_links = self._link_collector.collect_links(project_name) + + link_evaluator = self.make_link_evaluator(project_name) + + find_links_versions = self.evaluate_links( + link_evaluator, + links=collected_links.find_links, + ) + + page_versions = [] + for project_url in collected_links.project_urls: + package_links = self.process_project_url( + project_url, link_evaluator=link_evaluator, + ) + page_versions.extend(package_links) + + file_versions = self.evaluate_links( + link_evaluator, + links=collected_links.files, + ) + if file_versions: + file_versions.sort(reverse=True) + logger.debug( + 'Local files found: %s', + ', '.join([ + url_to_path(candidate.link.url) + for candidate in file_versions + ]) + ) + + # This is an intentional priority ordering + return file_versions + find_links_versions + page_versions + + def make_candidate_evaluator( + self, + project_name, # type: str + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> CandidateEvaluator + """Create a CandidateEvaluator object to use. + """ + candidate_prefs = self._candidate_prefs + return CandidateEvaluator.create( + project_name=project_name, + target_python=self._target_python, + prefer_binary=candidate_prefs.prefer_binary, + allow_all_prereleases=candidate_prefs.allow_all_prereleases, + specifier=specifier, + hashes=hashes, + ) + + def find_best_candidate( + self, + project_name, # type: str + specifier=None, # type: Optional[specifiers.BaseSpecifier] + hashes=None, # type: Optional[Hashes] + ): + # type: (...) -> BestCandidateResult + """Find matches for the given project and specifier. + + :param specifier: An optional object implementing `filter` + (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable + versions. + + :return: A `BestCandidateResult` instance. 
+ """ + candidates = self.find_all_candidates(project_name) + candidate_evaluator = self.make_candidate_evaluator( + project_name=project_name, + specifier=specifier, + hashes=hashes, + ) + return candidate_evaluator.compute_best_candidate(candidates) + + def find_requirement(self, req, upgrade): + # type: (InstallRequirement, bool) -> Optional[Link] + """Try to find a Link matching req + + Expects req, an InstallRequirement and upgrade, a boolean + Returns a Link if found, + Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise + """ + hashes = req.hashes(trust_internet=False) + best_candidate_result = self.find_best_candidate( + req.name, specifier=req.specifier, hashes=hashes, + ) + best_candidate = best_candidate_result.best_candidate + + installed_version = None # type: Optional[_BaseVersion] + if req.satisfied_by is not None: + installed_version = parse_version(req.satisfied_by.version) + + def _format_versions(cand_iter): + # type: (Iterable[InstallationCandidate]) -> str + # This repeated parse_version and str() conversion is needed to + # handle different vendoring sources from pip and pkg_resources. + # If we stop using the pkg_resources provided specifier and start + # using our own, we can drop the cast to str(). + return ", ".join(sorted( + {str(c.version) for c in cand_iter}, + key=parse_version, + )) or "none" + + if installed_version is None and best_candidate is None: + logger.critical( + 'Could not find a version that satisfies the requirement %s ' + '(from versions: %s)', + req, + _format_versions(best_candidate_result.iter_all()), + ) + + raise DistributionNotFound( + 'No matching distribution found for %s' % req + ) + + best_installed = False + if installed_version and ( + best_candidate is None or + best_candidate.version <= installed_version): + best_installed = True + + if not upgrade and installed_version is not None: + if best_installed: + logger.debug( + 'Existing installed version (%s) is most up-to-date and ' + 'satisfies requirement', + installed_version, + ) + else: + logger.debug( + 'Existing installed version (%s) satisfies requirement ' + '(most up-to-date version is %s)', + installed_version, + best_candidate.version, + ) + return None + + if best_installed: + # We have an existing version, and its the best version + logger.debug( + 'Installed version (%s) is most up-to-date (past versions: ' + '%s)', + installed_version, + _format_versions(best_candidate_result.iter_applicable()), + ) + raise BestVersionAlreadyInstalled + + logger.debug( + 'Using version %s (newest of versions: %s)', + best_candidate.version, + _format_versions(best_candidate_result.iter_applicable()), + ) + return best_candidate.link + + +def _find_name_version_sep(fragment, canonical_name): + # type: (str, str) -> int + """Find the separator's index based on the package's canonical name. + + :param fragment: A <package>+<version> filename "fragment" (stem) or + egg fragment. + :param canonical_name: The package's canonical name. + + This function is needed since the canonicalized name does not necessarily + have the same length as the egg info's name part. An example:: + + >>> fragment = 'foo__bar-1.0' + >>> canonical_name = 'foo-bar' + >>> _find_name_version_sep(fragment, canonical_name) + 8 + """ + # Project name and version must be separated by one single dash. Find all + # occurrences of dashes; if the string in front of it matches the canonical + # name, this is the one separating the name and version parts. 
+ for i, c in enumerate(fragment): + if c != "-": + continue + if canonicalize_name(fragment[:i]) == canonical_name: + return i + raise ValueError("{} does not match {}".format(fragment, canonical_name)) + + +def _extract_version_from_fragment(fragment, canonical_name): + # type: (str, str) -> Optional[str] + """Parse the version string from a <package>+<version> filename + "fragment" (stem) or egg fragment. + + :param fragment: The string to parse. E.g. foo-2.1 + :param canonical_name: The canonicalized name of the package this + belongs to. + """ + try: + version_start = _find_name_version_sep(fragment, canonical_name) + 1 + except ValueError: + return None + version = fragment[version_start:] + if not version: + return None + return version diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/legacy_resolve.py b/backend/test/lib/python3.8/site-packages/pip/_internal/legacy_resolve.py new file mode 100644 index 0000000000000000000000000000000000000000..ca269121b60c1b792fbc1a08000c4f2e4503e706 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/legacy_resolve.py @@ -0,0 +1,430 @@ +"""Dependency Resolution + +The dependency resolution in pip is performed as follows: + +for top-level requirements: + a. only one spec allowed per project, regardless of conflicts or not. + otherwise a "double requirement" exception is raised + b. they override sub-dependency requirements. +for sub-dependencies + a. "first found, wins" (where the order is breadth first) +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False +# mypy: disallow-untyped-defs=False + +import logging +import sys +from collections import defaultdict +from itertools import chain + +from pip._vendor.packaging import specifiers + +from pip._internal.exceptions import ( + BestVersionAlreadyInstalled, + DistributionNotFound, + HashError, + HashErrors, + UnsupportedPythonVersion, +) +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import dist_in_usersite, normalize_version_info +from pip._internal.utils.packaging import ( + check_requires_python, + get_requires_python, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Callable, DefaultDict, List, Optional, Set, Tuple + from pip._vendor import pkg_resources + + from pip._internal.distributions import AbstractDistribution + from pip._internal.index.package_finder import PackageFinder + from pip._internal.operations.prepare import RequirementPreparer + from pip._internal.req.req_install import InstallRequirement + from pip._internal.req.req_set import RequirementSet + + InstallRequirementProvider = Callable[ + [str, InstallRequirement], InstallRequirement + ] + DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]] + +logger = logging.getLogger(__name__) + + +def _check_dist_requires_python( + dist, # type: pkg_resources.Distribution + version_info, # type: Tuple[int, int, int] + ignore_requires_python=False, # type: bool +): + # type: (...) -> None + """ + Check whether the given Python version is compatible with a distribution's + "Requires-Python" value. + + :param version_info: A 3-tuple of ints representing the Python + major-minor-micro version to check. + :param ignore_requires_python: Whether to ignore the "Requires-Python" + value if the given Python version isn't compatible. + + :raises UnsupportedPythonVersion: When the given Python version isn't + compatible. 
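+
+    For example (illustrative): with version_info=(3, 6, 5) and a
+    distribution whose Requires-Python is ">=3.7", this raises
+    UnsupportedPythonVersion ("3.6.5 not in '>=3.7'") unless
+    ignore_requires_python is set, in which case only a debug message
+    is logged.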
+ """ + requires_python = get_requires_python(dist) + try: + is_compatible = check_requires_python( + requires_python, version_info=version_info, + ) + except specifiers.InvalidSpecifier as exc: + logger.warning( + "Package %r has an invalid Requires-Python: %s", + dist.project_name, exc, + ) + return + + if is_compatible: + return + + version = '.'.join(map(str, version_info)) + if ignore_requires_python: + logger.debug( + 'Ignoring failed Requires-Python check for package %r: ' + '%s not in %r', + dist.project_name, version, requires_python, + ) + return + + raise UnsupportedPythonVersion( + 'Package {!r} requires a different Python: {} not in {!r}'.format( + dist.project_name, version, requires_python, + )) + + +class Resolver(object): + """Resolves which packages need to be installed/uninstalled to perform \ + the requested operation without breaking the requirements of any package. + """ + + _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} + + def __init__( + self, + preparer, # type: RequirementPreparer + finder, # type: PackageFinder + make_install_req, # type: InstallRequirementProvider + use_user_site, # type: bool + ignore_dependencies, # type: bool + ignore_installed, # type: bool + ignore_requires_python, # type: bool + force_reinstall, # type: bool + upgrade_strategy, # type: str + py_version_info=None, # type: Optional[Tuple[int, ...]] + ): + # type: (...) -> None + super(Resolver, self).__init__() + assert upgrade_strategy in self._allowed_strategies + + if py_version_info is None: + py_version_info = sys.version_info[:3] + else: + py_version_info = normalize_version_info(py_version_info) + + self._py_version_info = py_version_info + + self.preparer = preparer + self.finder = finder + + self.upgrade_strategy = upgrade_strategy + self.force_reinstall = force_reinstall + self.ignore_dependencies = ignore_dependencies + self.ignore_installed = ignore_installed + self.ignore_requires_python = ignore_requires_python + self.use_user_site = use_user_site + self._make_install_req = make_install_req + + self._discovered_dependencies = \ + defaultdict(list) # type: DiscoveredDependencies + + def resolve(self, requirement_set): + # type: (RequirementSet) -> None + """Resolve what operations need to be done + + As a side-effect of this method, the packages (and their dependencies) + are downloaded, unpacked and prepared for installation. This + preparation is done by ``pip.operations.prepare``. + + Once PyPI has static dependency metadata available, it would be + possible to move the preparation to become a step separated from + dependency resolution. + """ + # If any top-level requirement has a hash specified, enter + # hash-checking mode, which requires hashes from all. + root_reqs = ( + requirement_set.unnamed_requirements + + list(requirement_set.requirements.values()) + ) + + # Actually prepare the files, and collect any exceptions. Most hash + # exceptions cannot be checked ahead of time, because + # req.populate_link() needs to be called before we can make decisions + # based on link type. 
+ discovered_reqs = [] # type: List[InstallRequirement] + hash_errors = HashErrors() + for req in chain(root_reqs, discovered_reqs): + try: + discovered_reqs.extend(self._resolve_one(requirement_set, req)) + except HashError as exc: + exc.req = req + hash_errors.append(exc) + + if hash_errors: + raise hash_errors + + def _is_upgrade_allowed(self, req): + # type: (InstallRequirement) -> bool + if self.upgrade_strategy == "to-satisfy-only": + return False + elif self.upgrade_strategy == "eager": + return True + else: + assert self.upgrade_strategy == "only-if-needed" + return req.is_direct + + def _set_req_to_reinstall(self, req): + # type: (InstallRequirement) -> None + """ + Set a requirement to be installed. + """ + # Don't uninstall the conflict if doing a user install and the + # conflict is not a user install. + if not self.use_user_site or dist_in_usersite(req.satisfied_by): + req.should_reinstall = True + req.satisfied_by = None + + def _check_skip_installed(self, req_to_install): + # type: (InstallRequirement) -> Optional[str] + """Check if req_to_install should be skipped. + + This will check if the req is installed, and whether we should upgrade + or reinstall it, taking into account all the relevant user options. + + After calling this req_to_install will only have satisfied_by set to + None if the req_to_install is to be upgraded/reinstalled etc. Any + other value will be a dist recording the current thing installed that + satisfies the requirement. + + Note that for vcs urls and the like we can't assess skipping in this + routine - we simply identify that we need to pull the thing down, + then later on it is pulled down and introspected to assess upgrade/ + reinstalls etc. + + :return: A text reason for why it was skipped, or None. + """ + if self.ignore_installed: + return None + + req_to_install.check_if_exists(self.use_user_site) + if not req_to_install.satisfied_by: + return None + + if self.force_reinstall: + self._set_req_to_reinstall(req_to_install) + return None + + if not self._is_upgrade_allowed(req_to_install): + if self.upgrade_strategy == "only-if-needed": + return 'already satisfied, skipping upgrade' + return 'already satisfied' + + # Check for the possibility of an upgrade. For link-based + # requirements we have to pull the tree down and inspect to assess + # the version #, so it's handled way down. + if not req_to_install.link: + try: + self.finder.find_requirement(req_to_install, upgrade=True) + except BestVersionAlreadyInstalled: + # Then the best version is installed. + return 'already up-to-date' + except DistributionNotFound: + # No distribution found, so we squash the error. It will + # be raised later when we re-try later to do the install. + # Why don't we just raise here? + pass + + self._set_req_to_reinstall(req_to_install) + return None + + def _get_abstract_dist_for(self, req): + # type: (InstallRequirement) -> AbstractDistribution + """Takes a InstallRequirement and returns a single AbstractDist \ + representing a prepared variant of the same. + """ + if req.editable: + return self.preparer.prepare_editable_requirement(req) + + # satisfied_by is only evaluated by calling _check_skip_installed, + # so it must be None here. + assert req.satisfied_by is None + skip_reason = self._check_skip_installed(req) + + if req.satisfied_by: + return self.preparer.prepare_installed_requirement( + req, skip_reason + ) + + upgrade_allowed = self._is_upgrade_allowed(req) + + # We eagerly populate the link, since that's our "legacy" behavior. 
+ require_hashes = self.preparer.require_hashes + req.populate_link(self.finder, upgrade_allowed, require_hashes) + abstract_dist = self.preparer.prepare_linked_requirement(req) + + # NOTE + # The following portion is for determining if a certain package is + # going to be re-installed/upgraded or not and reporting to the user. + # This should probably get cleaned up in a future refactor. + + # req.req is only avail after unpack for URL + # pkgs repeat check_if_exists to uninstall-on-upgrade + # (#14) + if not self.ignore_installed: + req.check_if_exists(self.use_user_site) + + if req.satisfied_by: + should_modify = ( + self.upgrade_strategy != "to-satisfy-only" or + self.force_reinstall or + self.ignore_installed or + req.link.scheme == 'file' + ) + if should_modify: + self._set_req_to_reinstall(req) + else: + logger.info( + 'Requirement already satisfied (use --upgrade to upgrade):' + ' %s', req, + ) + + return abstract_dist + + def _resolve_one( + self, + requirement_set, # type: RequirementSet + req_to_install, # type: InstallRequirement + ): + # type: (...) -> List[InstallRequirement] + """Prepare a single requirements file. + + :return: A list of additional InstallRequirements to also install. + """ + # Tell user what we are doing for this requirement: + # obtain (editable), skipping, processing (local url), collecting + # (remote url or package name) + if req_to_install.constraint or req_to_install.prepared: + return [] + + req_to_install.prepared = True + + # register tmp src for cleanup in case something goes wrong + requirement_set.reqs_to_cleanup.append(req_to_install) + + abstract_dist = self._get_abstract_dist_for(req_to_install) + + # Parse and return dependencies + dist = abstract_dist.get_pkg_resources_distribution() + # This will raise UnsupportedPythonVersion if the given Python + # version isn't compatible with the distribution's Requires-Python. + _check_dist_requires_python( + dist, version_info=self._py_version_info, + ignore_requires_python=self.ignore_requires_python, + ) + + more_reqs = [] # type: List[InstallRequirement] + + def add_req(subreq, extras_requested): + sub_install_req = self._make_install_req( + str(subreq), + req_to_install, + ) + parent_req_name = req_to_install.name + to_scan_again, add_to_parent = requirement_set.add_requirement( + sub_install_req, + parent_req_name=parent_req_name, + extras_requested=extras_requested, + ) + if parent_req_name and add_to_parent: + self._discovered_dependencies[parent_req_name].append( + add_to_parent + ) + more_reqs.extend(to_scan_again) + + with indent_log(): + # We add req_to_install before its dependencies, so that we + # can refer to it when adding dependencies. + if not requirement_set.has_requirement(req_to_install.name): + # 'unnamed' requirements will get added here + # 'unnamed' requirements can only come from being directly + # provided by the user. 
+ assert req_to_install.is_direct + requirement_set.add_requirement( + req_to_install, parent_req_name=None, + ) + + if not self.ignore_dependencies: + if req_to_install.extras: + logger.debug( + "Installing extra requirements: %r", + ','.join(req_to_install.extras), + ) + missing_requested = sorted( + set(req_to_install.extras) - set(dist.extras) + ) + for missing in missing_requested: + logger.warning( + '%s does not provide the extra \'%s\'', + dist, missing + ) + + available_requested = sorted( + set(dist.extras) & set(req_to_install.extras) + ) + for subreq in dist.requires(available_requested): + add_req(subreq, extras_requested=available_requested) + + if not req_to_install.editable and not req_to_install.satisfied_by: + # XXX: --no-install leads this to report 'Successfully + # downloaded' for only non-editable reqs, even though we took + # action on them. + requirement_set.successfully_downloaded.append(req_to_install) + + return more_reqs + + def get_installation_order(self, req_set): + # type: (RequirementSet) -> List[InstallRequirement] + """Create the installation order. + + The installation order is topological - requirements are installed + before the requiring thing. We break cycles at an arbitrary point, + and make no other guarantees. + """ + # The current implementation, which we may change at any point + # installs the user specified things in the order given, except when + # dependencies must come earlier to achieve topological order. + order = [] + ordered_reqs = set() # type: Set[InstallRequirement] + + def schedule(req): + if req.satisfied_by or req in ordered_reqs: + return + if req.constraint: + return + ordered_reqs.add(req) + for dep in self._discovered_dependencies[req.name]: + schedule(dep) + order.append(req) + + for install_req in req_set.requirements.values(): + schedule(install_req) + return order diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/locations.py b/backend/test/lib/python3.8/site-packages/pip/_internal/locations.py new file mode 100644 index 0000000000000000000000000000000000000000..0c115531911af77f5eab69775c7cdd8e43b47e1d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/locations.py @@ -0,0 +1,194 @@ +"""Locations where we look for configs, install stuff, etc""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +from __future__ import absolute_import + +import os +import os.path +import platform +import site +import sys +import sysconfig +from distutils import sysconfig as distutils_sysconfig +from distutils.command.install import SCHEME_KEYS # type: ignore +from distutils.command.install import install as distutils_install_command + +from pip._internal.models.scheme import Scheme +from pip._internal.utils import appdirs +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast +from pip._internal.utils.virtualenv import running_under_virtualenv + +if MYPY_CHECK_RUNNING: + from typing import Dict, List, Optional, Union + + from distutils.cmd import Command as DistutilsCommand + + +# Application Directories +USER_CACHE_DIR = appdirs.user_cache_dir("pip") + + +def get_major_minor_version(): + # type: () -> str + """ + Return the major-minor version of the current Python as a string, e.g. + "3.7" or "3.10". 
+ """ + return '{}.{}'.format(*sys.version_info) + + +def get_src_prefix(): + # type: () -> str + if running_under_virtualenv(): + src_prefix = os.path.join(sys.prefix, 'src') + else: + # FIXME: keep src in cwd for now (it is not a temporary folder) + try: + src_prefix = os.path.join(os.getcwd(), 'src') + except OSError: + # In case the current working directory has been renamed or deleted + sys.exit( + "The folder you are executing pip from can no longer be found." + ) + + # under macOS + virtualenv sys.prefix is not properly resolved + # it is something like /path/to/python/bin/.. + return os.path.abspath(src_prefix) + + +# FIXME doesn't account for venv linked to global site-packages + +site_packages = sysconfig.get_path("purelib") # type: Optional[str] + +# This is because of a bug in PyPy's sysconfig module, see +# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths +# for more information. +if platform.python_implementation().lower() == "pypy": + site_packages = distutils_sysconfig.get_python_lib() +try: + # Use getusersitepackages if this is present, as it ensures that the + # value is initialised properly. + user_site = site.getusersitepackages() +except AttributeError: + user_site = site.USER_SITE + +if WINDOWS: + bin_py = os.path.join(sys.prefix, 'Scripts') + bin_user = os.path.join(user_site, 'Scripts') + # buildout uses 'bin' on Windows too? + if not os.path.exists(bin_py): + bin_py = os.path.join(sys.prefix, 'bin') + bin_user = os.path.join(user_site, 'bin') +else: + bin_py = os.path.join(sys.prefix, 'bin') + bin_user = os.path.join(user_site, 'bin') + + # Forcing to use /usr/local/bin for standard macOS framework installs + # Also log to ~/Library/Logs/ for use with the Console.app log viewer + if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/': + bin_py = '/usr/local/bin' + + +def distutils_scheme( + dist_name, user=False, home=None, root=None, isolated=False, prefix=None +): + # type:(str, bool, str, str, bool, str) -> Dict[str, str] + """ + Return a distutils install scheme + """ + from distutils.dist import Distribution + + dist_args = {'name': dist_name} # type: Dict[str, Union[str, List[str]]] + if isolated: + dist_args["script_args"] = ["--no-user-cfg"] + + d = Distribution(dist_args) + d.parse_config_files() + obj = None # type: Optional[DistutilsCommand] + obj = d.get_command_obj('install', create=True) + assert obj is not None + i = cast(distutils_install_command, obj) + # NOTE: setting user or home has the side-effect of creating the home dir + # or user base for installations during finalize_options() + # ideally, we'd prefer a scheme class that has no side-effects. + assert not (user and prefix), "user={} prefix={}".format(user, prefix) + assert not (home and prefix), "home={} prefix={}".format(home, prefix) + i.user = user or i.user + if user or home: + i.prefix = "" + i.prefix = prefix or i.prefix + i.home = home or i.home + i.root = root or i.root + i.finalize_options() + + scheme = {} + for key in SCHEME_KEYS: + scheme[key] = getattr(i, 'install_' + key) + + # install_lib specified in setup.cfg should install *everything* + # into there (i.e. it takes precedence over both purelib and + # platlib). 
Note, i.install_lib is *always* set after + # finalize_options(); we only want to override here if the user + # has explicitly requested it hence going back to the config + if 'install_lib' in d.get_option_dict('install'): + scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) + + if running_under_virtualenv(): + scheme['headers'] = os.path.join( + sys.prefix, + 'include', + 'site', + 'python{}'.format(get_major_minor_version()), + dist_name, + ) + + if root is not None: + path_no_drive = os.path.splitdrive( + os.path.abspath(scheme["headers"]))[1] + scheme["headers"] = os.path.join( + root, + path_no_drive[1:], + ) + + return scheme + + +def get_scheme( + dist_name, # type: str + user=False, # type: bool + home=None, # type: Optional[str] + root=None, # type: Optional[str] + isolated=False, # type: bool + prefix=None, # type: Optional[str] +): + # type: (...) -> Scheme + """ + Get the "scheme" corresponding to the input parameters. The distutils + documentation provides the context for the available schemes: + https://docs.python.org/3/install/index.html#alternate-installation + + :param dist_name: the name of the package to retrieve the scheme for, used + in the headers scheme path + :param user: indicates to use the "user" scheme + :param home: indicates to use the "home" scheme and provides the base + directory for the same + :param root: root under which other directories are re-based + :param isolated: equivalent to --no-user-cfg, i.e. do not consider + ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for + scheme paths + :param prefix: indicates to use the "prefix" scheme and provides the + base directory for the same + """ + scheme = distutils_scheme( + dist_name, user, home, root, isolated, prefix + ) + return Scheme( + platlib=scheme["platlib"], + purelib=scheme["purelib"], + headers=scheme["headers"], + scripts=scheme["scripts"], + data=scheme["data"], + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/main.py b/backend/test/lib/python3.8/site-packages/pip/_internal/main.py new file mode 100644 index 0000000000000000000000000000000000000000..3208d5b8820eadf8a1ebe4851c984c6033c289bd --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/main.py @@ -0,0 +1,16 @@ +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, List + + +def main(args=None): + # type: (Optional[List[str]]) -> int + """This is preserved for old console scripts that may still be referencing + it. + + For additional details, see https://github.com/pypa/pip/issues/7498. + """ + from pip._internal.utils.entrypoints import _wrapper + + return _wrapper(args) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/__init__.py b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7855226e4b500142deef8fb247cd33a9a991d122 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__init__.py @@ -0,0 +1,2 @@ +"""A package that contains models that represent entities. 
+""" diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f02e02ba7df681e1e6cde022c6de7f58c201077c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/candidate.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/candidate.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..38bc9ce9a347e8a1af8a522e1188f546d577b78d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/candidate.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/format_control.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/format_control.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d592782dfdcd9372686df70cddc2a68c3b837b60 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/format_control.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/index.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/index.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c45bbddefe151ebfc19be9f183d098ecf7649df8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/index.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/link.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/link.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..54173877c9afec6fdc29c87e7f7ecea15ae43e5b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/link.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/scheme.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/scheme.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..83dfe3e34acd8d30e4c5c1b7e7716e0d8a1a5303 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/scheme.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7b90ce6eaba6b7a8f8dfc78f3ecc8b33f7211a6d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6aa7a0a4e05f172e938ef4700a579fb61507aed3 Binary files /dev/null 
and b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/target_python.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/target_python.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..19171cfb282f1b50c0f6b4490e1bf81f1d6e26c3 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/target_python.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/wheel.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/wheel.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9196dab1d415c23c8dc4097e355a825e08086570 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/models/__pycache__/wheel.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/candidate.py b/backend/test/lib/python3.8/site-packages/pip/_internal/models/candidate.py new file mode 100644 index 0000000000000000000000000000000000000000..1dc1a576eea788c23f5722bbb8e10ae950ef38bd --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/models/candidate.py @@ -0,0 +1,36 @@ +from pip._vendor.packaging.version import parse as parse_version + +from pip._internal.utils.models import KeyBasedCompareMixin +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from pip._vendor.packaging.version import _BaseVersion + from pip._internal.models.link import Link + + +class InstallationCandidate(KeyBasedCompareMixin): + """Represents a potential "candidate" for installation. + """ + + def __init__(self, name, version, link): + # type: (str, str, Link) -> None + self.name = name + self.version = parse_version(version) # type: _BaseVersion + self.link = link + + super(InstallationCandidate, self).__init__( + key=(self.name, self.version, self.link), + defining_class=InstallationCandidate + ) + + def __repr__(self): + # type: () -> str + return "<InstallationCandidate({!r}, {!r}, {!r})>".format( + self.name, self.version, self.link, + ) + + def __str__(self): + # type: () -> str + return '{!r} candidate (version {} at {})'.format( + self.name, self.version, self.link, + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/format_control.py b/backend/test/lib/python3.8/site-packages/pip/_internal/models/format_control.py new file mode 100644 index 0000000000000000000000000000000000000000..2e13727ca006977f3fb2df30fd1a25bb1670cf3e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/models/format_control.py @@ -0,0 +1,84 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal.exceptions import CommandError +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, Set, FrozenSet + + +class FormatControl(object): + """Helper for managing formats from which a package can be installed. 
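+    Accepts the same special values as the ``--no-binary`` /
+    ``--only-binary`` options: ``:all:`` matches every package and
+    ``:none:`` empties the set it appears in.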
+ """ + + def __init__(self, no_binary=None, only_binary=None): + # type: (Optional[Set[str]], Optional[Set[str]]) -> None + if no_binary is None: + no_binary = set() + if only_binary is None: + only_binary = set() + + self.no_binary = no_binary + self.only_binary = only_binary + + def __eq__(self, other): + # type: (object) -> bool + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + # type: (object) -> bool + return not self.__eq__(other) + + def __repr__(self): + # type: () -> str + return "{}({}, {})".format( + self.__class__.__name__, + self.no_binary, + self.only_binary + ) + + @staticmethod + def handle_mutual_excludes(value, target, other): + # type: (str, Optional[Set[str]], Optional[Set[str]]) -> None + if value.startswith('-'): + raise CommandError( + "--no-binary / --only-binary option requires 1 argument." + ) + new = value.split(',') + while ':all:' in new: + other.clear() + target.clear() + target.add(':all:') + del new[:new.index(':all:') + 1] + # Without a none, we want to discard everything as :all: covers it + if ':none:' not in new: + return + for name in new: + if name == ':none:': + target.clear() + continue + name = canonicalize_name(name) + other.discard(name) + target.add(name) + + def get_allowed_formats(self, canonical_name): + # type: (str) -> FrozenSet[str] + result = {"binary", "source"} + if canonical_name in self.only_binary: + result.discard('source') + elif canonical_name in self.no_binary: + result.discard('binary') + elif ':all:' in self.only_binary: + result.discard('source') + elif ':all:' in self.no_binary: + result.discard('binary') + return frozenset(result) + + def disallow_binaries(self): + # type: () -> None + self.handle_mutual_excludes( + ':all:', self.no_binary, self.only_binary, + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/index.py b/backend/test/lib/python3.8/site-packages/pip/_internal/models/index.py new file mode 100644 index 0000000000000000000000000000000000000000..ead1efbda761ebed373700ce9e69797838c2b9d9 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/models/index.py @@ -0,0 +1,31 @@ +from pip._vendor.six.moves.urllib import parse as urllib_parse + + +class PackageIndex(object): + """Represents a Package Index and provides easier access to endpoints + """ + + def __init__(self, url, file_storage_domain): + # type: (str, str) -> None + super(PackageIndex, self).__init__() + self.url = url + self.netloc = urllib_parse.urlsplit(url).netloc + self.simple_url = self._url_for_path('simple') + self.pypi_url = self._url_for_path('pypi') + + # This is part of a temporary hack used to block installs of PyPI + # packages which depend on external urls only necessary until PyPI can + # block such packages themselves + self.file_storage_domain = file_storage_domain + + def _url_for_path(self, path): + # type: (str) -> str + return urllib_parse.urljoin(self.url, path) + + +PyPI = PackageIndex( + 'https://pypi.org/', file_storage_domain='files.pythonhosted.org' +) +TestPyPI = PackageIndex( + 'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org' +) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/link.py b/backend/test/lib/python3.8/site-packages/pip/_internal/models/link.py new file mode 100644 index 0000000000000000000000000000000000000000..34fbcbfe7e4dcc6873288db8455890ce77405405 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/models/link.py @@ -0,0 +1,227 @@ +import os +import posixpath +import 
re + +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.utils.filetypes import WHEEL_EXTENSION +from pip._internal.utils.misc import ( + redact_auth_from_url, + split_auth_from_netloc, + splitext, +) +from pip._internal.utils.models import KeyBasedCompareMixin +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url, url_to_path + +if MYPY_CHECK_RUNNING: + from typing import Optional, Text, Tuple, Union + from pip._internal.index.collector import HTMLPage + from pip._internal.utils.hashes import Hashes + + +class Link(KeyBasedCompareMixin): + """Represents a parsed link from a Package Index's simple URL + """ + + def __init__( + self, + url, # type: str + comes_from=None, # type: Optional[Union[str, HTMLPage]] + requires_python=None, # type: Optional[str] + yanked_reason=None, # type: Optional[Text] + ): + # type: (...) -> None + """ + :param url: url of the resource pointed to (href of the link) + :param comes_from: instance of HTMLPage where the link was found, + or string. + :param requires_python: String containing the `Requires-Python` + metadata field, specified in PEP 345. This may be specified by + a data-requires-python attribute in the HTML link tag, as + described in PEP 503. + :param yanked_reason: the reason the file has been yanked, if the + file has been yanked, or None if the file hasn't been yanked. + This is the value of the "data-yanked" attribute, if present, in + a simple repository HTML link. If the file has been yanked but + no reason was provided, this should be the empty string. See + PEP 592 for more information and the specification. + """ + + # url can be a UNC windows share + if url.startswith('\\\\'): + url = path_to_url(url) + + self._parsed_url = urllib_parse.urlsplit(url) + # Store the url as a private attribute to prevent accidentally + # trying to set a new value. + self._url = url + + self.comes_from = comes_from + self.requires_python = requires_python if requires_python else None + self.yanked_reason = yanked_reason + + super(Link, self).__init__(key=url, defining_class=Link) + + def __str__(self): + # type: () -> str + if self.requires_python: + rp = ' (requires-python:%s)' % self.requires_python + else: + rp = '' + if self.comes_from: + return '%s (from %s)%s' % (redact_auth_from_url(self._url), + self.comes_from, rp) + else: + return redact_auth_from_url(str(self._url)) + + def __repr__(self): + # type: () -> str + return '<Link %s>' % self + + @property + def url(self): + # type: () -> str + return self._url + + @property + def filename(self): + # type: () -> str + path = self.path.rstrip('/') + name = posixpath.basename(path) + if not name: + # Make sure we don't leak auth information if the netloc + # includes a username and password. + netloc, user_pass = split_auth_from_netloc(self.netloc) + return netloc + + name = urllib_parse.unquote(name) + assert name, ('URL %r produced no filename' % self._url) + return name + + @property + def file_path(self): + # type: () -> str + return url_to_path(self.url) + + @property + def scheme(self): + # type: () -> str + return self._parsed_url.scheme + + @property + def netloc(self): + # type: () -> str + """ + This can contain auth information. 
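+        For example, for ``https://user:pass@example.com/simple/`` this
+        is ``user:pass@example.com``.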
+ """ + return self._parsed_url.netloc + + @property + def path(self): + # type: () -> str + return urllib_parse.unquote(self._parsed_url.path) + + def splitext(self): + # type: () -> Tuple[str, str] + return splitext(posixpath.basename(self.path.rstrip('/'))) + + @property + def ext(self): + # type: () -> str + return self.splitext()[1] + + @property + def url_without_fragment(self): + # type: () -> str + scheme, netloc, path, query, fragment = self._parsed_url + return urllib_parse.urlunsplit((scheme, netloc, path, query, None)) + + _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)') + + @property + def egg_fragment(self): + # type: () -> Optional[str] + match = self._egg_fragment_re.search(self._url) + if not match: + return None + return match.group(1) + + _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)') + + @property + def subdirectory_fragment(self): + # type: () -> Optional[str] + match = self._subdirectory_fragment_re.search(self._url) + if not match: + return None + return match.group(1) + + _hash_re = re.compile( + r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)' + ) + + @property + def hash(self): + # type: () -> Optional[str] + match = self._hash_re.search(self._url) + if match: + return match.group(2) + return None + + @property + def hash_name(self): + # type: () -> Optional[str] + match = self._hash_re.search(self._url) + if match: + return match.group(1) + return None + + @property + def show_url(self): + # type: () -> str + return posixpath.basename(self._url.split('#', 1)[0].split('?', 1)[0]) + + @property + def is_file(self): + # type: () -> bool + return self.scheme == 'file' + + def is_existing_dir(self): + # type: () -> bool + return self.is_file and os.path.isdir(self.file_path) + + @property + def is_wheel(self): + # type: () -> bool + return self.ext == WHEEL_EXTENSION + + @property + def is_vcs(self): + # type: () -> bool + from pip._internal.vcs import vcs + + return self.scheme in vcs.all_schemes + + @property + def is_yanked(self): + # type: () -> bool + return self.yanked_reason is not None + + @property + def has_hash(self): + # type: () -> bool + return self.hash_name is not None + + def is_hash_allowed(self, hashes): + # type: (Optional[Hashes]) -> bool + """ + Return True if the link has a hash and it is allowed. + """ + if hashes is None or not self.has_hash: + return False + # Assert non-None so mypy knows self.hash_name and self.hash are str. + assert self.hash_name is not None + assert self.hash is not None + + return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/scheme.py b/backend/test/lib/python3.8/site-packages/pip/_internal/models/scheme.py new file mode 100644 index 0000000000000000000000000000000000000000..af07b4078f997b5c6005c042ac178282c49fd5e7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/models/scheme.py @@ -0,0 +1,25 @@ +""" +For types associated with installation schemes. + +For a general overview of available schemes and their context, see +https://docs.python.org/3/install/index.html#alternate-installation. +""" + + +class Scheme(object): + """A Scheme holds paths which are used as the base directories for + artifacts associated with a Python package. 
+ """ + def __init__( + self, + platlib, # type: str + purelib, # type: str + headers, # type: str + scripts, # type: str + data, # type: str + ): + self.platlib = platlib + self.purelib = purelib + self.headers = headers + self.scripts = scripts + self.data = data diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/search_scope.py b/backend/test/lib/python3.8/site-packages/pip/_internal/models/search_scope.py new file mode 100644 index 0000000000000000000000000000000000000000..138d1b6eedf8d5b58f25c821ac393408d1e73067 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/models/search_scope.py @@ -0,0 +1,114 @@ +import itertools +import logging +import os +import posixpath + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.models.index import PyPI +from pip._internal.utils.compat import has_tls +from pip._internal.utils.misc import normalize_path, redact_auth_from_url +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List + + +logger = logging.getLogger(__name__) + + +class SearchScope(object): + + """ + Encapsulates the locations that pip is configured to search. + """ + + @classmethod + def create( + cls, + find_links, # type: List[str] + index_urls, # type: List[str] + ): + # type: (...) -> SearchScope + """ + Create a SearchScope object after normalizing the `find_links`. + """ + # Build find_links. If an argument starts with ~, it may be + # a local file relative to a home directory. So try normalizing + # it and if it exists, use the normalized version. + # This is deliberately conservative - it might be fine just to + # blindly normalize anything starting with a ~... + built_find_links = [] # type: List[str] + for link in find_links: + if link.startswith('~'): + new_link = normalize_path(link) + if os.path.exists(new_link): + link = new_link + built_find_links.append(link) + + # If we don't have TLS enabled, then WARN if anyplace we're looking + # relies on TLS. + if not has_tls(): + for link in itertools.chain(index_urls, built_find_links): + parsed = urllib_parse.urlparse(link) + if parsed.scheme == 'https': + logger.warning( + 'pip is configured with locations that require ' + 'TLS/SSL, however the ssl module in Python is not ' + 'available.' + ) + break + + return cls( + find_links=built_find_links, + index_urls=index_urls, + ) + + def __init__( + self, + find_links, # type: List[str] + index_urls, # type: List[str] + ): + # type: (...) 
-> None + self.find_links = find_links + self.index_urls = index_urls + + def get_formatted_locations(self): + # type: () -> str + lines = [] + if self.index_urls and self.index_urls != [PyPI.simple_url]: + lines.append( + 'Looking in indexes: {}'.format(', '.join( + redact_auth_from_url(url) for url in self.index_urls)) + ) + if self.find_links: + lines.append( + 'Looking in links: {}'.format(', '.join( + redact_auth_from_url(url) for url in self.find_links)) + ) + return '\n'.join(lines) + + def get_index_urls_locations(self, project_name): + # type: (str) -> List[str] + """Returns the locations found via self.index_urls + + Checks the url_name on the main (first in the list) index and + use this url_name to produce all locations + """ + + def mkurl_pypi_url(url): + # type: (str) -> str + loc = posixpath.join( + url, + urllib_parse.quote(canonicalize_name(project_name))) + # For maximum compatibility with easy_install, ensure the path + # ends in a trailing slash. Although this isn't in the spec + # (and PyPI can handle it without the slash) some other index + # implementations might break if they relied on easy_install's + # behavior. + if not loc.endswith('/'): + loc = loc + '/' + return loc + + return [mkurl_pypi_url(url) for url in self.index_urls] diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/selection_prefs.py b/backend/test/lib/python3.8/site-packages/pip/_internal/models/selection_prefs.py new file mode 100644 index 0000000000000000000000000000000000000000..f58fdce9cdfcb9320c09f0652ff20a9dc52f3701 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/models/selection_prefs.py @@ -0,0 +1,47 @@ +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional + from pip._internal.models.format_control import FormatControl + + +class SelectionPreferences(object): + + """ + Encapsulates the candidate selection preferences for downloading + and installing files. + """ + + # Don't include an allow_yanked default value to make sure each call + # site considers whether yanked releases are allowed. This also causes + # that decision to be made explicit in the calling code, which helps + # people when reading the code. + def __init__( + self, + allow_yanked, # type: bool + allow_all_prereleases=False, # type: bool + format_control=None, # type: Optional[FormatControl] + prefer_binary=False, # type: bool + ignore_requires_python=None, # type: Optional[bool] + ): + # type: (...) -> None + """Create a SelectionPreferences object. + + :param allow_yanked: Whether files marked as yanked (in the sense + of PEP 592) are permitted to be candidates for install. + :param format_control: A FormatControl object or None. Used to control + the selection of source packages / binary packages when consulting + the index and links. + :param prefer_binary: Whether to prefer an old, but valid, binary + dist over a new source dist. + :param ignore_requires_python: Whether to ignore incompatible + "Requires-Python" values in links. Defaults to False. 
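+        :param allow_all_prereleases: Whether pre-release and development
+            versions may be selected as candidates (as with ``--pre``).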
+ """ + if ignore_requires_python is None: + ignore_requires_python = False + + self.allow_yanked = allow_yanked + self.allow_all_prereleases = allow_all_prereleases + self.format_control = format_control + self.prefer_binary = prefer_binary + self.ignore_requires_python = ignore_requires_python diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/target_python.py b/backend/test/lib/python3.8/site-packages/pip/_internal/models/target_python.py new file mode 100644 index 0000000000000000000000000000000000000000..97ae85a0945b88e63db603fbeb4d49bdc339fa6a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/models/target_python.py @@ -0,0 +1,107 @@ +import sys + +from pip._internal.pep425tags import get_supported, version_info_to_nodot +from pip._internal.utils.misc import normalize_version_info +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Tuple + + from pip._vendor.packaging.tags import Tag + + +class TargetPython(object): + + """ + Encapsulates the properties of a Python interpreter one is targeting + for a package install, download, etc. + """ + + def __init__( + self, + platform=None, # type: Optional[str] + py_version_info=None, # type: Optional[Tuple[int, ...]] + abi=None, # type: Optional[str] + implementation=None, # type: Optional[str] + ): + # type: (...) -> None + """ + :param platform: A string or None. If None, searches for packages + that are supported by the current system. Otherwise, will find + packages that can be built on the platform passed in. These + packages will only be downloaded for distribution: they will + not be built locally. + :param py_version_info: An optional tuple of ints representing the + Python version information to use (e.g. `sys.version_info[:3]`). + This can have length 1, 2, or 3 when provided. + :param abi: A string or None. This is passed to pep425tags.py's + get_supported() function as is. + :param implementation: A string or None. This is passed to + pep425tags.py's get_supported() function as is. + """ + # Store the given py_version_info for when we call get_supported(). + self._given_py_version_info = py_version_info + + if py_version_info is None: + py_version_info = sys.version_info[:3] + else: + py_version_info = normalize_version_info(py_version_info) + + py_version = '.'.join(map(str, py_version_info[:2])) + + self.abi = abi + self.implementation = implementation + self.platform = platform + self.py_version = py_version + self.py_version_info = py_version_info + + # This is used to cache the return value of get_tags(). + self._valid_tags = None # type: Optional[List[Tag]] + + def format_given(self): + # type: () -> str + """ + Format the given, non-None attributes for display. + """ + display_version = None + if self._given_py_version_info is not None: + display_version = '.'.join( + str(part) for part in self._given_py_version_info + ) + + key_values = [ + ('platform', self.platform), + ('version_info', display_version), + ('abi', self.abi), + ('implementation', self.implementation), + ] + return ' '.join( + '{}={!r}'.format(key, value) for key, value in key_values + if value is not None + ) + + def get_tags(self): + # type: () -> List[Tag] + """ + Return the supported PEP 425 tags to check wheel candidates against. + + The tags are returned in order of preference (most preferred first). 
+ """ + if self._valid_tags is None: + # Pass versions=None if no py_version_info was given since + # versions=None uses special default logic. + py_version_info = self._given_py_version_info + if py_version_info is None: + version = None + else: + version = version_info_to_nodot(py_version_info) + + tags = get_supported( + version=version, + platform=self.platform, + abi=self.abi, + impl=self.implementation, + ) + self._valid_tags = tags + + return self._valid_tags diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/models/wheel.py b/backend/test/lib/python3.8/site-packages/pip/_internal/models/wheel.py new file mode 100644 index 0000000000000000000000000000000000000000..34d8c2ec3c024f9638946e1dedc55edbd8029473 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/models/wheel.py @@ -0,0 +1,78 @@ +"""Represents a wheel file and provides access to the various parts of the +name that have meaning. +""" +import re + +from pip._vendor.packaging.tags import Tag + +from pip._internal.exceptions import InvalidWheelFilename +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List + + +class Wheel(object): + """A wheel file""" + + wheel_file_re = re.compile( + r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?)) + ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?) + \.whl|\.dist-info)$""", + re.VERBOSE + ) + + def __init__(self, filename): + # type: (str) -> None + """ + :raises InvalidWheelFilename: when the filename is invalid for a wheel + """ + wheel_info = self.wheel_file_re.match(filename) + if not wheel_info: + raise InvalidWheelFilename( + "%s is not a valid wheel filename." % filename + ) + self.filename = filename + self.name = wheel_info.group('name').replace('_', '-') + # we'll assume "_" means "-" due to wheel naming scheme + # (https://github.com/pypa/pip/issues/1150) + self.version = wheel_info.group('ver').replace('_', '-') + self.build_tag = wheel_info.group('build') + self.pyversions = wheel_info.group('pyver').split('.') + self.abis = wheel_info.group('abi').split('.') + self.plats = wheel_info.group('plat').split('.') + + # All the tag combinations from this file + self.file_tags = { + Tag(x, y, z) for x in self.pyversions + for y in self.abis for z in self.plats + } + + def get_formatted_file_tags(self): + # type: () -> List[str] + """Return the wheel's tags as a sorted list of strings.""" + return sorted(str(tag) for tag in self.file_tags) + + def support_index_min(self, tags): + # type: (List[Tag]) -> int + """Return the lowest index that one of the wheel's file_tag combinations + achieves in the given list of supported tags. + + For example, if there are 8 supported tags and one of the file tags + is first in the list, then return 0. + + :param tags: the PEP 425 tags to check the wheel against, in order + with most preferred first. + + :raises ValueError: If none of the wheel's file tags match one of + the supported tags. + """ + return min(tags.index(tag) for tag in self.file_tags if tag in tags) + + def supported(self, tags): + # type: (List[Tag]) -> bool + """Return whether the wheel is compatible with one of the given tags. + + :param tags: the PEP 425 tags to check the wheel against. 
+ """ + return not self.file_tags.isdisjoint(tags) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/network/__init__.py b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b51bde91b2e5b4e557ed9b70fc113843cc3d49ae --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__init__.py @@ -0,0 +1,2 @@ +"""Contains purely network-related utilities. +""" diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aa8fcaf34cf5cbaa4c8bb93fad8cde467d275d82 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/auth.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/auth.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3081375b94c8b36d4726def9c1995a64e4712b49 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/auth.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/cache.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/cache.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b1e5045f555f4ca53f5f520a3dbf1680f9208ecc Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/cache.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/download.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/download.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8f0583a8d41d019d96b9d4482770ad5cf3dc84e4 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/download.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/session.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/session.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8461c444a12458e1f274000904eac9902dd34804 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/session.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/utils.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..50d6762e96ebc9f738e826144b47f6b9678eadcc Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/utils.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2fd5c422495522cb56bb05c4196a4e99029eccdb Binary files /dev/null and 
b/backend/test/lib/python3.8/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/network/auth.py b/backend/test/lib/python3.8/site-packages/pip/_internal/network/auth.py new file mode 100644 index 0000000000000000000000000000000000000000..1e1da54ca59d8d42b53b51f95b876b369f76b4a1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/network/auth.py @@ -0,0 +1,298 @@ +"""Network Authentication Helpers + +Contains interface (MultiDomainBasicAuth) and associated glue code for +providing credentials in the context of network requests. +""" + +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +import logging + +from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth +from pip._vendor.requests.utils import get_netrc_auth +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.utils.misc import ( + ask, + ask_input, + ask_password, + remove_auth_from_url, + split_auth_netloc_from_url, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import Dict, Optional, Tuple + + from pip._internal.vcs.versioncontrol import AuthInfo + + Credentials = Tuple[str, str, str] + +logger = logging.getLogger(__name__) + +try: + import keyring # noqa +except ImportError: + keyring = None +except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", str(exc), + ) + keyring = None + + +def get_keyring_auth(url, username): + """Return the tuple auth for a given url from keyring.""" + if not url or not keyring: + return None + + try: + try: + get_credential = keyring.get_credential + except AttributeError: + pass + else: + logger.debug("Getting credentials from keyring for %s", url) + cred = get_credential(url, username) + if cred is not None: + return cred.username, cred.password + return None + + if username: + logger.debug("Getting password from keyring for %s", url) + password = keyring.get_password(url, username) + if password: + return username, password + + except Exception as exc: + logger.warning( + "Keyring is skipped due to an exception: %s", str(exc), + ) + + +class MultiDomainBasicAuth(AuthBase): + + def __init__(self, prompting=True, index_urls=None): + # type: (bool, Optional[Values]) -> None + self.prompting = prompting + self.index_urls = index_urls + self.passwords = {} # type: Dict[str, AuthInfo] + # When the user is prompted to enter credentials and keyring is + # available, we will offer to save them. If the user accepts, + # this value is set to the credentials they entered. After the + # request authenticates, the caller should call + # ``save_credentials`` to save these. + self._credentials_to_save = None # type: Optional[Credentials] + + def _get_index_url(self, url): + """Return the original index URL matching the requested URL. + + Cached or dynamically generated credentials may work against + the original index URL rather than just the netloc. + + The provided url should have had its username and password + removed already. If the original index url had credentials then + they will be included in the return value. + + Returns None if no matching index was found, or if --no-index + was specified by the user. 
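+
+        For example, a request for
+        ``https://pypi.example.org/simple/foo/`` matches a configured
+        index url ``https://user:****@pypi.example.org/simple/`` (the
+        prefix comparison is done with credentials stripped).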
+ """ + if not url or not self.index_urls: + return None + + for u in self.index_urls: + prefix = remove_auth_from_url(u).rstrip("/") + "/" + if url.startswith(prefix): + return u + + def _get_new_credentials(self, original_url, allow_netrc=True, + allow_keyring=True): + """Find and return credentials for the specified URL.""" + # Split the credentials and netloc from the url. + url, netloc, url_user_password = split_auth_netloc_from_url( + original_url, + ) + + # Start with the credentials embedded in the url + username, password = url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in url for %s", netloc) + return url_user_password + + # Find a matching index url for this request + index_url = self._get_index_url(url) + if index_url: + # Split the credentials from the url. + index_info = split_auth_netloc_from_url(index_url) + if index_info: + index_url, _, index_url_user_password = index_info + logger.debug("Found index url %s", index_url) + + # If an index URL was found, try its embedded credentials + if index_url and index_url_user_password[0] is not None: + username, password = index_url_user_password + if username is not None and password is not None: + logger.debug("Found credentials in index url for %s", netloc) + return index_url_user_password + + # Get creds from netrc if we still don't have them + if allow_netrc: + netrc_auth = get_netrc_auth(original_url) + if netrc_auth: + logger.debug("Found credentials in netrc for %s", netloc) + return netrc_auth + + # If we don't have a password and keyring is available, use it. + if allow_keyring: + # The index url is more specific than the netloc, so try it first + kr_auth = ( + get_keyring_auth(index_url, username) or + get_keyring_auth(netloc, username) + ) + if kr_auth: + logger.debug("Found credentials in keyring for %s", netloc) + return kr_auth + + return username, password + + def _get_url_and_credentials(self, original_url): + """Return the credentials to use for the provided URL. + + If allowed, netrc and keyring may be used to obtain the + correct credentials. + + Returns (url_without_credentials, username, password). Note + that even if the original URL contains credentials, this + function may return a different username and password. + """ + url, netloc, _ = split_auth_netloc_from_url(original_url) + + # Use any stored credentials that we have for this netloc + username, password = self.passwords.get(netloc, (None, None)) + + if username is None and password is None: + # No stored credentials. Acquire new credentials without prompting + # the user. (e.g. from netrc, keyring, or the URL itself) + username, password = self._get_new_credentials(original_url) + + if username is not None or password is not None: + # Convert the username and password if they're None, so that + # this netloc will show up as "cached" in the conditional above. + # Further, HTTPBasicAuth doesn't accept None, so it makes sense to + # cache the value that is going to be used. + username = username or "" + password = password or "" + + # Store any acquired credentials. 
+ self.passwords[netloc] = (username, password) + + assert ( + # Credentials were found + (username is not None and password is not None) or + # Credentials were not found + (username is None and password is None) + ), "Could not load credentials from url: {}".format(original_url) + + return url, username, password + + def __call__(self, req): + # Get credentials for this request + url, username, password = self._get_url_and_credentials(req.url) + + # Set the url of the request to the url without any credentials + req.url = url + + if username is not None and password is not None: + # Send the basic auth with this request + req = HTTPBasicAuth(username, password)(req) + + # Attach a hook to handle 401 responses + req.register_hook("response", self.handle_401) + + return req + + # Factored out to allow for easy patching in tests + def _prompt_for_password(self, netloc): + username = ask_input("User for %s: " % netloc) + if not username: + return None, None + auth = get_keyring_auth(netloc, username) + if auth: + return auth[0], auth[1], False + password = ask_password("Password: ") + return username, password, True + + # Factored out to allow for easy patching in tests + def _should_save_password_to_keyring(self): + if not keyring: + return False + return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y" + + def handle_401(self, resp, **kwargs): + # We only care about 401 responses, anything else we want to just + # pass through the actual response + if resp.status_code != 401: + return resp + + # We are not able to prompt the user so simply return the response + if not self.prompting: + return resp + + parsed = urllib_parse.urlparse(resp.url) + + # Prompt the user for a new username and password + username, password, save = self._prompt_for_password(parsed.netloc) + + # Store the new username and password to use for future requests + self._credentials_to_save = None + if username is not None and password is not None: + self.passwords[parsed.netloc] = (username, password) + + # Prompt to save the password to keyring + if save and self._should_save_password_to_keyring(): + self._credentials_to_save = (parsed.netloc, username, password) + + # Consume content and release the original connection to allow our new + # request to reuse the same one. + resp.content + resp.raw.release_conn() + + # Add our new username and password to the request + req = HTTPBasicAuth(username or "", password or "")(resp.request) + req.register_hook("response", self.warn_on_401) + + # On successful request, save the credentials that were used to + # keyring. (Note that if the user responded "no" above, this member + # is not set and nothing will be saved.) 
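+        # ``save_credentials`` is registered as a response hook, so it
+        # only runs once the retried request has come back (and it checks
+        # the status code before saving).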
+ if self._credentials_to_save: + req.register_hook("response", self.save_credentials) + + # Send our new request + new_resp = resp.connection.send(req, **kwargs) + new_resp.history.append(resp) + + return new_resp + + def warn_on_401(self, resp, **kwargs): + """Response callback to warn about incorrect credentials.""" + if resp.status_code == 401: + logger.warning( + '401 Error, Credentials not correct for %s', resp.request.url, + ) + + def save_credentials(self, resp, **kwargs): + """Response callback to save credentials on success.""" + assert keyring is not None, "should never reach here without keyring" + if not keyring: + return + + creds = self._credentials_to_save + self._credentials_to_save = None + if creds and resp.status_code < 400: + try: + logger.info('Saving credentials to keyring') + keyring.set_password(*creds) + except Exception: + logger.exception('Failed to save credentials') diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/network/cache.py b/backend/test/lib/python3.8/site-packages/pip/_internal/network/cache.py new file mode 100644 index 0000000000000000000000000000000000000000..c9386e173600d58dacda2061f49d747de386a50a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/network/cache.py @@ -0,0 +1,81 @@ +"""HTTP cache implementation. +""" + +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +import os +from contextlib import contextmanager + +from pip._vendor.cachecontrol.cache import BaseCache +from pip._vendor.cachecontrol.caches import FileCache +from pip._vendor.requests.models import Response + +from pip._internal.utils.filesystem import adjacent_tmp_file, replace +from pip._internal.utils.misc import ensure_dir +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional + + +def is_from_cache(response): + # type: (Response) -> bool + return getattr(response, "from_cache", False) + + +@contextmanager +def suppressed_cache_errors(): + """If we can't access the cache then we can just skip caching and process + requests as if caching wasn't enabled. + """ + try: + yield + except (OSError, IOError): + pass + + +class SafeFileCache(BaseCache): + """ + A file based cache which is safe to use even when the target directory may + not be accessible or writable. + """ + + def __init__(self, directory): + # type: (str) -> None + assert directory is not None, "Cache directory must not be None." + super(SafeFileCache, self).__init__() + self.directory = directory + + def _get_cache_path(self, name): + # type: (str) -> str + # From cachecontrol.caches.file_cache.FileCache._fn, brought into our + # class for backwards-compatibility and to avoid using a non-public + # method. 
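# The path layout computed below shards each key under five single-character
# directory levels taken from the front of a hex digest, so no one directory
# accumulates an unbounded number of entries. A standalone sketch of that
# layout plus the atomic write used by set() further down (assuming, as in
# cachecontrol, that FileCache.encode is a SHA-224 hex digest):
import hashlib
import os
import tempfile

def cache_path(directory, key):
    hashed = hashlib.sha224(key.encode()).hexdigest()
    # For digest "ab12cd...": <directory>/a/b/1/2/c/ab12cd...
    return os.path.join(directory, *(list(hashed[:5]) + [hashed]))

def atomic_set(directory, key, value):
    path = cache_path(directory, key)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path))
    with os.fdopen(fd, "wb") as f:
        f.write(value)
    os.replace(tmp, path)  # atomic rename: readers see old or new, never half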
+ hashed = FileCache.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key): + # type: (str) -> Optional[bytes] + path = self._get_cache_path(key) + with suppressed_cache_errors(): + with open(path, 'rb') as f: + return f.read() + + def set(self, key, value): + # type: (str, bytes) -> None + path = self._get_cache_path(key) + with suppressed_cache_errors(): + ensure_dir(os.path.dirname(path)) + + with adjacent_tmp_file(path) as f: + f.write(value) + + replace(f.name, path) + + def delete(self, key): + # type: (str) -> None + path = self._get_cache_path(key) + with suppressed_cache_errors(): + os.remove(path) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/network/download.py b/backend/test/lib/python3.8/site-packages/pip/_internal/network/download.py new file mode 100644 index 0000000000000000000000000000000000000000..c90c4bf42cfe25c7c417c3776b7d5844417b9186 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/network/download.py @@ -0,0 +1,200 @@ +"""Download files with progress indicators. +""" +import cgi +import logging +import mimetypes +import os + +from pip._vendor import requests +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE + +from pip._internal.models.index import PyPI +from pip._internal.network.cache import is_from_cache +from pip._internal.network.utils import response_chunks +from pip._internal.utils.misc import ( + format_size, + redact_auth_from_url, + splitext, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.ui import DownloadProgressProvider + +if MYPY_CHECK_RUNNING: + from typing import Iterable, Optional + + from pip._vendor.requests.models import Response + + from pip._internal.models.link import Link + from pip._internal.network.session import PipSession + +logger = logging.getLogger(__name__) + + +def _get_http_response_size(resp): + # type: (Response) -> Optional[int] + try: + return int(resp.headers['content-length']) + except (ValueError, KeyError, TypeError): + return None + + +def _prepare_download( + resp, # type: Response + link, # type: Link + progress_bar # type: str +): + # type: (...) -> Iterable[bytes] + total_length = _get_http_response_size(resp) + + if link.netloc == PyPI.file_storage_domain: + url = link.show_url + else: + url = link.url_without_fragment + + logged_url = redact_auth_from_url(url) + + if total_length: + logged_url = '{} ({})'.format(logged_url, format_size(total_length)) + + if is_from_cache(resp): + logger.info("Using cached %s", logged_url) + else: + logger.info("Downloading %s", logged_url) + + if logger.getEffectiveLevel() > logging.INFO: + show_progress = False + elif is_from_cache(resp): + show_progress = False + elif not total_length: + show_progress = True + elif total_length > (40 * 1000): + show_progress = True + else: + show_progress = False + + chunks = response_chunks(resp, CONTENT_CHUNK_SIZE) + + if not show_progress: + return chunks + + return DownloadProgressProvider( + progress_bar, max=total_length + )(chunks) + + +def sanitize_content_filename(filename): + # type: (str) -> str + """ + Sanitize the "filename" value from a Content-Disposition header. + """ + return os.path.basename(filename) + + +def parse_content_disposition(content_disposition, default_filename): + # type: (str, str) -> str + """ + Parse the "filename" value from a Content-Disposition header, and + return the default filename if the result is empty. 
+ """ + _type, params = cgi.parse_header(content_disposition) + filename = params.get('filename') + if filename: + # We need to sanitize the filename to prevent directory traversal + # in case the filename contains ".." path parts. + filename = sanitize_content_filename(filename) + return filename or default_filename + + +def _get_http_response_filename(resp, link): + # type: (Response, Link) -> str + """Get an ideal filename from the given HTTP response, falling back to + the link filename if not provided. + """ + filename = link.filename # fallback + # Have a look at the Content-Disposition header for a better guess + content_disposition = resp.headers.get('content-disposition') + if content_disposition: + filename = parse_content_disposition(content_disposition, filename) + ext = splitext(filename)[1] # type: Optional[str] + if not ext: + ext = mimetypes.guess_extension( + resp.headers.get('content-type', '') + ) + if ext: + filename += ext + if not ext and link.url != resp.url: + ext = os.path.splitext(resp.url)[1] + if ext: + filename += ext + return filename + + +def _http_get_download(session, link): + # type: (PipSession, Link) -> Response + target_url = link.url.split('#', 1)[0] + resp = session.get( + target_url, + # We use Accept-Encoding: identity here because requests + # defaults to accepting compressed responses. This breaks in + # a variety of ways depending on how the server is configured. + # - Some servers will notice that the file isn't a compressible + # file and will leave the file alone and with an empty + # Content-Encoding + # - Some servers will notice that the file is already + # compressed and will leave the file alone and will add a + # Content-Encoding: gzip header + # - Some servers won't notice anything at all and will take + # a file that's already been compressed and compress it again + # and set the Content-Encoding: gzip header + # By setting this to request only the identity encoding We're + # hoping to eliminate the third case. Hopefully there does not + # exist a server which when given a file will notice it is + # already compressed and that you're not asking for a + # compressed file and will then decompress it before sending + # because if that's the case I don't think it'll ever be + # possible to make this work. + headers={"Accept-Encoding": "identity"}, + stream=True, + ) + resp.raise_for_status() + return resp + + +class Download(object): + def __init__( + self, + response, # type: Response + filename, # type: str + chunks, # type: Iterable[bytes] + ): + # type: (...) -> None + self.response = response + self.filename = filename + self.chunks = chunks + + +class Downloader(object): + def __init__( + self, + session, # type: PipSession + progress_bar, # type: str + ): + # type: (...) 
-> None + self._session = session + self._progress_bar = progress_bar + + def __call__(self, link): + # type: (Link) -> Download + try: + resp = _http_get_download(self._session, link) + except requests.HTTPError as e: + logger.critical( + "HTTP error %s while getting %s", e.response.status_code, link + ) + raise + + return Download( + resp, + _get_http_response_filename(resp, link), + _prepare_download(resp, link, self._progress_bar), + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/network/session.py b/backend/test/lib/python3.8/site-packages/pip/_internal/network/session.py new file mode 100644 index 0000000000000000000000000000000000000000..f5eb15ef2f6245ee303b8f6297eb8c460945afca --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/network/session.py @@ -0,0 +1,405 @@ +"""PipSession and supporting code, containing all pip-specific +network request configuration and behavior. +""" + +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +import email.utils +import json +import logging +import mimetypes +import os +import platform +import sys +import warnings + +from pip._vendor import requests, six, urllib3 +from pip._vendor.cachecontrol import CacheControlAdapter +from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter +from pip._vendor.requests.models import Response +from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.urllib3.exceptions import InsecureRequestWarning + +from pip import __version__ +from pip._internal.network.auth import MultiDomainBasicAuth +from pip._internal.network.cache import SafeFileCache +# Import ssl from compat so the initial import occurs in only one place. +from pip._internal.utils.compat import has_tls, ipaddress +from pip._internal.utils.glibc import libc_ver +from pip._internal.utils.misc import ( + build_url_from_netloc, + get_installed_version, + parse_netloc, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import url_to_path + +if MYPY_CHECK_RUNNING: + from typing import ( + Iterator, List, Optional, Tuple, Union, + ) + + from pip._internal.models.link import Link + + SecureOrigin = Tuple[str, str, Optional[Union[int, str]]] + + +logger = logging.getLogger(__name__) + + +# Ignore warning raised when using --trusted-host. +warnings.filterwarnings("ignore", category=InsecureRequestWarning) + + +SECURE_ORIGINS = [ + # protocol, hostname, port + # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC) + ("https", "*", "*"), + ("*", "localhost", "*"), + ("*", "127.0.0.0/8", "*"), + ("*", "::1/128", "*"), + ("file", "*", None), + # ssh is always secure. + ("ssh", "*", "*"), +] # type: List[SecureOrigin] + + +# These are environment variables present when running under various +# CI systems. For each variable, some CI systems that use the variable +# are indicated. The collection was chosen so that for each of a number +# of popular systems, at least one of the environment variables is used. +# This list is used to provide some indication of and lower bound for +# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive. 
+# For more background, see: https://github.com/pypa/pip/issues/5499 +CI_ENVIRONMENT_VARIABLES = ( + # Azure Pipelines + 'BUILD_BUILDID', + # Jenkins + 'BUILD_ID', + # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI + 'CI', + # Explicit environment variable. + 'PIP_IS_CI', +) + + +def looks_like_ci(): + # type: () -> bool + """ + Return whether it looks like pip is running under CI. + """ + # We don't use the method of checking for a tty (e.g. using isatty()) + # because some CI systems mimic a tty (e.g. Travis CI). Thus that + # method doesn't provide definitive information in either direction. + return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES) + + +def user_agent(): + """ + Return a string representing the user agent. + """ + data = { + "installer": {"name": "pip", "version": __version__}, + "python": platform.python_version(), + "implementation": { + "name": platform.python_implementation(), + }, + } + + if data["implementation"]["name"] == 'CPython': + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == 'PyPy': + if sys.pypy_version_info.releaselevel == 'final': + pypy_version_info = sys.pypy_version_info[:3] + else: + pypy_version_info = sys.pypy_version_info + data["implementation"]["version"] = ".".join( + [str(x) for x in pypy_version_info] + ) + elif data["implementation"]["name"] == 'Jython': + # Complete Guess + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == 'IronPython': + # Complete Guess + data["implementation"]["version"] = platform.python_version() + + if sys.platform.startswith("linux"): + from pip._vendor import distro + distro_infos = dict(filter( + lambda x: x[1], + zip(["name", "version", "id"], distro.linux_distribution()), + )) + libc = dict(filter( + lambda x: x[1], + zip(["lib", "version"], libc_ver()), + )) + if libc: + distro_infos["libc"] = libc + if distro_infos: + data["distro"] = distro_infos + + if sys.platform.startswith("darwin") and platform.mac_ver()[0]: + data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]} + + if platform.system(): + data.setdefault("system", {})["name"] = platform.system() + + if platform.release(): + data.setdefault("system", {})["release"] = platform.release() + + if platform.machine(): + data["cpu"] = platform.machine() + + if has_tls(): + import _ssl as ssl + data["openssl_version"] = ssl.OPENSSL_VERSION + + setuptools_version = get_installed_version("setuptools") + if setuptools_version is not None: + data["setuptools_version"] = setuptools_version + + # Use None rather than False so as not to give the impression that + # pip knows it is not being run under CI. Rather, it is a null or + # inconclusive result. Also, we include some value rather than no + # value to make it easier to know that the check has been run. 
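# The dict assembled above is rendered below as "pip/<version>" followed by
# the same data as compact, key-sorted JSON, so servers get a stable,
# machine-parseable user agent. The final formatting step in isolation
# (field values made up for the example):
import json

def format_user_agent(data):
    return "{}/{} {}".format(
        data["installer"]["name"],
        data["installer"]["version"],
        json.dumps(data, separators=(",", ":"), sort_keys=True),
    )

print(format_user_agent({
    "installer": {"name": "pip", "version": "20.0.2"},
    "python": "3.8.0",
    "ci": None,
}))
# pip/20.0.2 {"ci":null,"installer":{"name":"pip","version":"20.0.2"},"python":"3.8.0"}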
+ data["ci"] = True if looks_like_ci() else None + + user_data = os.environ.get("PIP_USER_AGENT_USER_DATA") + if user_data is not None: + data["user_data"] = user_data + + return "{data[installer][name]}/{data[installer][version]} {json}".format( + data=data, + json=json.dumps(data, separators=(",", ":"), sort_keys=True), + ) + + +class LocalFSAdapter(BaseAdapter): + + def send(self, request, stream=None, timeout=None, verify=None, cert=None, + proxies=None): + pathname = url_to_path(request.url) + + resp = Response() + resp.status_code = 200 + resp.url = request.url + + try: + stats = os.stat(pathname) + except OSError as exc: + resp.status_code = 404 + resp.raw = exc + else: + modified = email.utils.formatdate(stats.st_mtime, usegmt=True) + content_type = mimetypes.guess_type(pathname)[0] or "text/plain" + resp.headers = CaseInsensitiveDict({ + "Content-Type": content_type, + "Content-Length": stats.st_size, + "Last-Modified": modified, + }) + + resp.raw = open(pathname, "rb") + resp.close = resp.raw.close + + return resp + + def close(self): + pass + + +class InsecureHTTPAdapter(HTTPAdapter): + + def cert_verify(self, conn, url, verify, cert): + super(InsecureHTTPAdapter, self).cert_verify( + conn=conn, url=url, verify=False, cert=cert + ) + + +class PipSession(requests.Session): + + timeout = None # type: Optional[int] + + def __init__(self, *args, **kwargs): + """ + :param trusted_hosts: Domains not to emit warnings for when not using + HTTPS. + """ + retries = kwargs.pop("retries", 0) + cache = kwargs.pop("cache", None) + trusted_hosts = kwargs.pop("trusted_hosts", []) # type: List[str] + index_urls = kwargs.pop("index_urls", None) + + super(PipSession, self).__init__(*args, **kwargs) + + # Namespace the attribute with "pip_" just in case to prevent + # possible conflicts with the base class. + self.pip_trusted_origins = [] # type: List[Tuple[str, Optional[int]]] + + # Attach our User Agent to the request + self.headers["User-Agent"] = user_agent() + + # Attach our Authentication handler to the session + self.auth = MultiDomainBasicAuth(index_urls=index_urls) + + # Create our urllib3.Retry instance which will allow us to customize + # how we handle retries. + retries = urllib3.Retry( + # Set the total number of retries that a particular request can + # have. + total=retries, + + # A 503 error from PyPI typically means that the Fastly -> Origin + # connection got interrupted in some way. A 503 error in general + # is typically considered a transient error so we'll go ahead and + # retry it. + # A 500 may indicate transient error in Amazon S3 + # A 520 or 527 - may indicate transient error in CloudFlare + status_forcelist=[500, 503, 520, 527], + + # Add a small amount of back off between failed requests in + # order to prevent hammering the service. + backoff_factor=0.25, + ) + + # We want to _only_ cache responses on securely fetched origins. We do + # this because we can't validate the response of an insecurely fetched + # origin, and we don't want someone to be able to poison the cache and + # require manual eviction from the cache to fix it. + if cache: + secure_adapter = CacheControlAdapter( + cache=SafeFileCache(cache), + max_retries=retries, + ) + else: + secure_adapter = HTTPAdapter(max_retries=retries) + + # Our Insecure HTTPAdapter disables HTTPS validation. It does not + # support caching (see above) so we'll use it for all http:// URLs as + # well as any https:// host that we've marked as ignoring TLS errors + # for. 
+ insecure_adapter = InsecureHTTPAdapter(max_retries=retries) + # Save this for later use in add_insecure_host(). + self._insecure_adapter = insecure_adapter + + self.mount("https://", secure_adapter) + self.mount("http://", insecure_adapter) + + # Enable file:// urls + self.mount("file://", LocalFSAdapter()) + + for host in trusted_hosts: + self.add_trusted_host(host, suppress_logging=True) + + def add_trusted_host(self, host, source=None, suppress_logging=False): + # type: (str, Optional[str], bool) -> None + """ + :param host: It is okay to provide a host that has previously been + added. + :param source: An optional source string, for logging where the host + string came from. + """ + if not suppress_logging: + msg = 'adding trusted host: {!r}'.format(host) + if source is not None: + msg += ' (from {})'.format(source) + logger.info(msg) + + host_port = parse_netloc(host) + if host_port not in self.pip_trusted_origins: + self.pip_trusted_origins.append(host_port) + + self.mount(build_url_from_netloc(host) + '/', self._insecure_adapter) + if not host_port[1]: + # Mount wildcard ports for the same host. + self.mount( + build_url_from_netloc(host) + ':', + self._insecure_adapter + ) + + def iter_secure_origins(self): + # type: () -> Iterator[SecureOrigin] + for secure_origin in SECURE_ORIGINS: + yield secure_origin + for host, port in self.pip_trusted_origins: + yield ('*', host, '*' if port is None else port) + + def is_secure_origin(self, location): + # type: (Link) -> bool + # Determine if this url used a secure transport mechanism + parsed = urllib_parse.urlparse(str(location)) + origin_protocol, origin_host, origin_port = ( + parsed.scheme, parsed.hostname, parsed.port, + ) + + # The protocol to use to see if the protocol matches. + # Don't count the repository type as part of the protocol: in + # cases such as "git+ssh", only use "ssh". (I.e., Only verify against + # the last scheme.) + origin_protocol = origin_protocol.rsplit('+', 1)[-1] + + # Determine if our origin is a secure origin by looking through our + # hardcoded list of secure origins, as well as any additional ones + # configured on this PackageFinder instance. + for secure_origin in self.iter_secure_origins(): + secure_protocol, secure_host, secure_port = secure_origin + if origin_protocol != secure_protocol and secure_protocol != "*": + continue + + try: + addr = ipaddress.ip_address( + None + if origin_host is None + else six.ensure_text(origin_host) + ) + network = ipaddress.ip_network( + six.ensure_text(secure_host) + ) + except ValueError: + # We don't have both a valid address or a valid network, so + # we'll check this origin against hostnames. + if ( + origin_host and + origin_host.lower() != secure_host.lower() and + secure_host != "*" + ): + continue + else: + # We have a valid address and network, so see if the address + # is contained within the network. + if addr not in network: + continue + + # Check to see if the port matches. + if ( + origin_port != secure_port and + secure_port != "*" and + secure_port is not None + ): + continue + + # If we've gotten here, then this origin matches the current + # secure origin and we should return True + return True + + # If we've gotten to this point, then the origin isn't secure and we + # will not accept it as a valid location to search. We will however + # log a warning that we are ignoring it. + logger.warning( + "The repository located at %s is not a trusted or secure host and " + "is being ignored. 
If this repository is available via HTTPS we " + "recommend you use HTTPS instead, otherwise you may silence " + "this warning and allow it anyway with '--trusted-host %s'.", + origin_host, + origin_host, + ) + + return False + + def request(self, method, url, *args, **kwargs): + # Allow setting a default timeout on a session + kwargs.setdefault("timeout", self.timeout) + + # Dispatch the actual request + return super(PipSession, self).request(method, url, *args, **kwargs) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/network/utils.py b/backend/test/lib/python3.8/site-packages/pip/_internal/network/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a19050b0f7082809f277bc74e516a9af8e537136 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/network/utils.py @@ -0,0 +1,48 @@ +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Iterator + + +def response_chunks(response, chunk_size=CONTENT_CHUNK_SIZE): + # type: (Response, int) -> Iterator[bytes] + """Given a requests Response, provide the data chunks. + """ + try: + # Special case for urllib3. + for chunk in response.raw.stream( + chunk_size, + # We use decode_content=False here because we don't + # want urllib3 to mess with the raw bytes we get + # from the server. If we decompress inside of + # urllib3 then we cannot verify the checksum + # because the checksum will be of the compressed + # file. This breakage will only occur if the + # server adds a Content-Encoding header, which + # depends on how the server was configured: + # - Some servers will notice that the file isn't a + # compressible file and will leave the file alone + # and with an empty Content-Encoding + # - Some servers will notice that the file is + # already compressed and will leave the file + # alone and will add a Content-Encoding: gzip + # header + # - Some servers won't notice anything at all and + # will take a file that's already been compressed + # and compress it again and set the + # Content-Encoding: gzip header + # + # By setting this not to decode automatically we + # hope to eliminate problems with the second case. + decode_content=False, + ): + yield chunk + except AttributeError: + # Standard file-like object. + while True: + chunk = response.raw.read(chunk_size) + if not chunk: + break + yield chunk diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/network/xmlrpc.py b/backend/test/lib/python3.8/site-packages/pip/_internal/network/xmlrpc.py new file mode 100644 index 0000000000000000000000000000000000000000..121edd93056f57c7717e6e48e2d7432cfc18ada4 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/network/xmlrpc.py @@ -0,0 +1,44 @@ +"""xmlrpclib.Transport implementation +""" + +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +import logging + +from pip._vendor import requests +# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is +# why we ignore the type on this import +from pip._vendor.six.moves import xmlrpc_client # type: ignore +from pip._vendor.six.moves.urllib import parse as urllib_parse + +logger = logging.getLogger(__name__) + + +class PipXmlrpcTransport(xmlrpc_client.Transport): + """Provide a `xmlrpclib.Transport` implementation via a `PipSession` + object. 
+ """ + + def __init__(self, index_url, session, use_datetime=False): + xmlrpc_client.Transport.__init__(self, use_datetime) + index_parts = urllib_parse.urlparse(index_url) + self._scheme = index_parts.scheme + self._session = session + + def request(self, host, handler, request_body, verbose=False): + parts = (self._scheme, host, handler, None, None, None) + url = urllib_parse.urlunparse(parts) + try: + headers = {'Content-Type': 'text/xml'} + response = self._session.post(url, data=request_body, + headers=headers, stream=True) + response.raise_for_status() + self.verbose = verbose + return self.parse_response(response.raw) + except requests.HTTPError as exc: + logger.critical( + "HTTP error %s while getting %s", + exc.response.status_code, url, + ) + raise diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/__init__.py b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1f62c8d3ba9804f90f8a2ef13c02ea10621cf288 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/check.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/check.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..711d0c1103881f0812a2f918185a1201d64623b6 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/check.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f296645aa8aa81eef443652bed5857930c00ba8d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f6f56c039756635716561e1aab23a2626e918910 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__init__.py b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..41cc810999df04ded79a653ee494abd7e1bec6c4 Binary files 
/dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..27bc9a996a1269102e4541eeb7a4dfdd7ee14794 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c32914b5fe7bfbe21b04446982fa9ad6087abe5d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e93c1b009caf3e569832901e8ec4dc9a81a0202c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ccf9b6c5c457cbbc2acff05ca008a010f79b7c87 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/metadata.py b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/metadata.py new file mode 100644 index 0000000000000000000000000000000000000000..b13fbdef93357da3d1b3b0303b49a28990736256 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/metadata.py @@ -0,0 +1,40 @@ +"""Metadata generation logic for source distributions. +""" + +import logging +import os + +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from pip._internal.build_env import BuildEnvironment + from pip._vendor.pep517.wrappers import Pep517HookCaller + +logger = logging.getLogger(__name__) + + +def generate_metadata(build_env, backend): + # type: (BuildEnvironment, Pep517HookCaller) -> str + """Generate metadata using mechanisms described in PEP 517. + + Returns the generated metadata directory. + """ + metadata_tmpdir = TempDirectory( + kind="modern-metadata", globally_managed=True + ) + + metadata_dir = metadata_tmpdir.path + + with build_env: + # Note that Pep517HookCaller implements a fallback for + # prepare_metadata_for_build_wheel, so we don't have to + # consider the possibility that this hook doesn't exist. 
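# The hook driven below is the PEP 517 metadata interface: the build backend
# exposes prepare_metadata_for_build_wheel(metadata_directory), writes a
# .dist-info directory into it, and returns that directory's basename. A toy
# backend-side implementation of the contract (illustrative only, for a
# hypothetical "example-pkg" project):
import os

def prepare_metadata_for_build_wheel(metadata_directory, config_settings=None):
    distinfo = "example_pkg-1.0.dist-info"
    path = os.path.join(metadata_directory, distinfo)
    os.makedirs(path, exist_ok=True)
    with open(os.path.join(path, "METADATA"), "w") as f:
        f.write("Metadata-Version: 2.1\nName: example-pkg\nVersion: 1.0\n")
    return distinfo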
+ runner = runner_with_spinner_message("Preparing wheel metadata") + with backend.subprocess_runner(runner): + distinfo_dir = backend.prepare_metadata_for_build_wheel( + metadata_dir + ) + + return os.path.join(metadata_dir, distinfo_dir) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/metadata_legacy.py b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/metadata_legacy.py new file mode 100644 index 0000000000000000000000000000000000000000..b6813f89ba7dd5ea88c59dc618ddb18701ae2194 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/metadata_legacy.py @@ -0,0 +1,122 @@ +"""Metadata generation logic for legacy source distributions. +""" + +import logging +import os + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.misc import ensure_dir +from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.vcs import vcs + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + + from pip._internal.build_env import BuildEnvironment + +logger = logging.getLogger(__name__) + + +def _find_egg_info(source_directory, is_editable): + # type: (str, bool) -> str + """Find an .egg-info in `source_directory`, based on `is_editable`. + """ + + def looks_like_virtual_env(path): + # type: (str) -> bool + return ( + os.path.lexists(os.path.join(path, 'bin', 'python')) or + os.path.exists(os.path.join(path, 'Scripts', 'Python.exe')) + ) + + def locate_editable_egg_info(base): + # type: (str) -> List[str] + candidates = [] # type: List[str] + for root, dirs, files in os.walk(base): + for dir_ in vcs.dirnames: + if dir_ in dirs: + dirs.remove(dir_) + # Iterate over a copy of ``dirs``, since mutating + # a list while iterating over it can cause trouble. + # (See https://github.com/pypa/pip/pull/462.) + for dir_ in list(dirs): + if looks_like_virtual_env(os.path.join(root, dir_)): + dirs.remove(dir_) + # Also don't search through tests + elif dir_ == 'test' or dir_ == 'tests': + dirs.remove(dir_) + candidates.extend(os.path.join(root, dir_) for dir_ in dirs) + return [f for f in candidates if f.endswith('.egg-info')] + + def depth_of_directory(dir_): + # type: (str) -> int + return ( + dir_.count(os.path.sep) + + (os.path.altsep and dir_.count(os.path.altsep) or 0) + ) + + base = source_directory + if is_editable: + filenames = locate_editable_egg_info(base) + else: + base = os.path.join(base, 'pip-egg-info') + filenames = os.listdir(base) + + if not filenames: + raise InstallationError( + "Files/directories not found in {}".format(base) + ) + + # If we have more than one match, we pick the toplevel one. This + # can easily be the case if there is a dist folder which contains + # an extracted tarball for testing purposes. + if len(filenames) > 1: + filenames.sort(key=depth_of_directory) + + return os.path.join(base, filenames[0]) + + +def generate_metadata( + build_env, # type: BuildEnvironment + setup_py_path, # type: str + source_dir, # type: str + editable, # type: bool + isolated, # type: bool + details, # type: str +): + # type: (...) -> str + """Generate metadata using setup.py-based defacto mechanisms. + + Returns the generated metadata directory. 
+ """ + logger.debug( + 'Running setup.py (path:%s) egg_info for package %s', + setup_py_path, details, + ) + + egg_info_dir = None # type: Optional[str] + # For non-editable installs, don't put the .egg-info files at the root, + # to avoid confusion due to the source code being considered an installed + # egg. + if not editable: + egg_info_dir = os.path.join(source_dir, 'pip-egg-info') + # setuptools complains if the target directory does not exist. + ensure_dir(egg_info_dir) + + args = make_setuptools_egg_info_args( + setup_py_path, + egg_info_dir=egg_info_dir, + no_user_config=isolated, + ) + + with build_env: + call_subprocess( + args, + cwd=source_dir, + command_desc='python setup.py egg_info', + ) + + # Return the .egg-info directory. + return _find_egg_info(source_dir, editable) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/wheel.py b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/wheel.py new file mode 100644 index 0000000000000000000000000000000000000000..1266ce05c6f4fddeec7f40a00ad4d2d85f531552 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/wheel.py @@ -0,0 +1,46 @@ +import logging +import os + +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + from pip._vendor.pep517.wrappers import Pep517HookCaller + +logger = logging.getLogger(__name__) + + +def build_wheel_pep517( + name, # type: str + backend, # type: Pep517HookCaller + metadata_directory, # type: str + build_options, # type: List[str] + tempd, # type: str +): + # type: (...) -> Optional[str] + """Build one InstallRequirement using the PEP 517 build process. + + Returns path to wheel if successfully built. Otherwise, returns None. + """ + assert metadata_directory is not None + if build_options: + # PEP 517 does not support --build-options + logger.error('Cannot build wheel for %s using PEP 517 when ' + '--build-option is present' % (name,)) + return None + try: + logger.debug('Destination directory: %s', tempd) + + runner = runner_with_spinner_message( + 'Building wheel for {} (PEP 517)'.format(name) + ) + with backend.subprocess_runner(runner): + wheel_name = backend.build_wheel( + tempd, + metadata_directory=metadata_directory, + ) + except Exception: + logger.error('Failed building wheel for %s', name) + return None + return os.path.join(tempd, wheel_name) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/wheel_legacy.py b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/wheel_legacy.py new file mode 100644 index 0000000000000000000000000000000000000000..3ebd9fe444bddc4bafae14af8dda297ddb98ce40 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/build/wheel_legacy.py @@ -0,0 +1,115 @@ +import logging +import os.path + +from pip._internal.utils.setuptools_build import ( + make_setuptools_bdist_wheel_args, +) +from pip._internal.utils.subprocess import ( + LOG_DIVIDER, + call_subprocess, + format_command_args, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.ui import open_spinner + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Text + +logger = logging.getLogger(__name__) + + +def format_command_result( + command_args, # type: List[str] + command_output, # type: Text +): + # type: (...) 
-> str + """Format command information for logging.""" + command_desc = format_command_args(command_args) + text = 'Command arguments: {}\n'.format(command_desc) + + if not command_output: + text += 'Command output: None' + elif logger.getEffectiveLevel() > logging.DEBUG: + text += 'Command output: [use --verbose to show]' + else: + if not command_output.endswith('\n'): + command_output += '\n' + text += 'Command output:\n{}{}'.format(command_output, LOG_DIVIDER) + + return text + + +def get_legacy_build_wheel_path( + names, # type: List[str] + temp_dir, # type: str + name, # type: str + command_args, # type: List[str] + command_output, # type: Text +): + # type: (...) -> Optional[str] + """Return the path to the wheel in the temporary build directory.""" + # Sort for determinism. + names = sorted(names) + if not names: + msg = ( + 'Legacy build of wheel for {!r} created no files.\n' + ).format(name) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + return None + + if len(names) > 1: + msg = ( + 'Legacy build of wheel for {!r} created more than one file.\n' + 'Filenames (choosing first): {}\n' + ).format(name, names) + msg += format_command_result(command_args, command_output) + logger.warning(msg) + + return os.path.join(temp_dir, names[0]) + + +def build_wheel_legacy( + name, # type: str + setup_py_path, # type: str + source_dir, # type: str + global_options, # type: List[str] + build_options, # type: List[str] + tempd, # type: str +): + # type: (...) -> Optional[str] + """Build one unpacked package using the "legacy" build process. + + Returns path to wheel if successfully built. Otherwise, returns None. + """ + wheel_args = make_setuptools_bdist_wheel_args( + setup_py_path, + global_options=global_options, + build_options=build_options, + destination_dir=tempd, + ) + + spin_message = 'Building wheel for %s (setup.py)' % (name,) + with open_spinner(spin_message) as spinner: + logger.debug('Destination directory: %s', tempd) + + try: + output = call_subprocess( + wheel_args, + cwd=source_dir, + spinner=spinner, + ) + except Exception: + spinner.finish("error") + logger.error('Failed building wheel for %s', name) + return None + + names = os.listdir(tempd) + wheel_path = get_legacy_build_wheel_path( + names=names, + temp_dir=tempd, + name=name, + command_args=wheel_args, + command_output=output, + ) + return wheel_path diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/check.py b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/check.py new file mode 100644 index 0000000000000000000000000000000000000000..b85a12306a4f9008ae072b5f2c88df5b9d1d3db3 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/check.py @@ -0,0 +1,163 @@ +"""Validation of dependencies of packages +""" + +# The following comment should be removed at some point in the future. 
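# get_legacy_build_wheel_path above has to cope with `setup.py bdist_wheel`
# leaving zero, one, or several files in the temp directory: zero is logged
# as a failed build, several gets a warning and the lexicographically first
# name wins. The selection rule, condensed to a standalone sketch:
import os

def pick_built_wheel(temp_dir):
    names = sorted(os.listdir(temp_dir))  # sort for determinism
    if not names:
        return None                       # the build produced nothing
    # With several candidates the warn-and-take-first policy applies.
    return os.path.join(temp_dir, names[0])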
+# mypy: strict-optional=False +# mypy: disallow-untyped-defs=False + +import logging +from collections import namedtuple + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.pkg_resources import RequirementParseError + +from pip._internal.distributions import ( + make_distribution_for_install_requirement, +) +from pip._internal.utils.misc import get_installed_distributions +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +logger = logging.getLogger(__name__) + +if MYPY_CHECK_RUNNING: + from pip._internal.req.req_install import InstallRequirement + from typing import ( + Any, Callable, Dict, Optional, Set, Tuple, List + ) + + # Shorthands + PackageSet = Dict[str, 'PackageDetails'] + Missing = Tuple[str, Any] + Conflicting = Tuple[str, str, Any] + + MissingDict = Dict[str, List[Missing]] + ConflictingDict = Dict[str, List[Conflicting]] + CheckResult = Tuple[MissingDict, ConflictingDict] + +PackageDetails = namedtuple('PackageDetails', ['version', 'requires']) + + +def create_package_set_from_installed(**kwargs): + # type: (**Any) -> Tuple[PackageSet, bool] + """Converts a list of distributions into a PackageSet. + """ + # Default to using all packages installed on the system + if kwargs == {}: + kwargs = {"local_only": False, "skip": ()} + + package_set = {} + problems = False + for dist in get_installed_distributions(**kwargs): + name = canonicalize_name(dist.project_name) + try: + package_set[name] = PackageDetails(dist.version, dist.requires()) + except RequirementParseError as e: + # Don't crash on broken metadata + logger.warning("Error parsing requirements for %s: %s", name, e) + problems = True + return package_set, problems + + +def check_package_set(package_set, should_ignore=None): + # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult + """Check if a package set is consistent + + If should_ignore is passed, it should be a callable that takes a + package name and returns a boolean. 
+ """ + if should_ignore is None: + def should_ignore(name): + return False + + missing = {} + conflicting = {} + + for package_name in package_set: + # Info about dependencies of package_name + missing_deps = set() # type: Set[Missing] + conflicting_deps = set() # type: Set[Conflicting] + + if should_ignore(package_name): + continue + + for req in package_set[package_name].requires: + name = canonicalize_name(req.project_name) # type: str + + # Check if it's missing + if name not in package_set: + missed = True + if req.marker is not None: + missed = req.marker.evaluate() + if missed: + missing_deps.add((name, req)) + continue + + # Check if there's a conflict + version = package_set[name].version # type: str + if not req.specifier.contains(version, prereleases=True): + conflicting_deps.add((name, version, req)) + + if missing_deps: + missing[package_name] = sorted(missing_deps, key=str) + if conflicting_deps: + conflicting[package_name] = sorted(conflicting_deps, key=str) + + return missing, conflicting + + +def check_install_conflicts(to_install): + # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult] + """For checking if the dependency graph would be consistent after \ + installing given requirements + """ + # Start from the current state + package_set, _ = create_package_set_from_installed() + # Install packages + would_be_installed = _simulate_installation_of(to_install, package_set) + + # Only warn about directly-dependent packages; create a whitelist of them + whitelist = _create_whitelist(would_be_installed, package_set) + + return ( + package_set, + check_package_set( + package_set, should_ignore=lambda name: name not in whitelist + ) + ) + + +def _simulate_installation_of(to_install, package_set): + # type: (List[InstallRequirement], PackageSet) -> Set[str] + """Computes the version of packages after installing to_install. + """ + + # Keep track of packages that were installed + installed = set() + + # Modify it as installing requirement_set would (assuming no errors) + for inst_req in to_install: + abstract_dist = make_distribution_for_install_requirement(inst_req) + dist = abstract_dist.get_pkg_resources_distribution() + + name = canonicalize_name(dist.key) + package_set[name] = PackageDetails(dist.version, dist.requires()) + + installed.add(name) + + return installed + + +def _create_whitelist(would_be_installed, package_set): + # type: (Set[str], PackageSet) -> Set[str] + packages_affected = set(would_be_installed) + + for package_name in package_set: + if package_name in packages_affected: + continue + + for req in package_set[package_name].requires: + if canonicalize_name(req.name) in packages_affected: + packages_affected.add(package_name) + break + + return packages_affected diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/freeze.py b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/freeze.py new file mode 100644 index 0000000000000000000000000000000000000000..36a5c339a2ab22debec595af17a520a803f2a783 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/freeze.py @@ -0,0 +1,265 @@ +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False +# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import collections +import logging +import os +import re + +from pip._vendor import six +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.pkg_resources import RequirementParseError + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, +) +from pip._internal.req.req_file import COMMENT_RE +from pip._internal.utils.misc import ( + dist_is_editable, + get_installed_distributions, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import ( + Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union + ) + from pip._internal.cache import WheelCache + from pip._vendor.pkg_resources import ( + Distribution, Requirement + ) + + RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]] + + +logger = logging.getLogger(__name__) + + +def freeze( + requirement=None, # type: Optional[List[str]] + find_links=None, # type: Optional[List[str]] + local_only=None, # type: Optional[bool] + user_only=None, # type: Optional[bool] + paths=None, # type: Optional[List[str]] + skip_regex=None, # type: Optional[str] + isolated=False, # type: bool + wheel_cache=None, # type: Optional[WheelCache] + exclude_editable=False, # type: bool + skip=() # type: Container[str] +): + # type: (...) -> Iterator[str] + find_links = find_links or [] + skip_match = None + + if skip_regex: + skip_match = re.compile(skip_regex).search + + for link in find_links: + yield '-f %s' % link + installations = {} # type: Dict[str, FrozenRequirement] + for dist in get_installed_distributions(local_only=local_only, + skip=(), + user_only=user_only, + paths=paths): + try: + req = FrozenRequirement.from_dist(dist) + except RequirementParseError as exc: + # We include dist rather than dist.project_name because the + # dist string includes more information, like the version and + # location. We also include the exception message to aid + # troubleshooting. + logger.warning( + 'Could not generate requirement for distribution %r: %s', + dist, exc + ) + continue + if exclude_editable and req.editable: + continue + installations[req.canonical_name] = req + + if requirement: + # the options that don't get turned into an InstallRequirement + # should only be emitted once, even if the same option is in multiple + # requirements files, so we need to keep track of what has been emitted + # so that we don't emit it again if it's seen again + emitted_options = set() # type: Set[str] + # keep track of which files a requirement is in so that we can + # give an accurate warning if a requirement appears multiple times. 
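# The emitted_options set above exists because global flags (--index-url,
# --find-links, and friends) may appear in several requirements files but
# should be printed only once in the freeze output. The dedup in miniature:
def emit_flag_lines(files, flag_prefixes=("-i", "-f", "--index-url", "--find-links")):
    seen = set()
    for lines in files:
        for line in lines:
            line = line.rstrip()
            if line.startswith(flag_prefixes) and line not in seen:
                seen.add(line)
                yield line

# list(emit_flag_lines([["-i https://pypi.org/simple"],
#                       ["-i https://pypi.org/simple"]]))
# -> ['-i https://pypi.org/simple']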
+ req_files = collections.defaultdict(list) # type: Dict[str, List[str]] + for req_file_path in requirement: + with open(req_file_path) as req_file: + for line in req_file: + if (not line.strip() or + line.strip().startswith('#') or + (skip_match and skip_match(line)) or + line.startswith(( + '-r', '--requirement', + '-Z', '--always-unzip', + '-f', '--find-links', + '-i', '--index-url', + '--pre', + '--trusted-host', + '--process-dependency-links', + '--extra-index-url'))): + line = line.rstrip() + if line not in emitted_options: + emitted_options.add(line) + yield line + continue + + if line.startswith('-e') or line.startswith('--editable'): + if line.startswith('-e'): + line = line[2:].strip() + else: + line = line[len('--editable'):].strip().lstrip('=') + line_req = install_req_from_editable( + line, + isolated=isolated, + wheel_cache=wheel_cache, + ) + else: + line_req = install_req_from_line( + COMMENT_RE.sub('', line).strip(), + isolated=isolated, + wheel_cache=wheel_cache, + ) + + if not line_req.name: + logger.info( + "Skipping line in requirement file [%s] because " + "it's not clear what it would install: %s", + req_file_path, line.strip(), + ) + logger.info( + " (add #egg=PackageName to the URL to avoid" + " this warning)" + ) + else: + line_req_canonical_name = canonicalize_name( + line_req.name) + if line_req_canonical_name not in installations: + # either it's not installed, or it is installed + # but has been processed already + if not req_files[line_req.name]: + logger.warning( + "Requirement file [%s] contains %s, but " + "package %r is not installed", + req_file_path, + COMMENT_RE.sub('', line).strip(), + line_req.name + ) + else: + req_files[line_req.name].append(req_file_path) + else: + yield str(installations[ + line_req_canonical_name]).rstrip() + del installations[line_req_canonical_name] + req_files[line_req.name].append(req_file_path) + + # Warn about requirements that were included multiple times (in a + # single requirements file or in different requirements files). + for name, files in six.iteritems(req_files): + if len(files) > 1: + logger.warning("Requirement %s included multiple times [%s]", + name, ', '.join(sorted(set(files)))) + + yield( + '## The following requirements were added by ' + 'pip freeze:' + ) + for installation in sorted( + installations.values(), key=lambda x: x.name.lower()): + if installation.canonical_name not in skip: + yield str(installation).rstrip() + + +def get_requirement_info(dist): + # type: (Distribution) -> RequirementInfo + """ + Compute and return values (req, editable, comments) for use in + FrozenRequirement.from_dist(). 
+ """ + if not dist_is_editable(dist): + return (None, False, []) + + location = os.path.normcase(os.path.abspath(dist.location)) + + from pip._internal.vcs import vcs, RemoteNotFoundError + vcs_backend = vcs.get_backend_for_dir(location) + + if vcs_backend is None: + req = dist.as_requirement() + logger.debug( + 'No VCS found for editable requirement "%s" in: %r', req, + location, + ) + comments = [ + '# Editable install with no version control ({})'.format(req) + ] + return (location, True, comments) + + try: + req = vcs_backend.get_src_requirement(location, dist.project_name) + except RemoteNotFoundError: + req = dist.as_requirement() + comments = [ + '# Editable {} install with no remote ({})'.format( + type(vcs_backend).__name__, req, + ) + ] + return (location, True, comments) + + except BadCommand: + logger.warning( + 'cannot determine version of editable source in %s ' + '(%s command not found in path)', + location, + vcs_backend.name, + ) + return (None, True, []) + + except InstallationError as exc: + logger.warning( + "Error when trying to get requirement for VCS system %s, " + "falling back to uneditable format", exc + ) + else: + if req is not None: + return (req, True, []) + + logger.warning( + 'Could not determine repository location of %s', location + ) + comments = ['## !! Could not determine repository location'] + + return (None, False, comments) + + +class FrozenRequirement(object): + def __init__(self, name, req, editable, comments=()): + # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None + self.name = name + self.canonical_name = canonicalize_name(name) + self.req = req + self.editable = editable + self.comments = comments + + @classmethod + def from_dist(cls, dist): + # type: (Distribution) -> FrozenRequirement + req, editable, comments = get_requirement_info(dist) + if req is None: + req = dist.as_requirement() + + return cls(dist.project_name, req, editable, comments=comments) + + def __str__(self): + req = self.req + if self.editable: + req = '-e %s' % req + return '\n'.join(list(self.comments) + [str(req)]) + '\n' diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__init__.py b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..24d6a5dd31fe33b03f90ed0f9ee465253686900c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__init__.py @@ -0,0 +1,2 @@ +"""For modules related to installing packages. 
+""" diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..90ba2491ce990df411bede7adbb7bd16d2ffeff7 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..953c2cdbc9abb80dc4ee0e3bed01739a927f8120 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6293f5ac8e10dac86755ebfe4aad96ed4954bc39 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0510dd418c11fff6e806c0e28b5b366d6861506c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/editable_legacy.py b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/editable_legacy.py new file mode 100644 index 0000000000000000000000000000000000000000..a668a61dc60f50963186b5a358e1e581bb6bbf09 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/editable_legacy.py @@ -0,0 +1,52 @@ +"""Legacy editable installation process, i.e. `setup.py develop`. +""" +import logging + +from pip._internal.utils.logging import indent_log +from pip._internal.utils.setuptools_build import make_setuptools_develop_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Sequence + + from pip._internal.build_env import BuildEnvironment + + +logger = logging.getLogger(__name__) + + +def install_editable( + install_options, # type: List[str] + global_options, # type: Sequence[str] + prefix, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool + name, # type: str + setup_py_path, # type: str + isolated, # type: bool + build_env, # type: BuildEnvironment + unpacked_source_directory, # type: str +): + # type: (...) -> None + """Install a package in editable mode. Most arguments are pass-through + to setuptools. 
+ """ + logger.info('Running setup.py develop for %s', name) + + args = make_setuptools_develop_args( + setup_py_path, + global_options=global_options, + install_options=install_options, + no_user_config=isolated, + prefix=prefix, + home=home, + use_user_site=use_user_site, + ) + + with indent_log(): + with build_env: + call_subprocess( + args, + cwd=unpacked_source_directory, + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/legacy.py b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/legacy.py new file mode 100644 index 0000000000000000000000000000000000000000..2d4adc4f62c81f0dcb2cd48c340102234052fac7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/legacy.py @@ -0,0 +1,129 @@ +"""Legacy installation process, i.e. `setup.py install`. +""" + +import logging +import os +from distutils.util import change_root + +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ensure_dir +from pip._internal.utils.setuptools_build import make_setuptools_install_args +from pip._internal.utils.subprocess import runner_with_spinner_message +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Sequence + + from pip._internal.models.scheme import Scheme + from pip._internal.req.req_install import InstallRequirement + + +logger = logging.getLogger(__name__) + + +def install( + install_req, # type: InstallRequirement + install_options, # type: List[str] + global_options, # type: Sequence[str] + root, # type: Optional[str] + home, # type: Optional[str] + prefix, # type: Optional[str] + use_user_site, # type: bool + pycompile, # type: bool + scheme, # type: Scheme +): + # type: (...) -> None + # Extend the list of global and install options passed on to + # the setup.py call with the ones from the requirements file. + # Options specified in requirements file override those + # specified on the command line, since the last option given + # to setup.py is the one that is used. + global_options = list(global_options) + \ + install_req.options.get('global_options', []) + install_options = list(install_options) + \ + install_req.options.get('install_options', []) + + header_dir = scheme.headers + + with TempDirectory(kind="record") as temp_dir: + record_filename = os.path.join(temp_dir.path, 'install-record.txt') + install_args = make_setuptools_install_args( + install_req.setup_py_path, + global_options=global_options, + install_options=install_options, + record_filename=record_filename, + root=root, + prefix=prefix, + header_dir=header_dir, + home=home, + use_user_site=use_user_site, + no_user_config=install_req.isolated, + pycompile=pycompile, + ) + + runner = runner_with_spinner_message( + "Running setup.py install for {}".format(install_req.name) + ) + with indent_log(), install_req.build_env: + runner( + cmd=install_args, + cwd=install_req.unpacked_source_directory, + ) + + if not os.path.exists(record_filename): + logger.debug('Record file %s not found', record_filename) + return + install_req.install_succeeded = True + + # We intentionally do not use any encoding to read the file because + # setuptools writes the file using distutils.file_util.write_file, + # which does not specify an encoding. 
+ with open(record_filename) as f: + record_lines = f.read().splitlines() + + def prepend_root(path): + # type: (str) -> str + if root is None or not os.path.isabs(path): + return path + else: + return change_root(root, path) + + for line in record_lines: + directory = os.path.dirname(line) + if directory.endswith('.egg-info'): + egg_info_dir = prepend_root(directory) + break + else: + deprecated( + reason=( + "{} did not indicate that it installed an " + ".egg-info directory. Only setup.py projects " + "generating .egg-info directories are supported." + ).format(install_req), + replacement=( + "for maintainers: updating the setup.py of {0}. " + "For users: contact the maintainers of {0} to let " + "them know to update their setup.py.".format( + install_req.name + ) + ), + gone_in="20.2", + issue=6998, + ) + # FIXME: put the record somewhere + return + new_lines = [] + for line in record_lines: + filename = line.strip() + if os.path.isdir(filename): + filename += os.path.sep + new_lines.append( + os.path.relpath(prepend_root(filename), egg_info_dir) + ) + new_lines.sort() + ensure_dir(egg_info_dir) + inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt') + with open(inst_files_path, 'w') as f: + f.write('\n'.join(new_lines) + '\n') diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/wheel.py b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/wheel.py new file mode 100644 index 0000000000000000000000000000000000000000..aac975c3ac8ebfed2f3d54e229a2d8d28d878865 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/install/wheel.py @@ -0,0 +1,615 @@ +"""Support for installing and building the "wheel" binary package format. +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +from __future__ import absolute_import + +import collections +import compileall +import csv +import logging +import os.path +import re +import shutil +import stat +import sys +import warnings +from base64 import urlsafe_b64encode +from zipfile import ZipFile + +from pip._vendor import pkg_resources +from pip._vendor.distlib.scripts import ScriptMaker +from pip._vendor.distlib.util import get_export_entry +from pip._vendor.six import StringIO + +from pip._internal.exceptions import InstallationError +from pip._internal.locations import get_major_minor_version +from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.unpacking import unpack_file +from pip._internal.utils.wheel import parse_wheel + +if MYPY_CHECK_RUNNING: + from email.message import Message + from typing import ( + Dict, List, Optional, Sequence, Tuple, IO, Text, Any, + Iterable, Callable, Set, + ) + + from pip._internal.models.scheme import Scheme + + InstalledCSVRow = Tuple[str, ...] 
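# --- Editor's note: illustrative sketch, not part of the diff above. ---
# An InstalledCSVRow mirrors one line of a .dist-info/RECORD file:
# (path, hash, size). The hash uses the PEP 376/427 encoding,
# "sha256=" + urlsafe-base64(digest) with trailing '=' padding stripped,
# which is what the rehash() helper defined just below produces. A
# minimal standalone recomputation of the same value, assuming only a
# readable file path:

import hashlib
from base64 import urlsafe_b64encode

def record_hash(path, blocksize=1 << 20):
    # Stream the file so large wheels don't need to fit in memory.
    h = hashlib.sha256()
    length = 0
    with open(path, 'rb') as f:
        for block in iter(lambda: f.read(blocksize), b''):
            length += len(block)
            h.update(block)
    digest = 'sha256=' + urlsafe_b64encode(
        h.digest()
    ).decode('latin1').rstrip('=')
    return digest, str(length)  # same (digest, length) shape as rehash()

# --- end note ---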
+ + +logger = logging.getLogger(__name__) + + +def normpath(src, p): + # type: (str, str) -> str + return os.path.relpath(src, p).replace(os.path.sep, '/') + + +def rehash(path, blocksize=1 << 20): + # type: (str, int) -> Tuple[str, str] + """Return (encoded_digest, length) for path using hashlib.sha256()""" + h, length = hash_file(path, blocksize) + digest = 'sha256=' + urlsafe_b64encode( + h.digest() + ).decode('latin1').rstrip('=') + # unicode/str python2 issues + return (digest, str(length)) # type: ignore + + +def open_for_csv(name, mode): + # type: (str, Text) -> IO[Any] + if sys.version_info[0] < 3: + nl = {} # type: Dict[str, Any] + bin = 'b' + else: + nl = {'newline': ''} # type: Dict[str, Any] + bin = '' + return open(name, mode + bin, **nl) + + +def fix_script(path): + # type: (str) -> Optional[bool] + """Replace #!python with #!/path/to/python + Return True if file was changed. + """ + # XXX RECORD hashes will need to be updated + if os.path.isfile(path): + with open(path, 'rb') as script: + firstline = script.readline() + if not firstline.startswith(b'#!python'): + return False + exename = sys.executable.encode(sys.getfilesystemencoding()) + firstline = b'#!' + exename + os.linesep.encode("ascii") + rest = script.read() + with open(path, 'wb') as script: + script.write(firstline) + script.write(rest) + return True + return None + + +def wheel_root_is_purelib(metadata): + # type: (Message) -> bool + return metadata.get("Root-Is-Purelib", "").lower() == "true" + + +def get_entrypoints(filename): + # type: (str) -> Tuple[Dict[str, str], Dict[str, str]] + if not os.path.exists(filename): + return {}, {} + + # This is done because you can pass a string to entry_points wrappers which + # means that they may or may not be valid INI files. The attempt here is to + # strip leading and trailing whitespace in order to make them valid INI + # files. + with open(filename) as fp: + data = StringIO() + for line in fp: + data.write(line.strip()) + data.write("\n") + data.seek(0) + + # get the entry points and then the script names + entry_points = pkg_resources.EntryPoint.parse_map(data) + console = entry_points.get('console_scripts', {}) + gui = entry_points.get('gui_scripts', {}) + + def _split_ep(s): + # type: (pkg_resources.EntryPoint) -> Tuple[str, str] + """get the string representation of EntryPoint, + remove space and split on '=' + """ + split_parts = str(s).replace(" ", "").split("=") + return split_parts[0], split_parts[1] + + # convert the EntryPoint objects into strings with module:function + console = dict(_split_ep(v) for v in console.values()) + gui = dict(_split_ep(v) for v in gui.values()) + return console, gui + + +def message_about_scripts_not_on_PATH(scripts): + # type: (Sequence[str]) -> Optional[str] + """Determine if any scripts are not on PATH and format a warning. + Returns a warning message if one or more scripts are not on PATH, + otherwise None. + """ + if not scripts: + return None + + # Group scripts by the path they were installed in + grouped_by_dir = collections.defaultdict(set) # type: Dict[str, Set[str]] + for destfile in scripts: + parent_dir = os.path.dirname(destfile) + script_name = os.path.basename(destfile) + grouped_by_dir[parent_dir].add(script_name) + + # We don't want to warn for directories that are on PATH. + not_warn_dirs = [ + os.path.normcase(i).rstrip(os.sep) for i in + os.environ.get("PATH", "").split(os.pathsep) + ] + # If an executable sits with sys.executable, we don't warn for it. 
+ # This covers the case of venv invocations without activating the venv. + not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable))) + warn_for = { + parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items() + if os.path.normcase(parent_dir) not in not_warn_dirs + } # type: Dict[str, Set[str]] + if not warn_for: + return None + + # Format a message + msg_lines = [] + for parent_dir, dir_scripts in warn_for.items(): + sorted_scripts = sorted(dir_scripts) # type: List[str] + if len(sorted_scripts) == 1: + start_text = "script {} is".format(sorted_scripts[0]) + else: + start_text = "scripts {} are".format( + ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1] + ) + + msg_lines.append( + "The {} installed in '{}' which is not on PATH." + .format(start_text, parent_dir) + ) + + last_line_fmt = ( + "Consider adding {} to PATH or, if you prefer " + "to suppress this warning, use --no-warn-script-location." + ) + if len(msg_lines) == 1: + msg_lines.append(last_line_fmt.format("this directory")) + else: + msg_lines.append(last_line_fmt.format("these directories")) + + # Add a note if any directory starts with ~ + warn_for_tilde = any( + i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i + ) + if warn_for_tilde: + tilde_warning_msg = ( + "NOTE: The current PATH contains path(s) starting with `~`, " + "which may not be expanded by all applications." + ) + msg_lines.append(tilde_warning_msg) + + # Returns the formatted multiline message + return "\n".join(msg_lines) + + +def sorted_outrows(outrows): + # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow] + """Return the given rows of a RECORD file in sorted order. + + Each row is a 3-tuple (path, hash, size) and corresponds to a record of + a RECORD file (see PEP 376 and PEP 427 for details). For the rows + passed to this function, the size can be an integer as an int or string, + or the empty string. + """ + # Normally, there should only be one row per path, in which case the + # second and third elements don't come into play when sorting. + # However, in cases in the wild where a path might happen to occur twice, + # we don't want the sort operation to trigger an error (but still want + # determinism). Since the third element can be an int or string, we + # coerce each element to a string to avoid a TypeError in this case. + # For additional background, see-- + # https://github.com/pypa/pip/issues/5868 + return sorted(outrows, key=lambda row: tuple(str(x) for x in row)) + + +def get_csv_rows_for_installed( + old_csv_rows, # type: Iterable[List[str]] + installed, # type: Dict[str, str] + changed, # type: Set[str] + generated, # type: List[str] + lib_dir, # type: str +): + # type: (...) -> List[InstalledCSVRow] + """ + :param installed: A map from archive RECORD path to installation RECORD + path. + """ + installed_rows = [] # type: List[InstalledCSVRow] + for row in old_csv_rows: + if len(row) > 3: + logger.warning( + 'RECORD line has more than three elements: {}'.format(row) + ) + # Make a copy because we are mutating the row. 
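# --- Editor's note: illustrative aside, not part of the diff above. ---
# A RECORD row at this point is a parsed CSV line such as
# ['pip/__init__.py', 'sha256=Gp8M2Ehqj...', '1475'] (values
# hypothetical); the copy made just below lets the path, hash, and size
# be rewritten in place without mutating the caller's parsed CSV data.
# --- end note ---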
+ row = list(row) + old_path = row[0] + new_path = installed.pop(old_path, old_path) + row[0] = new_path + if new_path in changed: + digest, length = rehash(new_path) + row[1] = digest + row[2] = length + installed_rows.append(tuple(row)) + for f in generated: + digest, length = rehash(f) + installed_rows.append((normpath(f, lib_dir), digest, str(length))) + for f in installed: + installed_rows.append((installed[f], '', '')) + return installed_rows + + +class MissingCallableSuffix(Exception): + pass + + +def _raise_for_invalid_entrypoint(specification): + # type: (str) -> None + entry = get_export_entry(specification) + if entry is not None and entry.suffix is None: + raise MissingCallableSuffix(str(entry)) + + +class PipScriptMaker(ScriptMaker): + def make(self, specification, options=None): + # type: (str, Dict[str, Any]) -> List[str] + _raise_for_invalid_entrypoint(specification) + return super(PipScriptMaker, self).make(specification, options) + + +def install_unpacked_wheel( + name, # type: str + wheeldir, # type: str + wheel_zip, # type: ZipFile + scheme, # type: Scheme + req_description, # type: str + pycompile=True, # type: bool + warn_script_location=True # type: bool +): + # type: (...) -> None + """Install a wheel. + + :param name: Name of the project to install + :param wheeldir: Base directory of the unpacked wheel + :param wheel_zip: open ZipFile for wheel being installed + :param scheme: Distutils scheme dictating the install directories + :param req_description: String used in place of the requirement, for + logging + :param pycompile: Whether to byte-compile installed Python files + :param warn_script_location: Whether to check that scripts are installed + into a directory on PATH + :raises UnsupportedWheel: + * when the directory holds an unpacked wheel with incompatible + Wheel-Version + * when the .dist-info dir does not match the wheel + """ + # TODO: Investigate and break this up. + # TODO: Look into moving this into a dedicated class for representing an + # installation. + + source = wheeldir.rstrip(os.path.sep) + os.path.sep + + info_dir, metadata = parse_wheel(wheel_zip, name) + + if wheel_root_is_purelib(metadata): + lib_dir = scheme.purelib + else: + lib_dir = scheme.platlib + + subdirs = os.listdir(source) + data_dirs = [s for s in subdirs if s.endswith('.data')] + + # Record details of the files moved + # installed = files copied from the wheel to the destination + # changed = files changed while installing (scripts #! line typically) + # generated = files newly generated during the install (script wrappers) + installed = {} # type: Dict[str, str] + changed = set() + generated = [] # type: List[str] + + # Compile all of the pyc files that we're going to be installing + if pycompile: + with captured_stdout() as stdout: + with warnings.catch_warnings(): + warnings.filterwarnings('ignore') + compileall.compile_dir(source, force=True, quiet=True) + logger.debug(stdout.getvalue()) + + def record_installed(srcfile, destfile, modified=False): + # type: (str, str, bool) -> None + """Map archive RECORD paths to installation RECORD paths.""" + oldpath = normpath(srcfile, wheeldir) + newpath = normpath(destfile, lib_dir) + installed[oldpath] = newpath + if modified: + changed.add(destfile) + + def clobber( + source, # type: str + dest, # type: str + is_base, # type: bool + fixer=None, # type: Optional[Callable[[str], Any]] + filter=None # type: Optional[Callable[[str], bool]] + ): + # type: (...) 
-> None + ensure_dir(dest) # common for the 'include' path + + for dir, subdirs, files in os.walk(source): + basedir = dir[len(source):].lstrip(os.path.sep) + destdir = os.path.join(dest, basedir) + if is_base and basedir == '': + subdirs[:] = [s for s in subdirs if not s.endswith('.data')] + for f in files: + # Skip unwanted files + if filter and filter(f): + continue + srcfile = os.path.join(dir, f) + destfile = os.path.join(dest, basedir, f) + # directory creation is lazy and after the file filtering above + # to ensure we don't install empty dirs; empty dirs can't be + # uninstalled. + ensure_dir(destdir) + + # copyfile (called below) truncates the destination if it + # exists and then writes the new contents. This is fine in most + # cases, but can cause a segfault if pip has loaded a shared + # object (e.g. from pyopenssl through its vendored urllib3) + # Since the shared object is mmap'd an attempt to call a + # symbol in it will then cause a segfault. Unlinking the file + # allows writing of new contents while allowing the process to + # continue to use the old copy. + if os.path.exists(destfile): + os.unlink(destfile) + + # We use copyfile (not move, copy, or copy2) to be extra sure + # that we are not moving directories over (copyfile fails for + # directories) as well as to ensure that we are not copying + # over any metadata because we want more control over what + # metadata we actually copy over. + shutil.copyfile(srcfile, destfile) + + # Copy over the metadata for the file, currently this only + # includes the atime and mtime. + st = os.stat(srcfile) + if hasattr(os, "utime"): + os.utime(destfile, (st.st_atime, st.st_mtime)) + + # If our file is executable, then make our destination file + # executable. + if os.access(srcfile, os.X_OK): + st = os.stat(srcfile) + permissions = ( + st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH + ) + os.chmod(destfile, permissions) + + changed = False + if fixer: + changed = fixer(destfile) + record_installed(srcfile, destfile, changed) + + clobber(source, lib_dir, True) + + dest_info_dir = os.path.join(lib_dir, info_dir) + + # Get the defined entry points + ep_file = os.path.join(dest_info_dir, 'entry_points.txt') + console, gui = get_entrypoints(ep_file) + + def is_entrypoint_wrapper(name): + # type: (str) -> bool + # EP, EP.exe and EP-script.py are scripts generated for + # entry point EP by setuptools + if name.lower().endswith('.exe'): + matchname = name[:-4] + elif name.lower().endswith('-script.py'): + matchname = name[:-10] + elif name.lower().endswith(".pya"): + matchname = name[:-4] + else: + matchname = name + # Ignore setuptools-generated scripts + return (matchname in console or matchname in gui) + + for datadir in data_dirs: + fixer = None + filter = None + for subdir in os.listdir(os.path.join(wheeldir, datadir)): + fixer = None + if subdir == 'scripts': + fixer = fix_script + filter = is_entrypoint_wrapper + source = os.path.join(wheeldir, datadir, subdir) + dest = getattr(scheme, subdir) + clobber(source, dest, False, fixer=fixer, filter=filter) + + maker = PipScriptMaker(None, scheme.scripts) + + # Ensure old scripts are overwritten. + # See https://github.com/pypa/pip/issues/1800 + maker.clobber = True + + # Ensure we don't generate any variants for scripts because this is almost + # never what somebody wants. + # See https://bitbucket.org/pypa/distlib/issue/35/ + maker.variants = {''} + + # This is required because otherwise distlib creates scripts that are not + # executable. 
+ # See https://bitbucket.org/pypa/distlib/issue/32/ + maker.set_mode = True + + scripts_to_generate = [] + + # Special case pip and setuptools to generate versioned wrappers + # + # The issue is that some projects (specifically, pip and setuptools) use + # code in setup.py to create "versioned" entry points - pip2.7 on Python + # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into + # the wheel metadata at build time, and so if the wheel is installed with + # a *different* version of Python the entry points will be wrong. The + # correct fix for this is to enhance the metadata to be able to describe + # such versioned entry points, but that won't happen till Metadata 2.0 is + # available. + # In the meantime, projects using versioned entry points will either have + # incorrect versioned entry points, or they will not be able to distribute + # "universal" wheels (i.e., they will need a wheel per Python version). + # + # Because setuptools and pip are bundled with _ensurepip and virtualenv, + # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we + # override the versioned entry points in the wheel and generate the + # correct ones. This code is purely a short-term measure until Metadata 2.0 + # is available. + # + # To add the level of hack in this section of code, in order to support + # ensurepip this code will look for an ``ENSUREPIP_OPTIONS`` environment + # variable which will control which version scripts get installed. + # + # ENSUREPIP_OPTIONS=altinstall + # - Only pipX.Y and easy_install-X.Y will be generated and installed + # ENSUREPIP_OPTIONS=install + # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note + # that this option is technically if ENSUREPIP_OPTIONS is set and is + # not altinstall + # DEFAULT + # - The default behavior is to install pip, pipX, pipX.Y, easy_install + # and easy_install-X.Y. 
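# --- Editor's note: illustrative sketch, not part of the diff above. ---
# The code that follows rewrites the wheel's 'pip' / 'easy_install'
# console entry points into interpreter-versioned script specs. A rough
# standalone model of the pip-side expansion, assuming CPython and a
# hypothetical entry point string 'pip._internal.cli.main:main':

import sys

def versioned_pip_specs(pip_script, environ):
    # Mirrors the ENSUREPIP_OPTIONS cases described in the comment above.
    specs = []
    if 'ENSUREPIP_OPTIONS' not in environ:
        specs.append('pip = ' + pip_script)
    if environ.get('ENSUREPIP_OPTIONS', '') != 'altinstall':
        specs.append('pip%s = %s' % (sys.version_info[0], pip_script))
    specs.append('pip%s.%s = %s' % (
        sys.version_info[0], sys.version_info[1], pip_script))
    return specs

# versioned_pip_specs('pip._internal.cli.main:main', {}) on Python 3.8
# yields ['pip = ...', 'pip3 = ...', 'pip3.8 = ...'].
# --- end note ---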
+ pip_script = console.pop('pip', None) + if pip_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append('pip = ' + pip_script) + + if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": + scripts_to_generate.append( + 'pip%s = %s' % (sys.version_info[0], pip_script) + ) + + scripts_to_generate.append( + 'pip%s = %s' % (get_major_minor_version(), pip_script) + ) + # Delete any other versioned pip entry points + pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] + for k in pip_ep: + del console[k] + easy_install_script = console.pop('easy_install', None) + if easy_install_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + scripts_to_generate.append( + 'easy_install = ' + easy_install_script + ) + + scripts_to_generate.append( + 'easy_install-%s = %s' % ( + get_major_minor_version(), easy_install_script + ) + ) + # Delete any other versioned easy_install entry points + easy_install_ep = [ + k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) + ] + for k in easy_install_ep: + del console[k] + + # Generate the console and GUI entry points specified in the wheel + scripts_to_generate.extend( + '%s = %s' % kv for kv in console.items() + ) + + gui_scripts_to_generate = [ + '%s = %s' % kv for kv in gui.items() + ] + + generated_console_scripts = [] # type: List[str] + + try: + generated_console_scripts = maker.make_multiple(scripts_to_generate) + generated.extend(generated_console_scripts) + + generated.extend( + maker.make_multiple(gui_scripts_to_generate, {'gui': True}) + ) + except MissingCallableSuffix as e: + entry = e.args[0] + raise InstallationError( + "Invalid script entry point: {} for req: {} - A callable " + "suffix is required. Cf https://packaging.python.org/" + "specifications/entry-points/#use-for-scripts for more " + "information.".format(entry, req_description) + ) + + if warn_script_location: + msg = message_about_scripts_not_on_PATH(generated_console_scripts) + if msg is not None: + logger.warning(msg) + + # Record pip as the installer + installer = os.path.join(dest_info_dir, 'INSTALLER') + temp_installer = os.path.join(dest_info_dir, 'INSTALLER.pip') + with open(temp_installer, 'wb') as installer_file: + installer_file.write(b'pip\n') + shutil.move(temp_installer, installer) + generated.append(installer) + + # Record details of all files installed + record = os.path.join(dest_info_dir, 'RECORD') + temp_record = os.path.join(dest_info_dir, 'RECORD.pip') + with open_for_csv(record, 'r') as record_in: + with open_for_csv(temp_record, 'w+') as record_out: + reader = csv.reader(record_in) + outrows = get_csv_rows_for_installed( + reader, installed=installed, changed=changed, + generated=generated, lib_dir=lib_dir, + ) + writer = csv.writer(record_out) + # Sort to simplify testing. + for row in sorted_outrows(outrows): + writer.writerow(row) + shutil.move(temp_record, record) + + +def install_wheel( + name, # type: str + wheel_path, # type: str + scheme, # type: Scheme + req_description, # type: str + pycompile=True, # type: bool + warn_script_location=True, # type: bool + _temp_dir_for_testing=None, # type: Optional[str] +): + # type: (...) 
-> None + with TempDirectory( + path=_temp_dir_for_testing, kind="unpacked-wheel" + ) as unpacked_dir, ZipFile(wheel_path, allowZip64=True) as z: + unpack_file(wheel_path, unpacked_dir.path) + install_unpacked_wheel( + name=name, + wheeldir=unpacked_dir.path, + wheel_zip=z, + scheme=scheme, + req_description=req_description, + pycompile=pycompile, + warn_script_location=warn_script_location, + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/operations/prepare.py b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/prepare.py new file mode 100644 index 0000000000000000000000000000000000000000..0b61f20524d976cd2bdc2fbc0a7e32bf13729d41 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/operations/prepare.py @@ -0,0 +1,591 @@ +"""Prepares a distribution for installation +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import logging +import mimetypes +import os +import shutil +import sys + +from pip._vendor import requests +from pip._vendor.six import PY2 + +from pip._internal.distributions import ( + make_distribution_for_install_requirement, +) +from pip._internal.distributions.installed import InstalledDistribution +from pip._internal.exceptions import ( + DirectoryUrlHashUnsupported, + HashMismatch, + HashUnpinned, + InstallationError, + PreviousBuildDirError, + VcsHashUnsupported, +) +from pip._internal.utils.filesystem import copy2_fixed +from pip._internal.utils.hashes import MissingHashes +from pip._internal.utils.logging import indent_log +from pip._internal.utils.marker_files import write_delete_marker_file +from pip._internal.utils.misc import ( + ask_path_exists, + backup_dir, + display_path, + hide_url, + path_to_display, + rmtree, +) +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.unpacking import unpack_file +from pip._internal.vcs import vcs + +if MYPY_CHECK_RUNNING: + from typing import ( + Callable, List, Optional, Tuple, + ) + + from mypy_extensions import TypedDict + + from pip._internal.distributions import AbstractDistribution + from pip._internal.index.package_finder import PackageFinder + from pip._internal.models.link import Link + from pip._internal.network.download import Downloader + from pip._internal.req.req_install import InstallRequirement + from pip._internal.req.req_tracker import RequirementTracker + from pip._internal.utils.hashes import Hashes + + if PY2: + CopytreeKwargs = TypedDict( + 'CopytreeKwargs', + { + 'ignore': Callable[[str, List[str]], List[str]], + 'symlinks': bool, + }, + total=False, + ) + else: + CopytreeKwargs = TypedDict( + 'CopytreeKwargs', + { + 'copy_function': Callable[[str, str], None], + 'ignore': Callable[[str, List[str]], List[str]], + 'ignore_dangling_symlinks': bool, + 'symlinks': bool, + }, + total=False, + ) + +logger = logging.getLogger(__name__) + + +def _get_prepared_distribution( + req, # type: InstallRequirement + req_tracker, # type: RequirementTracker + finder, # type: PackageFinder + build_isolation # type: bool +): + # type: (...) -> AbstractDistribution + """Prepare a distribution for installation. 
+ """ + abstract_dist = make_distribution_for_install_requirement(req) + with req_tracker.track(req): + abstract_dist.prepare_distribution_metadata(finder, build_isolation) + return abstract_dist + + +def unpack_vcs_link(link, location): + # type: (Link, str) -> None + vcs_backend = vcs.get_backend_for_scheme(link.scheme) + assert vcs_backend is not None + vcs_backend.unpack(location, url=hide_url(link.url)) + + +def _copy_file(filename, location, link): + # type: (str, str, Link) -> None + copy = True + download_location = os.path.join(location, link.filename) + if os.path.exists(download_location): + response = ask_path_exists( + 'The file {} exists. (i)gnore, (w)ipe, (b)ackup, (a)abort'.format( + display_path(download_location) + ), + ('i', 'w', 'b', 'a'), + ) + if response == 'i': + copy = False + elif response == 'w': + logger.warning('Deleting %s', display_path(download_location)) + os.remove(download_location) + elif response == 'b': + dest_file = backup_dir(download_location) + logger.warning( + 'Backing up %s to %s', + display_path(download_location), + display_path(dest_file), + ) + shutil.move(download_location, dest_file) + elif response == 'a': + sys.exit(-1) + if copy: + shutil.copy(filename, download_location) + logger.info('Saved %s', display_path(download_location)) + + +def unpack_http_url( + link, # type: Link + location, # type: str + downloader, # type: Downloader + download_dir=None, # type: Optional[str] + hashes=None, # type: Optional[Hashes] +): + # type: (...) -> str + temp_dir = TempDirectory(kind="unpack", globally_managed=True) + # If a download dir is specified, is the file already downloaded there? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir( + link, download_dir, hashes + ) + + if already_downloaded_path: + from_path = already_downloaded_path + content_type = mimetypes.guess_type(from_path)[0] + else: + # let's download to a tmp dir + from_path, content_type = _download_http_url( + link, downloader, temp_dir.path, hashes + ) + + # unpack the archive to the build dir location. even when only + # downloading archives, they have to be unpacked to parse dependencies + unpack_file(from_path, location, content_type) + + return from_path + + +def _copy2_ignoring_special_files(src, dest): + # type: (str, str) -> None + """Copying special files is not supported, but as a convenience to users + we skip errors copying them. This supports tools that may create e.g. + socket files in the project source directory. + """ + try: + copy2_fixed(src, dest) + except shutil.SpecialFileError as e: + # SpecialFileError may be raised due to either the source or + # destination. If the destination was the cause then we would actually + # care, but since the destination directory is deleted prior to + # copy we ignore all of them assuming it is caused by the source. + logger.warning( + "Ignoring special file error '%s' encountered copying %s to %s.", + str(e), + path_to_display(src), + path_to_display(dest), + ) + + +def _copy_source_tree(source, target): + # type: (str, str) -> None + def ignore(d, names): + # type: (str, List[str]) -> List[str] + # Pulling in those directories can potentially be very slow, + # exclude the following directories if they appear in the top + # level dir (and only it). 
+ # See discussion at https://github.com/pypa/pip/pull/6770 + return ['.tox', '.nox'] if d == source else [] + + kwargs = dict(ignore=ignore, symlinks=True) # type: CopytreeKwargs + + if not PY2: + # Python 2 does not support copy_function, so we only ignore + # errors on special file copy in Python 3. + kwargs['copy_function'] = _copy2_ignoring_special_files + + shutil.copytree(source, target, **kwargs) + + +def unpack_file_url( + link, # type: Link + location, # type: str + download_dir=None, # type: Optional[str] + hashes=None # type: Optional[Hashes] +): + # type: (...) -> Optional[str] + """Unpack link into location. + """ + link_path = link.file_path + # If it's a url to a local directory + if link.is_existing_dir(): + if os.path.isdir(location): + rmtree(location) + _copy_source_tree(link_path, location) + return None + + # If a download dir is specified, is the file already there and valid? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir( + link, download_dir, hashes + ) + + if already_downloaded_path: + from_path = already_downloaded_path + else: + from_path = link_path + + # If --require-hashes is off, `hashes` is either empty, the + # link's embedded hash, or MissingHashes; it is required to + # match. If --require-hashes is on, we are satisfied by any + # hash in `hashes` matching: a URL-based or an option-based + # one; no internet-sourced hash will be in `hashes`. + if hashes: + hashes.check_against_path(from_path) + + content_type = mimetypes.guess_type(from_path)[0] + + # unpack the archive to the build dir location. even when only downloading + # archives, they have to be unpacked to parse dependencies + unpack_file(from_path, location, content_type) + + return from_path + + +def unpack_url( + link, # type: Link + location, # type: str + downloader, # type: Downloader + download_dir=None, # type: Optional[str] + hashes=None, # type: Optional[Hashes] +): + # type: (...) -> Optional[str] + """Unpack link into location, downloading if required. + + :param hashes: A Hashes object, one of whose embedded hashes must match, + or HashMismatch will be raised. If the Hashes is empty, no matches are + required, and unhashable types of requirements (like VCS ones, which + would ordinarily raise HashUnsupported) are allowed. + """ + # non-editable vcs urls + if link.is_vcs: + unpack_vcs_link(link, location) + return None + + # file urls + elif link.is_file: + return unpack_file_url(link, location, download_dir, hashes=hashes) + + # http urls + else: + return unpack_http_url( + link, + location, + downloader, + download_dir, + hashes=hashes, + ) + + +def _download_http_url( + link, # type: Link + downloader, # type: Downloader + temp_dir, # type: str + hashes, # type: Optional[Hashes] +): + # type: (...) 
-> Tuple[str, str] + """Download link url into temp_dir using provided session""" + download = downloader(link) + + file_path = os.path.join(temp_dir, download.filename) + with open(file_path, 'wb') as content_file: + for chunk in download.chunks: + content_file.write(chunk) + + if hashes: + hashes.check_against_path(file_path) + + return file_path, download.response.headers.get('content-type', '') + + +def _check_download_dir(link, download_dir, hashes): + # type: (Link, str, Optional[Hashes]) -> Optional[str] + """ Check download_dir for previously downloaded file with correct hash + If a correct file is found return its path else None + """ + download_path = os.path.join(download_dir, link.filename) + + if not os.path.exists(download_path): + return None + + # If already downloaded, does its hash match? + logger.info('File was already downloaded %s', download_path) + if hashes: + try: + hashes.check_against_path(download_path) + except HashMismatch: + logger.warning( + 'Previously-downloaded file %s has bad hash. ' + 'Re-downloading.', + download_path + ) + os.unlink(download_path) + return None + return download_path + + +class RequirementPreparer(object): + """Prepares a Requirement + """ + + def __init__( + self, + build_dir, # type: str + download_dir, # type: Optional[str] + src_dir, # type: str + wheel_download_dir, # type: Optional[str] + build_isolation, # type: bool + req_tracker, # type: RequirementTracker + downloader, # type: Downloader + finder, # type: PackageFinder + require_hashes, # type: bool + use_user_site, # type: bool + ): + # type: (...) -> None + super(RequirementPreparer, self).__init__() + + self.src_dir = src_dir + self.build_dir = build_dir + self.req_tracker = req_tracker + self.downloader = downloader + self.finder = finder + + # Where still-packed archives should be written to. If None, they are + # not saved, and are deleted immediately after unpacking. + self.download_dir = download_dir + + # Where still-packed .whl files should be written to. If None, they are + # written to the download_dir parameter. Separate to download_dir to + # permit only keeping wheel archives for pip wheel. + self.wheel_download_dir = wheel_download_dir + + # NOTE + # download_dir and wheel_download_dir overlap semantically and may + # be combined if we're willing to have non-wheel archives present in + # the wheelhouse output by 'pip wheel'. + + # Is build isolation allowed? + self.build_isolation = build_isolation + + # Should hash-checking be required? + self.require_hashes = require_hashes + + # Should install in user site-packages? + self.use_user_site = use_user_site + + @property + def _download_should_save(self): + # type: () -> bool + if not self.download_dir: + return False + + if os.path.exists(self.download_dir): + return True + + logger.critical('Could not find download directory') + raise InstallationError( + "Could not find or access download directory '{}'" + .format(self.download_dir)) + + def prepare_linked_requirement( + self, + req, # type: InstallRequirement + ): + # type: (...) 
-> AbstractDistribution + """Prepare a requirement that would be obtained from req.link + """ + assert req.link + link = req.link + + # TODO: Breakup into smaller functions + if link.scheme == 'file': + path = link.file_path + logger.info('Processing %s', display_path(path)) + else: + logger.info('Collecting %s', req.req or req) + + with indent_log(): + # @@ if filesystem packages are not marked + # editable in a req, a non deterministic error + # occurs when the script attempts to unpack the + # build directory + # Since source_dir is only set for editable requirements. + assert req.source_dir is None + req.ensure_has_source_dir(self.build_dir) + # If a checkout exists, it's unwise to keep going. version + # inconsistencies are logged later, but do not fail the + # installation. + # FIXME: this won't upgrade when there's an existing + # package unpacked in `req.source_dir` + if os.path.exists(os.path.join(req.source_dir, 'setup.py')): + raise PreviousBuildDirError( + "pip can't proceed with requirements '{}' due to a" + " pre-existing build directory ({}). This is " + "likely due to a previous installation that failed" + ". pip is being responsible and not assuming it " + "can delete this. Please delete it and try again." + .format(req, req.source_dir) + ) + + # Now that we have the real link, we can tell what kind of + # requirements we have and raise some more informative errors + # than otherwise. (For example, we can raise VcsHashUnsupported + # for a VCS URL rather than HashMissing.) + if self.require_hashes: + # We could check these first 2 conditions inside + # unpack_url and save repetition of conditions, but then + # we would report less-useful error messages for + # unhashable requirements, complaining that there's no + # hash provided. + if link.is_vcs: + raise VcsHashUnsupported() + elif link.is_existing_dir(): + raise DirectoryUrlHashUnsupported() + if not req.original_link and not req.is_pinned: + # Unpinned packages are asking for trouble when a new + # version is uploaded. This isn't a security check, but + # it saves users a surprising hash mismatch in the + # future. + # + # file:/// URLs aren't pinnable, so don't complain + # about them not being pinned. + raise HashUnpinned() + + hashes = req.hashes(trust_internet=not self.require_hashes) + if self.require_hashes and not hashes: + # Known-good hashes are missing for this requirement, so + # shim it with a facade object that will provoke hash + # computation and then raise a HashMissing exception + # showing the user what the hash should be. + hashes = MissingHashes() + + download_dir = self.download_dir + if link.is_wheel and self.wheel_download_dir: + # when doing 'pip wheel` we download wheels to a + # dedicated dir. + download_dir = self.wheel_download_dir + + try: + local_path = unpack_url( + link, req.source_dir, self.downloader, download_dir, + hashes=hashes, + ) + except requests.HTTPError as exc: + logger.critical( + 'Could not install requirement %s because of error %s', + req, + exc, + ) + raise InstallationError( + 'Could not install requirement {} because of HTTP ' + 'error {} for URL {}'.format(req, exc, link) + ) + + # For use in later processing, preserve the file path on the + # requirement. + if local_path: + req.local_file_path = local_path + + if link.is_wheel: + if download_dir: + # When downloading, we only unpack wheels to get + # metadata. + autodelete_unpacked = True + else: + # When installing a wheel, we use the unpacked + # wheel. 
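# --- Editor's note: illustrative aside, not part of the diff above. ---
# autodelete_unpacked, set in the branches around this point, decides
# whether a pip-delete-this-directory marker is written into
# req.source_dir below: sdist unpack dirs are always marked for
# cleanup, while wheel unpack dirs are marked only when the wheel was
# unpacked just to read metadata during a download.
# --- end note ---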
+ autodelete_unpacked = False + else: + # We always delete unpacked sdists after pip runs. + autodelete_unpacked = True + if autodelete_unpacked: + write_delete_marker_file(req.source_dir) + + abstract_dist = _get_prepared_distribution( + req, self.req_tracker, self.finder, self.build_isolation, + ) + + if download_dir: + if link.is_existing_dir(): + logger.info('Link is a directory, ignoring download_dir') + elif local_path and not os.path.exists( + os.path.join(download_dir, link.filename) + ): + _copy_file(local_path, download_dir, link) + + if self._download_should_save: + # Make a .zip of the source_dir we already created. + if link.is_vcs: + req.archive(self.download_dir) + return abstract_dist + + def prepare_editable_requirement( + self, + req, # type: InstallRequirement + ): + # type: (...) -> AbstractDistribution + """Prepare an editable requirement + """ + assert req.editable, "cannot prepare a non-editable req as editable" + + logger.info('Obtaining %s', req) + + with indent_log(): + if self.require_hashes: + raise InstallationError( + 'The editable requirement {} cannot be installed when ' + 'requiring hashes, because there is no single file to ' + 'hash.'.format(req) + ) + req.ensure_has_source_dir(self.src_dir) + req.update_editable(not self._download_should_save) + + abstract_dist = _get_prepared_distribution( + req, self.req_tracker, self.finder, self.build_isolation, + ) + + if self._download_should_save: + req.archive(self.download_dir) + req.check_if_exists(self.use_user_site) + + return abstract_dist + + def prepare_installed_requirement( + self, + req, # type: InstallRequirement + skip_reason # type: str + ): + # type: (...) -> AbstractDistribution + """Prepare an already-installed requirement + """ + assert req.satisfied_by, "req should have been satisfied but isn't" + assert skip_reason is not None, ( + "did not get skip reason skipped but req.satisfied_by " + "is set to {}".format(req.satisfied_by) + ) + logger.info( + 'Requirement %s: %s (%s)', + skip_reason, req, req.satisfied_by.version + ) + with indent_log(): + if self.require_hashes: + logger.debug( + 'Since it is already installed, we are trusting this ' + 'package without checking its hash. To ensure a ' + 'completely repeatable environment, install into an ' + 'empty virtualenv.' + ) + abstract_dist = InstalledDistribution(req) + + return abstract_dist diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/pep425tags.py b/backend/test/lib/python3.8/site-packages/pip/_internal/pep425tags.py new file mode 100644 index 0000000000000000000000000000000000000000..a2386ee75b893d3e52ac54cd8f35c785b47d5519 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/pep425tags.py @@ -0,0 +1,167 @@ +"""Generate and work with PEP 425 Compatibility Tags.""" +from __future__ import absolute_import + +import logging +import re + +from pip._vendor.packaging.tags import ( + Tag, + compatible_tags, + cpython_tags, + generic_tags, + interpreter_name, + interpreter_version, + mac_platforms, +) + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Tuple + + from pip._vendor.packaging.tags import PythonVersion + +logger = logging.getLogger(__name__) + +_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)') + + +def version_info_to_nodot(version_info): + # type: (Tuple[int, ...]) -> str + # Only use up to the first two numbers. 
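# --- Editor's note: illustrative aside, not part of the diff above. ---
# For example, version_info_to_nodot((3, 8, 2)) returns '38', the
# "no-dot" form used in tags such as 'cp38'.
# --- end note ---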
+ return ''.join(map(str, version_info[:2])) + + +def _mac_platforms(arch): + # type: (str) -> List[str] + match = _osx_arch_pat.match(arch) + if match: + name, major, minor, actual_arch = match.groups() + mac_version = (int(major), int(minor)) + arches = [ + # Since we have always only checked that the platform starts + # with "macosx", for backwards-compatibility we extract the + # actual prefix provided by the user in case they provided + # something like "macosxcustom_". It may be good to remove + # this as undocumented or deprecate it in the future. + '{}_{}'.format(name, arch[len('macosx_'):]) + for arch in mac_platforms(mac_version, actual_arch) + ] + else: + # arch pattern didn't match (?!) + arches = [arch] + return arches + + +def _custom_manylinux_platforms(arch): + # type: (str) -> List[str] + arches = [arch] + arch_prefix, arch_sep, arch_suffix = arch.partition('_') + if arch_prefix == 'manylinux2014': + # manylinux1/manylinux2010 wheels run on most manylinux2014 systems + # with the exception of wheels depending on ncurses. PEP 599 states + # manylinux1/manylinux2010 wheels should be considered + # manylinux2014 wheels: + # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels + if arch_suffix in {'i686', 'x86_64'}: + arches.append('manylinux2010' + arch_sep + arch_suffix) + arches.append('manylinux1' + arch_sep + arch_suffix) + elif arch_prefix == 'manylinux2010': + # manylinux1 wheels run on most manylinux2010 systems with the + # exception of wheels depending on ncurses. PEP 571 states + # manylinux1 wheels should be considered manylinux2010 wheels: + # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels + arches.append('manylinux1' + arch_sep + arch_suffix) + return arches + + +def _get_custom_platforms(arch): + # type: (str) -> List[str] + arch_prefix, arch_sep, arch_suffix = arch.partition('_') + if arch.startswith('macosx'): + arches = _mac_platforms(arch) + elif arch_prefix in ['manylinux2014', 'manylinux2010']: + arches = _custom_manylinux_platforms(arch) + else: + arches = [arch] + return arches + + +def _get_python_version(version): + # type: (str) -> PythonVersion + if len(version) > 1: + return int(version[0]), int(version[1:]) + else: + return (int(version[0]),) + + +def _get_custom_interpreter(implementation=None, version=None): + # type: (Optional[str], Optional[str]) -> str + if implementation is None: + implementation = interpreter_name() + if version is None: + version = interpreter_version() + return "{}{}".format(implementation, version) + + +def get_supported( + version=None, # type: Optional[str] + platform=None, # type: Optional[str] + impl=None, # type: Optional[str] + abi=None # type: Optional[str] +): + # type: (...) -> List[Tag] + """Return a list of supported tags for each version specified in + `versions`. + + :param version: a string version, of the form "33" or "32", + or None. The version will be assumed to support our ABI. + :param platform: specify the exact platform you want valid + tags for, or None. If None, use the local system platform. + :param impl: specify the exact implementation you want valid + tags for, or None. If None, use the local interpreter impl. + :param abi: specify the exact abi you want valid + tags for, or None. If None, use the local interpreter abi. 
+ """ + supported = [] # type: List[Tag] + + python_version = None # type: Optional[PythonVersion] + if version is not None: + python_version = _get_python_version(version) + + interpreter = _get_custom_interpreter(impl, version) + + abis = None # type: Optional[List[str]] + if abi is not None: + abis = [abi] + + platforms = None # type: Optional[List[str]] + if platform is not None: + platforms = _get_custom_platforms(platform) + + is_cpython = (impl or interpreter_name()) == "cp" + if is_cpython: + supported.extend( + cpython_tags( + python_version=python_version, + abis=abis, + platforms=platforms, + ) + ) + else: + supported.extend( + generic_tags( + interpreter=interpreter, + abis=abis, + platforms=platforms, + ) + ) + supported.extend( + compatible_tags( + python_version=python_version, + interpreter=interpreter, + platforms=platforms, + ) + ) + + return supported diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/pyproject.py b/backend/test/lib/python3.8/site-packages/pip/_internal/pyproject.py new file mode 100644 index 0000000000000000000000000000000000000000..6b4faf7a7527cecf3fdd1cb32f8193d358f3c8fe --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/pyproject.py @@ -0,0 +1,196 @@ +from __future__ import absolute_import + +import io +import os +import sys +from collections import namedtuple + +from pip._vendor import six, toml +from pip._vendor.packaging.requirements import InvalidRequirement, Requirement + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any, Optional, List + + +def _is_list_of_str(obj): + # type: (Any) -> bool + return ( + isinstance(obj, list) and + all(isinstance(item, six.string_types) for item in obj) + ) + + +def make_pyproject_path(unpacked_source_directory): + # type: (str) -> str + path = os.path.join(unpacked_source_directory, 'pyproject.toml') + + # Python2 __file__ should not be unicode + if six.PY2 and isinstance(path, six.text_type): + path = path.encode(sys.getfilesystemencoding()) + + return path + + +BuildSystemDetails = namedtuple('BuildSystemDetails', [ + 'requires', 'backend', 'check', 'backend_path' +]) + + +def load_pyproject_toml( + use_pep517, # type: Optional[bool] + pyproject_toml, # type: str + setup_py, # type: str + req_name # type: str +): + # type: (...) -> Optional[BuildSystemDetails] + """Load the pyproject.toml file. + + Parameters: + use_pep517 - Has the user requested PEP 517 processing? None + means the user hasn't explicitly specified. + pyproject_toml - Location of the project's pyproject.toml file + setup_py - Location of the project's setup.py file + req_name - The name of the requirement we're processing (for + error reporting) + + Returns: + None if we should use the legacy code path, otherwise a tuple + ( + requirements from pyproject.toml, + name of PEP 517 backend, + requirements we should check are installed after setting + up the build environment + directory paths to import the backend from (backend-path), + relative to the project root. 
+ ) + """ + has_pyproject = os.path.isfile(pyproject_toml) + has_setup = os.path.isfile(setup_py) + + if has_pyproject: + with io.open(pyproject_toml, encoding="utf-8") as f: + pp_toml = toml.load(f) + build_system = pp_toml.get("build-system") + else: + build_system = None + + # The following cases must use PEP 517 + # We check for use_pep517 being non-None and falsey because that means + # the user explicitly requested --no-use-pep517. The value 0 as + # opposed to False can occur when the value is provided via an + # environment variable or config file option (due to the quirk of + # strtobool() returning an integer in pip's configuration code). + if has_pyproject and not has_setup: + if use_pep517 is not None and not use_pep517: + raise InstallationError( + "Disabling PEP 517 processing is invalid: " + "project does not have a setup.py" + ) + use_pep517 = True + elif build_system and "build-backend" in build_system: + if use_pep517 is not None and not use_pep517: + raise InstallationError( + "Disabling PEP 517 processing is invalid: " + "project specifies a build backend of {} " + "in pyproject.toml".format( + build_system["build-backend"] + ) + ) + use_pep517 = True + + # If we haven't worked out whether to use PEP 517 yet, + # and the user hasn't explicitly stated a preference, + # we do so if the project has a pyproject.toml file. + elif use_pep517 is None: + use_pep517 = has_pyproject + + # At this point, we know whether we're going to use PEP 517. + assert use_pep517 is not None + + # If we're using the legacy code path, there is nothing further + # for us to do here. + if not use_pep517: + return None + + if build_system is None: + # Either the user has a pyproject.toml with no build-system + # section, or the user has no pyproject.toml, but has opted in + # explicitly via --use-pep517. + # In the absence of any explicit backend specification, we + # assume the setuptools backend that most closely emulates the + # traditional direct setup.py execution, and require wheel and + # a version of setuptools that supports that backend. + + build_system = { + "requires": ["setuptools>=40.8.0", "wheel"], + "build-backend": "setuptools.build_meta:__legacy__", + } + + # If we're using PEP 517, we have build system information (either + # from pyproject.toml, or defaulted by the code above). + # Note that at this point, we do not know if the user has actually + # specified a backend, though. + assert build_system is not None + + # Ensure that the build-system section in pyproject.toml conforms + # to PEP 518. 
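# --- Editor's note: illustrative aside, not part of the diff above. ---
# A build-system table that passes the checks below looks like:
#
#   [build-system]
#   requires = ["setuptools>=40.8.0", "wheel"]
#   build-backend = "setuptools.build_meta"
#
# 'requires' is mandatory and must be a list of PEP 508 requirement
# strings; 'build-backend' is optional and, when absent, falls back to
# the legacy setuptools backend as handled further down.
# --- end note ---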
+ error_template = ( + "{package} has a pyproject.toml file that does not comply " + "with PEP 518: {reason}" + ) + + # Specifying the build-system table but not the requires key is invalid + if "requires" not in build_system: + raise InstallationError( + error_template.format(package=req_name, reason=( + "it has a 'build-system' table but not " + "'build-system.requires' which is mandatory in the table" + )) + ) + + # Error out if requires is not a list of strings + requires = build_system["requires"] + if not _is_list_of_str(requires): + raise InstallationError(error_template.format( + package=req_name, + reason="'build-system.requires' is not a list of strings.", + )) + + # Each requirement must be valid as per PEP 508 + for requirement in requires: + try: + Requirement(requirement) + except InvalidRequirement: + raise InstallationError( + error_template.format( + package=req_name, + reason=( + "'build-system.requires' contains an invalid " + "requirement: {!r}".format(requirement) + ), + ) + ) + + backend = build_system.get("build-backend") + backend_path = build_system.get("backend-path", []) + check = [] # type: List[str] + if backend is None: + # If the user didn't specify a backend, we assume they want to use + # the setuptools backend. But we can't be sure they have included + # a version of setuptools which supplies the backend, or wheel + # (which is needed by the backend) in their requirements. So we + # make a note to check that those requirements are present once + # we have set up the environment. + # This is quite a lot of work to check for a very specific case. But + # the problem is, that case is potentially quite common - projects that + # adopted PEP 518 early for the ability to specify requirements to + # execute setup.py, but never considered needing to mention the build + # tools themselves. The original PEP 518 code had a similar check (but + # implemented in a different way). + backend = "setuptools.build_meta:__legacy__" + check = ["setuptools>=40.8.0", "wheel"] + + return BuildSystemDetails(requires, backend, check, backend_path) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/req/__init__.py b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d2d027adeec4dbedaf62f95b070d7fd9f1fbbe60 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__init__.py @@ -0,0 +1,92 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +from __future__ import absolute_import + +import logging + +from pip._internal.utils.logging import indent_log +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +from .req_file import parse_requirements +from .req_install import InstallRequirement +from .req_set import RequirementSet + +if MYPY_CHECK_RUNNING: + from typing import Any, List, Sequence + +__all__ = [ + "RequirementSet", "InstallRequirement", + "parse_requirements", "install_given_reqs", +] + +logger = logging.getLogger(__name__) + + +class InstallationResult(object): + def __init__(self, name): + # type: (str) -> None + self.name = name + + def __repr__(self): + # type: () -> str + return "InstallationResult(name={!r})".format(self.name) + + +def install_given_reqs( + to_install, # type: List[InstallRequirement] + install_options, # type: List[str] + global_options=(), # type: Sequence[str] + *args, # type: Any + **kwargs # type: Any +): + # type: (...) 
-> List[InstallationResult] + """ + Install everything in the given list. + + (to be called after having downloaded and unpacked the packages) + """ + + if to_install: + logger.info( + 'Installing collected packages: %s', + ', '.join([req.name for req in to_install]), + ) + + installed = [] + + with indent_log(): + for requirement in to_install: + if requirement.should_reinstall: + logger.info('Attempting uninstall: %s', requirement.name) + with indent_log(): + uninstalled_pathset = requirement.uninstall( + auto_confirm=True + ) + try: + requirement.install( + install_options, + global_options, + *args, + **kwargs + ) + except Exception: + should_rollback = ( + requirement.should_reinstall and + not requirement.install_succeeded + ) + # if install did not succeed, rollback previous uninstall + if should_rollback: + uninstalled_pathset.rollback() + raise + else: + should_commit = ( + requirement.should_reinstall and + requirement.install_succeeded + ) + if should_commit: + uninstalled_pathset.commit() + + installed.append(InstallationResult(requirement.name)) + + return installed diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e9282bc4d7cb96bb46e265943ed2f26bdbfe549 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/constructors.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/constructors.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f832eb79a64e665ff330cfd4cd084d3523b5f557 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/constructors.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_file.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_file.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..46ab91483595946dee9f6c08fff6f1adaf4fd70d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_file.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_install.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_install.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..92fe9a237217f15625775b4fb4b29cba07fdc095 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_install.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_set.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_set.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..852db7d816e88d7b059a914bb710c9a09048320c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_set.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-38.pyc 
new file mode 100644
index 0000000000000000000000000000000000000000..c7778828987da8ae61c4f1114e6946b6a7db1516
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f44592c758f9de8c1d0335e6beda738baef2680b
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/req/constructors.py b/backend/test/lib/python3.8/site-packages/pip/_internal/req/constructors.py
new file mode 100644
index 0000000000000000000000000000000000000000..1f3cd8a104c92d804f3086bb519a9ccf24cc46de
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/pip/_internal/req/constructors.py
@@ -0,0 +1,436 @@
+"""Backing implementation for InstallRequirement's various constructors
+
+The idea here is that these formed a major chunk of InstallRequirement's size,
+so moving them and their support code out of that class makes the rest of the
+code easier to understand.
+
+These are meant to be used elsewhere within pip to create instances of
+InstallRequirement.
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import logging
+import os
+import re
+
+from pip._vendor.packaging.markers import Marker
+from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
+from pip._vendor.packaging.specifiers import Specifier
+from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.models.index import PyPI, TestPyPI
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.pyproject import make_pyproject_path
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS
+from pip._internal.utils.misc import is_installable_dir, splitext
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+from pip._internal.utils.urls import path_to_url
+from pip._internal.vcs import is_url, vcs
+
+if MYPY_CHECK_RUNNING:
+    from typing import (
+        Any, Dict, Optional, Set, Tuple, Union,
+    )
+    from pip._internal.cache import WheelCache
+
+
+__all__ = [
+    "install_req_from_editable", "install_req_from_line",
+    "parse_editable"
+]
+
+logger = logging.getLogger(__name__)
+operators = Specifier._operators.keys()
+
+
+def is_archive_file(name):
+    # type: (str) -> bool
+    """Return True if `name` is considered an archive file."""
+    ext = splitext(name)[1].lower()
+    if ext in ARCHIVE_EXTENSIONS:
+        return True
+    return False
+
+
+def _strip_extras(path):
+    # type: (str) -> Tuple[str, Optional[str]]
+    m = re.match(r'^(.+)(\[[^\]]+\])$', path)
+    extras = None
+    if m:
+        path_no_extras = m.group(1)
+        extras = m.group(2)
+    else:
+        path_no_extras = path
+
+    return path_no_extras, extras
+
+
+def convert_extras(extras):
+    # type: (Optional[str]) -> Set[str]
+    if not extras:
+        return set()
+    return Requirement("placeholder" + extras.lower()).extras
+
+
+def parse_editable(editable_req):
+    # type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]]
+    """Parses an editable requirement into:
+        - a requirement name
+        - a URL
+        - extras
+    Accepted requirements:
+        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
+        .[some_extra]
+    """
+
+    url = editable_req
+
+    # If a file path is specified with extras, strip off the extras.
+    url_no_extras, extras = _strip_extras(url)
+
+    if os.path.isdir(url_no_extras):
+        if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
+            msg = (
+                'File "setup.py" not found. Directory cannot be installed '
+                'in editable mode: {}'.format(os.path.abspath(url_no_extras))
+            )
+            pyproject_path = make_pyproject_path(url_no_extras)
+            if os.path.isfile(pyproject_path):
+                msg += (
+                    '\n(A "pyproject.toml" file was found, but editable '
+                    'mode currently requires a setup.py based build.)'
+                )
+            raise InstallationError(msg)
+
+        # Treating it as code that has already been checked out
+        url_no_extras = path_to_url(url_no_extras)
+
+    if url_no_extras.lower().startswith('file:'):
+        package_name = Link(url_no_extras).egg_fragment
+        if extras:
+            return (
+                package_name,
+                url_no_extras,
+                Requirement("placeholder" + extras.lower()).extras,
+            )
+        else:
+            return package_name, url_no_extras, None
+
+    for version_control in vcs:
+        if url.lower().startswith('%s:' % version_control):
+            url = '%s+%s' % (version_control, url)
+            break
+
+    if '+' not in url:
+        raise InstallationError(
+            '{} is not a valid editable requirement. '
+            'It should either be a path to a local project or a VCS URL '
+            '(beginning with svn+, git+, hg+, or bzr+).'.format(editable_req)
+        )
+
+    vc_type = url.split('+', 1)[0].lower()
+
+    if not vcs.get_backend(vc_type):
+        error_message = 'For --editable=%s only ' % editable_req + \
+            ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
+            ' is currently supported'
+        raise InstallationError(error_message)
+
+    package_name = Link(url).egg_fragment
+    if not package_name:
+        raise InstallationError(
+            "Could not detect requirement name for '%s', please specify one "
+            "with #egg=your_package_name" % editable_req
+        )
+    return package_name, url, None
+
+
+def deduce_helpful_msg(req):
+    # type: (str) -> str
+    """Returns a helpful msg in case a requirements file does not exist,
+    or cannot be parsed.
+
+    :param req: Requirements file path
+    """
+    msg = ""
+    if os.path.exists(req):
+        msg = " It does exist."
+        # Try to parse and check if it is a requirements file.
+        try:
+            with open(req, 'r') as fp:
+                # parse first line only
+                next(parse_requirements(fp.read()))
+                msg += " The argument you provided " + \
+                    "(%s) appears to be a" % (req) + \
+                    " requirements file. If that is the" + \
+                    " case, use the '-r' flag to install" + \
+                    " the packages specified within it."
+        except RequirementParseError:
+            logger.debug("Cannot parse '%s' as requirements \
+            file" % (req), exc_info=True)
+    else:
+        msg += " File '%s' does not exist." % (req)
+    return msg
+
+
+class RequirementParts(object):
+    def __init__(
+            self,
+            requirement,  # type: Optional[Requirement]
+            link,         # type: Optional[Link]
+            markers,      # type: Optional[Marker]
+            extras,       # type: Set[str]
+    ):
+        self.requirement = requirement
+        self.link = link
+        self.markers = markers
+        self.extras = extras
+
+
+def parse_req_from_editable(editable_req):
+    # type: (str) -> RequirementParts
+    name, url, extras_override = parse_editable(editable_req)
+
+    if name is not None:
+        try:
+            req = Requirement(name)
+        except InvalidRequirement:
+            raise InstallationError("Invalid requirement: '%s'" % name)
+    else:
+        req = None
+
+    link = Link(url)
+
+    return RequirementParts(req, link, None, extras_override)
+
+
+# ---- The actual constructors follow ----
+
+
+def install_req_from_editable(
+    editable_req,  # type: str
+    comes_from=None,  # type: Optional[str]
+    use_pep517=None,  # type: Optional[bool]
+    isolated=False,  # type: bool
+    options=None,  # type: Optional[Dict[str, Any]]
+    wheel_cache=None,  # type: Optional[WheelCache]
+    constraint=False  # type: bool
+):
+    # type: (...) -> InstallRequirement
+
+    parts = parse_req_from_editable(editable_req)
+
+    source_dir = parts.link.file_path if parts.link.scheme == 'file' else None
+
+    return InstallRequirement(
+        parts.requirement, comes_from, source_dir=source_dir,
+        editable=True,
+        link=parts.link,
+        constraint=constraint,
+        use_pep517=use_pep517,
+        isolated=isolated,
+        options=options if options else {},
+        wheel_cache=wheel_cache,
+        extras=parts.extras,
+    )
+
+
+def _looks_like_path(name):
+    # type: (str) -> bool
+    """Checks whether the string "looks like" a path on the filesystem.
+
+    This does not check whether the target actually exists; it only judges
+    from the appearance.
+
+    Returns true if any of the following conditions is true:
+    * a path separator is found (either os.path.sep or os.path.altsep);
+    * a dot is found (which represents the current directory).
+    """
+    if os.path.sep in name:
+        return True
+    if os.path.altsep is not None and os.path.altsep in name:
+        return True
+    if name.startswith("."):
+        return True
+    return False
+
+
+def _get_url_from_path(path, name):
+    # type: (str, str) -> Optional[str]
+    """
+    First, check whether the provided path is an installable directory
+    (e.g. it has a setup.py). If it is, return its file: URL.
+
+    Otherwise, check whether the path is an archive file (such as a .whl).
+    If the archive exists, return its file: URL. If it does not exist but
+    the name contains an '@', return None so that the string can be treated
+    as a PEP 440 URL requirement instead.
+    """
+    if _looks_like_path(name) and os.path.isdir(path):
+        if is_installable_dir(path):
+            return path_to_url(path)
+        raise InstallationError(
+            "Directory %r is not installable. Neither 'setup.py' "
+            "nor 'pyproject.toml' found." % name
+        )
+    if not is_archive_file(path):
+        return None
+    if os.path.isfile(path):
+        return path_to_url(path)
+    urlreq_parts = name.split('@', 1)
+    if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
+        # If the path contains '@' and the part before it does not look
+        # like a path, try to treat it as a PEP 440 URL req instead.
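+        # For example, 'pip @ https://example.com/pip-20.0.tar.gz' (name and
+        # URL here are illustrative) falls through to the requirement parser.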
+ return None + logger.warning( + 'Requirement %r looks like a filename, but the ' + 'file does not exist', + name + ) + return path_to_url(path) + + +def parse_req_from_line(name, line_source): + # type: (str, Optional[str]) -> RequirementParts + if is_url(name): + marker_sep = '; ' + else: + marker_sep = ';' + if marker_sep in name: + name, markers_as_string = name.split(marker_sep, 1) + markers_as_string = markers_as_string.strip() + if not markers_as_string: + markers = None + else: + markers = Marker(markers_as_string) + else: + markers = None + name = name.strip() + req_as_string = None + path = os.path.normpath(os.path.abspath(name)) + link = None + extras_as_string = None + + if is_url(name): + link = Link(name) + else: + p, extras_as_string = _strip_extras(path) + url = _get_url_from_path(p, name) + if url is not None: + link = Link(url) + + # it's a local file, dir, or url + if link: + # Handle relative file URLs + if link.scheme == 'file' and re.search(r'\.\./', link.url): + link = Link( + path_to_url(os.path.normpath(os.path.abspath(link.path)))) + # wheel file + if link.is_wheel: + wheel = Wheel(link.filename) # can raise InvalidWheelFilename + req_as_string = "%s==%s" % (wheel.name, wheel.version) + else: + # set the req to the egg fragment. when it's not there, this + # will become an 'unnamed' requirement + req_as_string = link.egg_fragment + + # a requirement specifier + else: + req_as_string = name + + extras = convert_extras(extras_as_string) + + def with_source(text): + # type: (str) -> str + if not line_source: + return text + return '{} (from {})'.format(text, line_source) + + if req_as_string is not None: + try: + req = Requirement(req_as_string) + except InvalidRequirement: + if os.path.sep in req_as_string: + add_msg = "It looks like a path." + add_msg += deduce_helpful_msg(req_as_string) + elif ('=' in req_as_string and + not any(op in req_as_string for op in operators)): + add_msg = "= is not a valid operator. Did you mean == ?" + else: + add_msg = '' + msg = with_source( + 'Invalid requirement: {!r}'.format(req_as_string) + ) + if add_msg: + msg += '\nHint: {}'.format(add_msg) + raise InstallationError(msg) + else: + req = None + + return RequirementParts(req, link, markers, extras) + + +def install_req_from_line( + name, # type: str + comes_from=None, # type: Optional[Union[str, InstallRequirement]] + use_pep517=None, # type: Optional[bool] + isolated=False, # type: bool + options=None, # type: Optional[Dict[str, Any]] + wheel_cache=None, # type: Optional[WheelCache] + constraint=False, # type: bool + line_source=None, # type: Optional[str] +): + # type: (...) -> InstallRequirement + """Creates an InstallRequirement from a name, which might be a + requirement, directory containing 'setup.py', filename, or URL. + + :param line_source: An optional string describing where the line is from, + for logging purposes in case of an error. + """ + parts = parse_req_from_line(name, line_source) + + return InstallRequirement( + parts.requirement, comes_from, link=parts.link, markers=parts.markers, + use_pep517=use_pep517, isolated=isolated, + options=options if options else {}, + wheel_cache=wheel_cache, + constraint=constraint, + extras=parts.extras, + ) + + +def install_req_from_req_string( + req_string, # type: str + comes_from=None, # type: Optional[InstallRequirement] + isolated=False, # type: bool + wheel_cache=None, # type: Optional[WheelCache] + use_pep517=None # type: Optional[bool] +): + # type: (...) 
-> InstallRequirement + try: + req = Requirement(req_string) + except InvalidRequirement: + raise InstallationError("Invalid requirement: '%s'" % req_string) + + domains_not_allowed = [ + PyPI.file_storage_domain, + TestPyPI.file_storage_domain, + ] + if (req.url and comes_from and comes_from.link and + comes_from.link.netloc in domains_not_allowed): + # Explicitly disallow pypi packages that depend on external urls + raise InstallationError( + "Packages installed from PyPI cannot depend on packages " + "which are not also hosted on PyPI.\n" + "%s depends on %s " % (comes_from.name, req) + ) + + return InstallRequirement( + req, comes_from, isolated=isolated, wheel_cache=wheel_cache, + use_pep517=use_pep517 + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_file.py b/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_file.py new file mode 100644 index 0000000000000000000000000000000000000000..8c7810481ee6504faaf08a0b7d1e0790ad9cc089 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_file.py @@ -0,0 +1,546 @@ +""" +Requirements file parsing +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +from __future__ import absolute_import + +import optparse +import os +import re +import shlex +import sys + +from pip._vendor.six.moves import filterfalse +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.cli import cmdoptions +from pip._internal.exceptions import ( + InstallationError, + RequirementsFileParseError, +) +from pip._internal.models.search_scope import SearchScope +from pip._internal.req.constructors import ( + install_req_from_editable, + install_req_from_line, +) +from pip._internal.utils.encoding import auto_decode +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import get_url_scheme + +if MYPY_CHECK_RUNNING: + from optparse import Values + from typing import ( + Any, Callable, Iterator, List, NoReturn, Optional, Text, Tuple, + ) + + from pip._internal.req import InstallRequirement + from pip._internal.cache import WheelCache + from pip._internal.index.package_finder import PackageFinder + from pip._internal.network.session import PipSession + + ReqFileLines = Iterator[Tuple[int, Text]] + + LineParser = Callable[[Text], Tuple[str, Values]] + + +__all__ = ['parse_requirements'] + +SCHEME_RE = re.compile(r'^(http|https|file):', re.I) +COMMENT_RE = re.compile(r'(^|\s+)#.*$') + +# Matches environment variable-style values in '${MY_VARIABLE_1}' with the +# variable name consisting of only uppercase letters, digits or the '_' +# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1, +# 2013 Edition. 
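+# For example, '${API_TOKEN}' is matched by this pattern, while '$API_TOKEN'
+# and '${api_token}' are not (the variable name shown is illustrative).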
+ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
+
+SUPPORTED_OPTIONS = [
+    cmdoptions.index_url,
+    cmdoptions.extra_index_url,
+    cmdoptions.no_index,
+    cmdoptions.constraints,
+    cmdoptions.requirements,
+    cmdoptions.editable,
+    cmdoptions.find_links,
+    cmdoptions.no_binary,
+    cmdoptions.only_binary,
+    cmdoptions.require_hashes,
+    cmdoptions.pre,
+    cmdoptions.trusted_host,
+    cmdoptions.always_unzip,  # Deprecated
+]  # type: List[Callable[..., optparse.Option]]
+
+# options to be passed to requirements
+SUPPORTED_OPTIONS_REQ = [
+    cmdoptions.install_options,
+    cmdoptions.global_options,
+    cmdoptions.hash,
+]  # type: List[Callable[..., optparse.Option]]
+
+# the 'dest' string values
+SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
+
+
+class ParsedLine(object):
+    def __init__(
+            self,
+            filename,  # type: str
+            lineno,  # type: int
+            comes_from,  # type: str
+            args,  # type: str
+            opts,  # type: Values
+            constraint,  # type: bool
+    ):
+        # type: (...) -> None
+        self.filename = filename
+        self.lineno = lineno
+        self.comes_from = comes_from
+        self.args = args
+        self.opts = opts
+        self.constraint = constraint
+
+
+def parse_requirements(
+    filename,  # type: str
+    session,  # type: PipSession
+    finder=None,  # type: Optional[PackageFinder]
+    comes_from=None,  # type: Optional[str]
+    options=None,  # type: Optional[optparse.Values]
+    constraint=False,  # type: bool
+    wheel_cache=None,  # type: Optional[WheelCache]
+    use_pep517=None  # type: Optional[bool]
+):
+    # type: (...) -> Iterator[InstallRequirement]
+    """Parse a requirements file and yield InstallRequirement instances.
+
+    :param filename: Path or url of requirements file.
+    :param session: PipSession instance.
+    :param finder: Instance of pip.index.PackageFinder.
+    :param comes_from: Origin description of requirements.
+    :param options: cli options.
+    :param constraint: If true, parsing a constraint file rather than
+        requirements file.
+    :param wheel_cache: Instance of pip.wheel.WheelCache
+    :param use_pep517: Value of the --use-pep517 option.
+    """
+    skip_requirements_regex = (
+        options.skip_requirements_regex if options else None
+    )
+    line_parser = get_line_parser(finder)
+    parser = RequirementsFileParser(
+        session, line_parser, comes_from, skip_requirements_regex
+    )
+
+    for parsed_line in parser.parse(filename, constraint):
+        req = handle_line(
+            parsed_line, finder, options, session, wheel_cache, use_pep517
+        )
+        if req is not None:
+            yield req
+
+
+def preprocess(content, skip_requirements_regex):
+    # type: (Text, Optional[str]) -> ReqFileLines
+    """Split, filter, and join lines, and return a line iterator
+
+    :param content: the content of the requirements file
+    :param skip_requirements_regex: an optional regex; lines matching it
+        are skipped.
+    """
+    lines_enum = enumerate(content.splitlines(), start=1)  # type: ReqFileLines
+    lines_enum = join_lines(lines_enum)
+    lines_enum = ignore_comments(lines_enum)
+    if skip_requirements_regex:
+        lines_enum = skip_regex(lines_enum, skip_requirements_regex)
+    lines_enum = expand_env_variables(lines_enum)
+    return lines_enum
+
+
+def handle_line(
+    line,  # type: ParsedLine
+    finder=None,  # type: Optional[PackageFinder]
+    options=None,  # type: Optional[optparse.Values]
+    session=None,  # type: Optional[PipSession]
+    wheel_cache=None,  # type: Optional[WheelCache]
+    use_pep517=None,  # type: Optional[bool]
+):
+    # type: (...) -> Optional[InstallRequirement]
+    """Handle a single parsed requirements line. This can result in
+    creating/yielding requirements, or updating the finder.
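+
+    For example (the lines shown are illustrative), 'requests>=2.0' yields an
+    InstallRequirement, while '--extra-index-url https://example.com/simple'
+    only updates the finder's search scope.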
+
+    For lines that contain requirements, the only options that have an effect
+    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
+    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
+    ignored.
+
+    For lines that do not contain requirements, the only options that have an
+    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
+    be present, but are ignored. These lines may contain multiple options
+    (although our docs imply only one is supported), and all are parsed and
+    affect the finder.
+    """
+
+    # preserve for the nested code path
+    line_comes_from = '%s %s (line %s)' % (
+        '-c' if line.constraint else '-r', line.filename, line.lineno,
+    )
+
+    # return a line requirement
+    if line.args:
+        isolated = options.isolated_mode if options else False
+        if options:
+            cmdoptions.check_install_build_global(options, line.opts)
+        # get the options that apply to requirements
+        req_options = {}
+        for dest in SUPPORTED_OPTIONS_REQ_DEST:
+            if dest in line.opts.__dict__ and line.opts.__dict__[dest]:
+                req_options[dest] = line.opts.__dict__[dest]
+        line_source = 'line {} of {}'.format(line.lineno, line.filename)
+        return install_req_from_line(
+            line.args,
+            comes_from=line_comes_from,
+            use_pep517=use_pep517,
+            isolated=isolated,
+            options=req_options,
+            wheel_cache=wheel_cache,
+            constraint=line.constraint,
+            line_source=line_source,
+        )
+
+    # return an editable requirement
+    elif line.opts.editables:
+        isolated = options.isolated_mode if options else False
+        return install_req_from_editable(
+            line.opts.editables[0], comes_from=line_comes_from,
+            use_pep517=use_pep517,
+            constraint=line.constraint, isolated=isolated,
+            wheel_cache=wheel_cache
+        )
+
+    # percolate hash-checking option upward
+    elif line.opts.require_hashes:
+        options.require_hashes = line.opts.require_hashes
+
+    # set finder options
+    elif finder:
+        find_links = finder.find_links
+        index_urls = finder.index_urls
+        if line.opts.index_url:
+            index_urls = [line.opts.index_url]
+        if line.opts.no_index is True:
+            index_urls = []
+        if line.opts.extra_index_urls:
+            index_urls.extend(line.opts.extra_index_urls)
+        if line.opts.find_links:
+            # FIXME: it would be nice to keep track of the source
+            # of the find_links: support a find-links local path
+            # relative to a requirements file.
+            value = line.opts.find_links[0]
+            req_dir = os.path.dirname(os.path.abspath(line.filename))
+            relative_to_reqs_file = os.path.join(req_dir, value)
+            if os.path.exists(relative_to_reqs_file):
+                value = relative_to_reqs_file
+            find_links.append(value)
+
+        search_scope = SearchScope(
+            find_links=find_links,
+            index_urls=index_urls,
+        )
+        finder.search_scope = search_scope
+
+        if line.opts.pre:
+            finder.set_allow_all_prereleases()
+
+        if session:
+            for host in line.opts.trusted_hosts or []:
+                source = 'line {} of {}'.format(line.lineno, line.filename)
+                session.add_trusted_host(host, source=source)
+
+    return None
+
+
+class RequirementsFileParser(object):
+    def __init__(
+            self,
+            session,  # type: PipSession
+            line_parser,  # type: LineParser
+            comes_from,  # type: str
+            skip_requirements_regex,  # type: Optional[str]
+    ):
+        # type: (...) -> None
+        self._session = session
+        self._line_parser = line_parser
+        self._comes_from = comes_from
+        self._skip_requirements_regex = skip_requirements_regex
+
+    def parse(self, filename, constraint):
+        # type: (str, bool) -> Iterator[ParsedLine]
+        """Parse a given file, yielding parsed lines.
+ """ + for line in self._parse_and_recurse(filename, constraint): + yield line + + def _parse_and_recurse(self, filename, constraint): + # type: (str, bool) -> Iterator[ParsedLine] + for line in self._parse_file(filename, constraint): + if ( + not line.args and + not line.opts.editables and + (line.opts.requirements or line.opts.constraints) + ): + # parse a nested requirements file + if line.opts.requirements: + req_path = line.opts.requirements[0] + nested_constraint = False + else: + req_path = line.opts.constraints[0] + nested_constraint = True + + # original file is over http + if SCHEME_RE.search(filename): + # do a url join so relative paths work + req_path = urllib_parse.urljoin(filename, req_path) + # original file and nested file are paths + elif not SCHEME_RE.search(req_path): + # do a join so relative paths work + req_path = os.path.join( + os.path.dirname(filename), req_path, + ) + + for inner_line in self._parse_and_recurse( + req_path, nested_constraint, + ): + yield inner_line + else: + yield line + + def _parse_file(self, filename, constraint): + # type: (str, bool) -> Iterator[ParsedLine] + _, content = get_file_content( + filename, self._session, comes_from=self._comes_from + ) + + lines_enum = preprocess(content, self._skip_requirements_regex) + + for line_number, line in lines_enum: + try: + args_str, opts = self._line_parser(line) + except OptionParsingError as e: + # add offending line + msg = 'Invalid requirement: %s\n%s' % (line, e.msg) + raise RequirementsFileParseError(msg) + + yield ParsedLine( + filename, + line_number, + self._comes_from, + args_str, + opts, + constraint, + ) + + +def get_line_parser(finder): + # type: (Optional[PackageFinder]) -> LineParser + def parse_line(line): + # type: (Text) -> Tuple[str, Values] + # Build new parser for each line since it accumulates appendable + # options. + parser = build_parser() + defaults = parser.get_default_values() + defaults.index_url = None + if finder: + defaults.format_control = finder.format_control + + args_str, options_str = break_args_options(line) + # Prior to 2.7.3, shlex cannot deal with unicode entries + if sys.version_info < (2, 7, 3): + # https://github.com/python/mypy/issues/1174 + options_str = options_str.encode('utf8') # type: ignore + + # https://github.com/python/mypy/issues/1174 + opts, _ = parser.parse_args( + shlex.split(options_str), defaults) # type: ignore + + return args_str, opts + + return parse_line + + +def break_args_options(line): + # type: (Text) -> Tuple[str, Text] + """Break up the line into an args and options string. We only want to shlex + (and then optparse) the options, not the args. args can contain markers + which are corrupted by shlex. + """ + tokens = line.split(' ') + args = [] + options = tokens[:] + for token in tokens: + if token.startswith('-') or token.startswith('--'): + break + else: + args.append(token) + options.pop(0) + return ' '.join(args), ' '.join(options) # type: ignore + + +class OptionParsingError(Exception): + def __init__(self, msg): + # type: (str) -> None + self.msg = msg + + +def build_parser(): + # type: () -> optparse.OptionParser + """ + Return a parser for parsing requirement lines + """ + parser = optparse.OptionParser(add_help_option=False) + + option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ + for option_factory in option_factories: + option = option_factory() + parser.add_option(option) + + # By default optparse sys.exits on parsing errors. We want to wrap + # that in our own exception. 
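+    # For example, a malformed option such as '--no-such-flag' (illustrative)
+    # then surfaces as OptionParsingError instead of terminating the process.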
+    def parser_exit(self, msg):
+        # type: (Any, str) -> NoReturn
+        raise OptionParsingError(msg)
+    # NOTE: mypy disallows assigning to a method
+    # https://github.com/python/mypy/issues/2427
+    parser.exit = parser_exit  # type: ignore
+
+    return parser
+
+
+def join_lines(lines_enum):
+    # type: (ReqFileLines) -> ReqFileLines
+    """Joins a line ending in '\' with the following line (except when the
+    line is a comment). The joined line takes on the index of the first line.
+    """
+    primary_line_number = None
+    new_line = []  # type: List[Text]
+    for line_number, line in lines_enum:
+        if not line.endswith('\\') or COMMENT_RE.match(line):
+            if COMMENT_RE.match(line):
+                # this ensures comments are always matched later
+                line = ' ' + line
+            if new_line:
+                new_line.append(line)
+                yield primary_line_number, ''.join(new_line)
+                new_line = []
+            else:
+                yield line_number, line
+        else:
+            if not new_line:
+                primary_line_number = line_number
+            new_line.append(line.strip('\\'))
+
+    # last line contains \
+    if new_line:
+        yield primary_line_number, ''.join(new_line)
+
+    # TODO: handle space after '\'.
+
+
+def ignore_comments(lines_enum):
+    # type: (ReqFileLines) -> ReqFileLines
+    """
+    Strips comments and filters out empty lines.
+    """
+    for line_number, line in lines_enum:
+        line = COMMENT_RE.sub('', line)
+        line = line.strip()
+        if line:
+            yield line_number, line
+
+
+def skip_regex(lines_enum, pattern):
+    # type: (ReqFileLines, str) -> ReqFileLines
+    """
+    Skip lines that match the provided pattern
+
+    Note: the regex pattern is only built once
+    """
+    matcher = re.compile(pattern)
+    lines_enum = filterfalse(lambda e: matcher.search(e[1]), lines_enum)
+    return lines_enum
+
+
+def expand_env_variables(lines_enum):
+    # type: (ReqFileLines) -> ReqFileLines
+    """Replace all environment variables that can be retrieved via `os.getenv`.
+
+    The only allowed format for environment variables defined in the
+    requirement file is `${MY_VARIABLE_1}` to ensure two things:
+
+    1. Strings that contain a `$` aren't accidentally (partially) expanded.
+    2. Consistency across platforms for requirement files.
+
+    These points are the result of a discussion on the `github pull
+    request #3514 <https://github.com/pypa/pip/pull/3514>`_.
+
+    Valid characters in variable names follow the `POSIX standard
+    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
+    to uppercase letters, digits and the `_` (underscore).
+    """
+    for line_number, line in lines_enum:
+        for env_var, var_name in ENV_VAR_RE.findall(line):
+            value = os.getenv(var_name)
+            if not value:
+                continue
+
+            line = line.replace(env_var, value)
+
+        yield line_number, line
+
+
+def get_file_content(url, session, comes_from=None):
+    # type: (str, PipSession, Optional[str]) -> Tuple[str, Text]
+    """Gets the content of a file; it may be a filename, file: URL, or
+    http: URL. Returns (location, content). Content is unicode.
+    Respects # -*- coding: declarations on the retrieved files.
+
+    :param url: File path or url.
+    :param session: PipSession instance.
+    :param comes_from: Origin description of requirements.
+ """ + scheme = get_url_scheme(url) + + if scheme in ['http', 'https']: + # FIXME: catch some errors + resp = session.get(url) + resp.raise_for_status() + return resp.url, resp.text + + elif scheme == 'file': + if comes_from and comes_from.startswith('http'): + raise InstallationError( + 'Requirements file %s references URL %s, which is local' + % (comes_from, url)) + + path = url.split(':', 1)[1] + path = path.replace('\\', '/') + match = _url_slash_drive_re.match(path) + if match: + path = match.group(1) + ':' + path.split('|', 1)[1] + path = urllib_parse.unquote(path) + if path.startswith('/'): + path = '/' + path.lstrip('/') + url = path + + try: + with open(url, 'rb') as f: + content = auto_decode(f.read()) + except IOError as exc: + raise InstallationError( + 'Could not open requirements file: %s' % str(exc) + ) + return url, content + + +_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_install.py b/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_install.py new file mode 100644 index 0000000000000000000000000000000000000000..22ac24b96d361e8202979e3cbb23309792f7e090 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_install.py @@ -0,0 +1,830 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +from __future__ import absolute_import + +import logging +import os +import shutil +import sys +import zipfile + +from pip._vendor import pkg_resources, six +from pip._vendor.packaging.requirements import Requirement +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import Version +from pip._vendor.packaging.version import parse as parse_version +from pip._vendor.pep517.wrappers import Pep517HookCaller + +from pip._internal import pep425tags +from pip._internal.build_env import NoOpBuildEnvironment +from pip._internal.exceptions import InstallationError +from pip._internal.locations import get_scheme +from pip._internal.models.link import Link +from pip._internal.operations.build.metadata import generate_metadata +from pip._internal.operations.build.metadata_legacy import \ + generate_metadata as generate_metadata_legacy +from pip._internal.operations.install.editable_legacy import \ + install_editable as install_editable_legacy +from pip._internal.operations.install.legacy import install as install_legacy +from pip._internal.operations.install.wheel import install_wheel +from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path +from pip._internal.req.req_uninstall import UninstallPathSet +from pip._internal.utils.deprecation import deprecated +from pip._internal.utils.hashes import Hashes +from pip._internal.utils.logging import indent_log +from pip._internal.utils.marker_files import ( + PIP_DELETE_MARKER_FILENAME, + has_delete_marker_file, + write_delete_marker_file, +) +from pip._internal.utils.misc import ( + ask_path_exists, + backup_dir, + display_path, + dist_in_site_packages, + dist_in_usersite, + get_installed_version, + hide_url, + redact_auth_from_url, + rmtree, +) +from pip._internal.utils.packaging import get_metadata +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.virtualenv import running_under_virtualenv +from pip._internal.vcs import vcs + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, Dict, Iterable, List, Optional, Sequence, Union, 
+ ) + from pip._internal.build_env import BuildEnvironment + from pip._internal.cache import WheelCache + from pip._internal.index.package_finder import PackageFinder + from pip._vendor.pkg_resources import Distribution + from pip._vendor.packaging.specifiers import SpecifierSet + from pip._vendor.packaging.markers import Marker + + +logger = logging.getLogger(__name__) + + +def _get_dist(metadata_directory): + # type: (str) -> Distribution + """Return a pkg_resources.Distribution for the provided + metadata directory. + """ + dist_dir = metadata_directory.rstrip(os.sep) + + # Determine the correct Distribution object type. + if dist_dir.endswith(".egg-info"): + dist_cls = pkg_resources.Distribution + else: + assert dist_dir.endswith(".dist-info") + dist_cls = pkg_resources.DistInfoDistribution + + # Build a PathMetadata object, from path to metadata. :wink: + base_dir, dist_dir_name = os.path.split(dist_dir) + dist_name = os.path.splitext(dist_dir_name)[0] + metadata = pkg_resources.PathMetadata(base_dir, dist_dir) + + return dist_cls( + base_dir, + project_name=dist_name, + metadata=metadata, + ) + + +class InstallRequirement(object): + """ + Represents something that may be installed later on, may have information + about where to fetch the relevant requirement and also contains logic for + installing the said requirement. + """ + + def __init__( + self, + req, # type: Optional[Requirement] + comes_from, # type: Optional[Union[str, InstallRequirement]] + source_dir=None, # type: Optional[str] + editable=False, # type: bool + link=None, # type: Optional[Link] + markers=None, # type: Optional[Marker] + use_pep517=None, # type: Optional[bool] + isolated=False, # type: bool + options=None, # type: Optional[Dict[str, Any]] + wheel_cache=None, # type: Optional[WheelCache] + constraint=False, # type: bool + extras=() # type: Iterable[str] + ): + # type: (...) -> None + assert req is None or isinstance(req, Requirement), req + self.req = req + self.comes_from = comes_from + self.constraint = constraint + if source_dir is None: + self.source_dir = None # type: Optional[str] + else: + self.source_dir = os.path.normpath(os.path.abspath(source_dir)) + self.editable = editable + + self._wheel_cache = wheel_cache + if link is None and req and req.url: + # PEP 508 URL requirement + link = Link(req.url) + self.link = self.original_link = link + # Path to any downloaded or already-existing package. + self.local_file_path = None # type: Optional[str] + if self.link and self.link.is_file: + self.local_file_path = self.link.file_path + + if extras: + self.extras = extras + elif req: + self.extras = { + pkg_resources.safe_extra(extra) for extra in req.extras + } + else: + self.extras = set() + if markers is None and req: + markers = req.marker + self.markers = markers + + # This holds the pkg_resources.Distribution object if this requirement + # is already available: + self.satisfied_by = None # type: Optional[Distribution] + # Whether the installation process should try to uninstall an existing + # distribution before installing this requirement. 
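+        # (For instance, check_if_exists() below sets this when an installed
+        # distribution conflicts with this requirement.)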
+        self.should_reinstall = False
+        # Temporary build location
+        self._temp_build_dir = None  # type: Optional[TempDirectory]
+        # Set to True after successful installation
+        self.install_succeeded = None  # type: Optional[bool]
+        self.options = options if options else {}
+        # Set to True after successful preparation of this requirement
+        self.prepared = False
+        self.is_direct = False
+
+        self.isolated = isolated
+        self.build_env = NoOpBuildEnvironment()  # type: BuildEnvironment
+
+        # For PEP 517, the directory where we request the project metadata
+        # gets stored. We need this to pass to build_wheel, so the backend
+        # can ensure that the wheel matches the metadata (see the PEP for
+        # details).
+        self.metadata_directory = None  # type: Optional[str]
+
+        # The static build requirements (from pyproject.toml)
+        self.pyproject_requires = None  # type: Optional[List[str]]
+
+        # Build requirements that we will check are available
+        self.requirements_to_check = []  # type: List[str]
+
+        # The PEP 517 backend we should use to build the project
+        self.pep517_backend = None  # type: Optional[Pep517HookCaller]
+
+        # Are we using PEP 517 for this requirement?
+        # After pyproject.toml has been loaded, the only valid values are True
+        # and False. Before loading, None is valid (meaning "use the default").
+        # Setting an explicit value before loading pyproject.toml is supported,
+        # but after loading this flag should be treated as read only.
+        self.use_pep517 = use_pep517
+
+    def __str__(self):
+        # type: () -> str
+        if self.req:
+            s = str(self.req)
+            if self.link:
+                s += ' from %s' % redact_auth_from_url(self.link.url)
+        elif self.link:
+            s = redact_auth_from_url(self.link.url)
+        else:
+            s = '<InstallRequirement>'
+        if self.satisfied_by is not None:
+            s += ' in %s' % display_path(self.satisfied_by.location)
+        if self.comes_from:
+            if isinstance(self.comes_from, six.string_types):
+                comes_from = self.comes_from  # type: Optional[str]
+            else:
+                comes_from = self.comes_from.from_path()
+            if comes_from:
+                s += ' (from %s)' % comes_from
+        return s
+
+    def __repr__(self):
+        # type: () -> str
+        return '<%s object: %s editable=%r>' % (
+            self.__class__.__name__, str(self), self.editable)
+
+    def format_debug(self):
+        # type: () -> str
+        """An untested helper for getting state, for debugging.
+        """
+        attributes = vars(self)
+        names = sorted(attributes)
+
+        state = (
+            "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
+        )
+        return '<{name} object: {{{state}}}>'.format(
+            name=self.__class__.__name__,
+            state=", ".join(state),
+        )
+
+    def populate_link(self, finder, upgrade, require_hashes):
+        # type: (PackageFinder, bool, bool) -> None
+        """Ensure that if a link can be found for this, that it is found.
+
+        Note that self.link may still be None - if upgrade is False and the
+        requirement is already installed.
+
+        If require_hashes is True, don't use the wheel cache, because cached
+        wheels, always built locally, have different hashes than the files
+        downloaded from the index server and thus throw false hash mismatches.
+        Furthermore, cached wheels at present have nondeterministic contents
+        due to file modification times.
+ """ + if self.link is None: + self.link = finder.find_requirement(self, upgrade) + if self._wheel_cache is not None and not require_hashes: + old_link = self.link + supported_tags = pep425tags.get_supported() + self.link = self._wheel_cache.get( + link=self.link, + package_name=self.name, + supported_tags=supported_tags, + ) + if old_link != self.link: + logger.debug('Using cached wheel link: %s', self.link) + + # Things that are valid for all kinds of requirements? + @property + def name(self): + # type: () -> Optional[str] + if self.req is None: + return None + return six.ensure_str(pkg_resources.safe_name(self.req.name)) + + @property + def specifier(self): + # type: () -> SpecifierSet + return self.req.specifier + + @property + def is_pinned(self): + # type: () -> bool + """Return whether I am pinned to an exact version. + + For example, some-package==1.2 is pinned; some-package>1.2 is not. + """ + specifiers = self.specifier + return (len(specifiers) == 1 and + next(iter(specifiers)).operator in {'==', '==='}) + + @property + def installed_version(self): + # type: () -> Optional[str] + return get_installed_version(self.name) + + def match_markers(self, extras_requested=None): + # type: (Optional[Iterable[str]]) -> bool + if not extras_requested: + # Provide an extra to safely evaluate the markers + # without matching any extra + extras_requested = ('',) + if self.markers is not None: + return any( + self.markers.evaluate({'extra': extra}) + for extra in extras_requested) + else: + return True + + @property + def has_hash_options(self): + # type: () -> bool + """Return whether any known-good hashes are specified as options. + + These activate --require-hashes mode; hashes specified as part of a + URL do not. + + """ + return bool(self.options.get('hashes', {})) + + def hashes(self, trust_internet=True): + # type: (bool) -> Hashes + """Return a hash-comparer that considers my option- and URL-based + hashes to be known-good. + + Hashes in URLs--ones embedded in the requirements file, not ones + downloaded from an index server--are almost peers with ones from + flags. They satisfy --require-hashes (whether it was implicitly or + explicitly activated) but do not activate it. md5 and sha224 are not + allowed in flags, which should nudge people toward good algos. We + always OR all hashes together, even ones from URLs. + + :param trust_internet: Whether to trust URL-based (#md5=...) hashes + downloaded from the internet, as by populate_link() + + """ + good_hashes = self.options.get('hashes', {}).copy() + link = self.link if trust_internet else self.original_link + if link and link.hash: + good_hashes.setdefault(link.hash_name, []).append(link.hash) + return Hashes(good_hashes) + + def from_path(self): + # type: () -> Optional[str] + """Format a nice indicator to show where this "comes from" + """ + if self.req is None: + return None + s = str(self.req) + if self.comes_from: + if isinstance(self.comes_from, six.string_types): + comes_from = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += '->' + comes_from + return s + + def ensure_build_location(self, build_dir): + # type: (str) -> str + assert build_dir is not None + if self._temp_build_dir is not None: + assert self._temp_build_dir.path + return self._temp_build_dir.path + if self.req is None: + # Some systems have /tmp as a symlink which confuses custom + # builds (such as numpy). Thus, we ensure that the real path + # is returned. 
+ self._temp_build_dir = TempDirectory(kind="req-build") + + return self._temp_build_dir.path + if self.editable: + name = self.name.lower() + else: + name = self.name + # FIXME: Is there a better place to create the build_dir? (hg and bzr + # need this) + if not os.path.exists(build_dir): + logger.debug('Creating directory %s', build_dir) + os.makedirs(build_dir) + write_delete_marker_file(build_dir) + return os.path.join(build_dir, name) + + def _set_requirement(self): + # type: () -> None + """Set requirement after generating metadata. + """ + assert self.req is None + assert self.metadata is not None + assert self.source_dir is not None + + # Construct a Requirement object from the generated metadata + if isinstance(parse_version(self.metadata["Version"]), Version): + op = "==" + else: + op = "===" + + self.req = Requirement( + "".join([ + self.metadata["Name"], + op, + self.metadata["Version"], + ]) + ) + + def warn_on_mismatching_name(self): + # type: () -> None + metadata_name = canonicalize_name(self.metadata["Name"]) + if canonicalize_name(self.req.name) == metadata_name: + # Everything is fine. + return + + # If we're here, there's a mismatch. Log a warning about it. + logger.warning( + 'Generating metadata for package %s ' + 'produced metadata for project name %s. Fix your ' + '#egg=%s fragments.', + self.name, metadata_name, self.name + ) + self.req = Requirement(metadata_name) + + def remove_temporary_source(self): + # type: () -> None + """Remove the source files from this requirement, if they are marked + for deletion""" + if self.source_dir and has_delete_marker_file(self.source_dir): + logger.debug('Removing source in %s', self.source_dir) + rmtree(self.source_dir) + self.source_dir = None + if self._temp_build_dir: + self._temp_build_dir.cleanup() + self._temp_build_dir = None + self.build_env.cleanup() + + def check_if_exists(self, use_user_site): + # type: (bool) -> None + """Find an installed distribution that satisfies or conflicts + with this requirement, and set self.satisfied_by or + self.should_reinstall appropriately. + """ + if self.req is None: + return + # get_distribution() will resolve the entire list of requirements + # anyway, and we've already determined that we need the requirement + # in question, so strip the marker so that we don't try to + # evaluate it. 
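+        # For example, 'pkg==1.0; python_version < "3.8"' (a hypothetical
+        # requirement) is looked up simply as 'pkg==1.0'.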
+ no_marker = Requirement(str(self.req)) + no_marker.marker = None + try: + self.satisfied_by = pkg_resources.get_distribution(str(no_marker)) + except pkg_resources.DistributionNotFound: + return + except pkg_resources.VersionConflict: + existing_dist = pkg_resources.get_distribution( + self.req.name + ) + if use_user_site: + if dist_in_usersite(existing_dist): + self.should_reinstall = True + elif (running_under_virtualenv() and + dist_in_site_packages(existing_dist)): + raise InstallationError( + "Will not install to the user site because it will " + "lack sys.path precedence to %s in %s" % + (existing_dist.project_name, existing_dist.location) + ) + else: + self.should_reinstall = True + else: + if self.editable and self.satisfied_by: + self.should_reinstall = True + # when installing editables, nothing pre-existing should ever + # satisfy + self.satisfied_by = None + + # Things valid for wheels + @property + def is_wheel(self): + # type: () -> bool + if not self.link: + return False + return self.link.is_wheel + + # Things valid for sdists + @property + def unpacked_source_directory(self): + # type: () -> str + return os.path.join( + self.source_dir, + self.link and self.link.subdirectory_fragment or '') + + @property + def setup_py_path(self): + # type: () -> str + assert self.source_dir, "No source dir for %s" % self + setup_py = os.path.join(self.unpacked_source_directory, 'setup.py') + + # Python2 __file__ should not be unicode + if six.PY2 and isinstance(setup_py, six.text_type): + setup_py = setup_py.encode(sys.getfilesystemencoding()) + + return setup_py + + @property + def pyproject_toml_path(self): + # type: () -> str + assert self.source_dir, "No source dir for %s" % self + return make_pyproject_path(self.unpacked_source_directory) + + def load_pyproject_toml(self): + # type: () -> None + """Load the pyproject.toml file. + + After calling this routine, all of the attributes related to PEP 517 + processing for this requirement have been set. In particular, the + use_pep517 attribute can be used to determine whether we should + follow the PEP 517 or legacy (setup.py) code path. + """ + pyproject_toml_data = load_pyproject_toml( + self.use_pep517, + self.pyproject_toml_path, + self.setup_py_path, + str(self) + ) + + if pyproject_toml_data is None: + self.use_pep517 = False + return + + self.use_pep517 = True + requires, backend, check, backend_path = pyproject_toml_data + self.requirements_to_check = check + self.pyproject_requires = requires + self.pep517_backend = Pep517HookCaller( + self.unpacked_source_directory, backend, backend_path=backend_path, + ) + + def _generate_metadata(self): + # type: () -> str + """Invokes metadata generator functions, with the required arguments. + """ + if not self.use_pep517: + assert self.unpacked_source_directory + + return generate_metadata_legacy( + build_env=self.build_env, + setup_py_path=self.setup_py_path, + source_dir=self.unpacked_source_directory, + editable=self.editable, + isolated=self.isolated, + details=self.name or "from {}".format(self.link) + ) + + assert self.pep517_backend is not None + + return generate_metadata( + build_env=self.build_env, + backend=self.pep517_backend, + ) + + def prepare_metadata(self): + # type: () -> None + """Ensure that project metadata is available. + + Under PEP 517, call the backend hook to prepare the metadata. + Under legacy processing, call setup.py egg-info. 
+ """ + assert self.source_dir + + with indent_log(): + self.metadata_directory = self._generate_metadata() + + # Act on the newly generated metadata, based on the name and version. + if not self.name: + self._set_requirement() + else: + self.warn_on_mismatching_name() + + self.assert_source_matches_version() + + @property + def metadata(self): + # type: () -> Any + if not hasattr(self, '_metadata'): + self._metadata = get_metadata(self.get_dist()) + + return self._metadata + + def get_dist(self): + # type: () -> Distribution + return _get_dist(self.metadata_directory) + + def assert_source_matches_version(self): + # type: () -> None + assert self.source_dir + version = self.metadata['version'] + if self.req.specifier and version not in self.req.specifier: + logger.warning( + 'Requested %s, but installing version %s', + self, + version, + ) + else: + logger.debug( + 'Source in %s has version %s, which satisfies requirement %s', + display_path(self.source_dir), + version, + self, + ) + + # For both source distributions and editables + def ensure_has_source_dir(self, parent_dir): + # type: (str) -> None + """Ensure that a source_dir is set. + + This will create a temporary build dir if the name of the requirement + isn't known yet. + + :param parent_dir: The ideal pip parent_dir for the source_dir. + Generally src_dir for editables and build_dir for sdists. + :return: self.source_dir + """ + if self.source_dir is None: + self.source_dir = self.ensure_build_location(parent_dir) + + # For editable installations + def update_editable(self, obtain=True): + # type: (bool) -> None + if not self.link: + logger.debug( + "Cannot update repository at %s; repository location is " + "unknown", + self.source_dir, + ) + return + assert self.editable + assert self.source_dir + if self.link.scheme == 'file': + # Static paths don't get updated + return + assert '+' in self.link.url, "bad url: %r" % self.link.url + vc_type, url = self.link.url.split('+', 1) + vcs_backend = vcs.get_backend(vc_type) + if vcs_backend: + if not self.link.is_vcs: + reason = ( + "This form of VCS requirement is being deprecated: {}." + ).format( + self.link.url + ) + replacement = None + if self.link.url.startswith("git+git@"): + replacement = ( + "git+https://git@example.com/..., " + "git+ssh://git@example.com/..., " + "or the insecure git+git://git@example.com/..." + ) + deprecated(reason, replacement, gone_in="21.0", issue=7554) + hidden_url = hide_url(self.link.url) + if obtain: + vcs_backend.obtain(self.source_dir, url=hidden_url) + else: + vcs_backend.export(self.source_dir, url=hidden_url) + else: + assert 0, ( + 'Unexpected version control type (in %s): %s' + % (self.link, vc_type)) + + # Top-level Actions + def uninstall(self, auto_confirm=False, verbose=False): + # type: (bool, bool) -> Optional[UninstallPathSet] + """ + Uninstall the distribution currently satisfying this requirement. + + Prompts before removing or modifying files unless + ``auto_confirm`` is True. + + Refuses to delete or modify files outside of ``sys.prefix`` - + thus uninstallation within a virtual environment can only + modify that virtual environment, even if the virtualenv is + linked to global site-packages. 
+ + """ + assert self.req + try: + dist = pkg_resources.get_distribution(self.req.name) + except pkg_resources.DistributionNotFound: + logger.warning("Skipping %s as it is not installed.", self.name) + return None + else: + logger.info('Found existing installation: %s', dist) + + uninstalled_pathset = UninstallPathSet.from_dist(dist) + uninstalled_pathset.remove(auto_confirm, verbose) + return uninstalled_pathset + + def _get_archive_name(self, path, parentdir, rootdir): + # type: (str, str, str) -> str + + def _clean_zip_name(name, prefix): + # type: (str, str) -> str + assert name.startswith(prefix + os.path.sep), ( + "name %r doesn't start with prefix %r" % (name, prefix) + ) + name = name[len(prefix) + 1:] + name = name.replace(os.path.sep, '/') + return name + + path = os.path.join(parentdir, path) + name = _clean_zip_name(path, rootdir) + return self.name + '/' + name + + def archive(self, build_dir): + # type: (str) -> None + """Saves archive to provided build_dir. + + Used for saving downloaded VCS requirements as part of `pip download`. + """ + assert self.source_dir + + create_archive = True + archive_name = '%s-%s.zip' % (self.name, self.metadata["version"]) + archive_path = os.path.join(build_dir, archive_name) + + if os.path.exists(archive_path): + response = ask_path_exists( + 'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort ' % + display_path(archive_path), ('i', 'w', 'b', 'a')) + if response == 'i': + create_archive = False + elif response == 'w': + logger.warning('Deleting %s', display_path(archive_path)) + os.remove(archive_path) + elif response == 'b': + dest_file = backup_dir(archive_path) + logger.warning( + 'Backing up %s to %s', + display_path(archive_path), + display_path(dest_file), + ) + shutil.move(archive_path, dest_file) + elif response == 'a': + sys.exit(-1) + + if not create_archive: + return + + zip_output = zipfile.ZipFile( + archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True, + ) + with zip_output: + dir = os.path.normcase( + os.path.abspath(self.unpacked_source_directory) + ) + for dirpath, dirnames, filenames in os.walk(dir): + if 'pip-egg-info' in dirnames: + dirnames.remove('pip-egg-info') + for dirname in dirnames: + dir_arcname = self._get_archive_name( + dirname, parentdir=dirpath, rootdir=dir, + ) + zipdir = zipfile.ZipInfo(dir_arcname + '/') + zipdir.external_attr = 0x1ED << 16 # 0o755 + zip_output.writestr(zipdir, '') + for filename in filenames: + if filename == PIP_DELETE_MARKER_FILENAME: + continue + file_arcname = self._get_archive_name( + filename, parentdir=dirpath, rootdir=dir, + ) + filename = os.path.join(dirpath, filename) + zip_output.write(filename, file_arcname) + + logger.info('Saved %s', display_path(archive_path)) + + def install( + self, + install_options, # type: List[str] + global_options=None, # type: Optional[Sequence[str]] + root=None, # type: Optional[str] + home=None, # type: Optional[str] + prefix=None, # type: Optional[str] + warn_script_location=True, # type: bool + use_user_site=False, # type: bool + pycompile=True # type: bool + ): + # type: (...) 
-> None + scheme = get_scheme( + self.name, + user=use_user_site, + home=home, + root=root, + isolated=self.isolated, + prefix=prefix, + ) + + global_options = global_options if global_options is not None else [] + if self.editable: + install_editable_legacy( + install_options, + global_options, + prefix=prefix, + home=home, + use_user_site=use_user_site, + name=self.name, + setup_py_path=self.setup_py_path, + isolated=self.isolated, + build_env=self.build_env, + unpacked_source_directory=self.unpacked_source_directory, + ) + self.install_succeeded = True + return + + if self.is_wheel: + assert self.local_file_path + install_wheel( + self.name, + self.local_file_path, + scheme=scheme, + req_description=str(self.req), + pycompile=pycompile, + warn_script_location=warn_script_location, + ) + self.install_succeeded = True + return + + install_legacy( + self, + install_options=install_options, + global_options=global_options, + root=root, + home=home, + prefix=prefix, + use_user_site=use_user_site, + pycompile=pycompile, + scheme=scheme, + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_set.py b/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_set.py new file mode 100644 index 0000000000000000000000000000000000000000..087ac5925f52c99345cffe693d6a392e39bd70c4 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_set.py @@ -0,0 +1,209 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +from __future__ import absolute_import + +import logging +from collections import OrderedDict + +from pip._vendor.packaging.utils import canonicalize_name + +from pip._internal import pep425tags +from pip._internal.exceptions import InstallationError +from pip._internal.models.wheel import Wheel +from pip._internal.utils.logging import indent_log +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict, Iterable, List, Optional, Tuple + from pip._internal.req.req_install import InstallRequirement + + +logger = logging.getLogger(__name__) + + +class RequirementSet(object): + + def __init__(self, check_supported_wheels=True): + # type: (bool) -> None + """Create a RequirementSet. 
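+
+        :param check_supported_wheels: whether ``add_requirement`` should
+            reject wheels that are unsupported on the current platform
+            (a descriptive note added here; behaviour per the check below).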
+ """ + + self.requirements = OrderedDict() # type: Dict[str, InstallRequirement] # noqa: E501 + self.check_supported_wheels = check_supported_wheels + + self.unnamed_requirements = [] # type: List[InstallRequirement] + self.successfully_downloaded = [] # type: List[InstallRequirement] + self.reqs_to_cleanup = [] # type: List[InstallRequirement] + + def __str__(self): + # type: () -> str + requirements = sorted( + (req for req in self.requirements.values() if not req.comes_from), + key=lambda req: canonicalize_name(req.name), + ) + return ' '.join(str(req.req) for req in requirements) + + def __repr__(self): + # type: () -> str + requirements = sorted( + self.requirements.values(), + key=lambda req: canonicalize_name(req.name), + ) + + format_string = '<{classname} object; {count} requirement(s): {reqs}>' + return format_string.format( + classname=self.__class__.__name__, + count=len(requirements), + reqs=', '.join(str(req.req) for req in requirements), + ) + + def add_unnamed_requirement(self, install_req): + # type: (InstallRequirement) -> None + assert not install_req.name + self.unnamed_requirements.append(install_req) + + def add_named_requirement(self, install_req): + # type: (InstallRequirement) -> None + assert install_req.name + + project_name = canonicalize_name(install_req.name) + self.requirements[project_name] = install_req + + def add_requirement( + self, + install_req, # type: InstallRequirement + parent_req_name=None, # type: Optional[str] + extras_requested=None # type: Optional[Iterable[str]] + ): + # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]] # noqa: E501 + """Add install_req as a requirement to install. + + :param parent_req_name: The name of the requirement that needed this + added. The name is used because when multiple unnamed requirements + resolve to the same name, we could otherwise end up with dependency + links that point outside the Requirements set. parent_req must + already be added. Note that None implies that this is a user + supplied requirement, vs an inferred one. + :param extras_requested: an iterable of extras used to evaluate the + environment markers. + :return: Additional requirements to scan. That is either [] if + the requirement is not applicable, or [install_req] if the + requirement is applicable and has just been added. + """ + # If the markers do not match, ignore this requirement. + if not install_req.match_markers(extras_requested): + logger.info( + "Ignoring %s: markers '%s' don't match your environment", + install_req.name, install_req.markers, + ) + return [], None + + # If the wheel is not supported, raise an error. + # Should check this after filtering out based on environment markers to + # allow specifying different wheels based on the environment/OS, in a + # single requirements file. + if install_req.link and install_req.link.is_wheel: + wheel = Wheel(install_req.link.filename) + tags = pep425tags.get_supported() + if (self.check_supported_wheels and not wheel.supported(tags)): + raise InstallationError( + "%s is not a supported wheel on this platform." % + wheel.filename + ) + + # This next bit is really a sanity check. + assert install_req.is_direct == (parent_req_name is None), ( + "a direct req shouldn't have a parent and also, " + "a non direct req should have a parent" + ) + + # Unnamed requirements are scanned again and the requirement won't be + # added as a dependency until after scanning. 
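+        # (An "unnamed" requirement is typically a direct path or URL
+        # whose project name is not yet known; the name becomes known
+        # only once the requirement's metadata has been generated.)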
+ if not install_req.name: + self.add_unnamed_requirement(install_req) + return [install_req], None + + try: + existing_req = self.get_requirement(install_req.name) + except KeyError: + existing_req = None + + has_conflicting_requirement = ( + parent_req_name is None and + existing_req and + not existing_req.constraint and + existing_req.extras == install_req.extras and + existing_req.req.specifier != install_req.req.specifier + ) + if has_conflicting_requirement: + raise InstallationError( + "Double requirement given: %s (already in %s, name=%r)" + % (install_req, existing_req, install_req.name) + ) + + # When no existing requirement exists, add the requirement as a + # dependency and it will be scanned again after. + if not existing_req: + self.add_named_requirement(install_req) + # We'd want to rescan this requirement later + return [install_req], install_req + + # Assume there's no need to scan, and that we've already + # encountered this for scanning. + if install_req.constraint or not existing_req.constraint: + return [], existing_req + + does_not_satisfy_constraint = ( + install_req.link and + not ( + existing_req.link and + install_req.link.path == existing_req.link.path + ) + ) + if does_not_satisfy_constraint: + self.reqs_to_cleanup.append(install_req) + raise InstallationError( + "Could not satisfy constraints for '%s': " + "installation from path or url cannot be " + "constrained to a version" % install_req.name, + ) + # If we're now installing a constraint, mark the existing + # object for real installation. + existing_req.constraint = False + existing_req.extras = tuple(sorted( + set(existing_req.extras) | set(install_req.extras) + )) + logger.debug( + "Setting %s extras to: %s", + existing_req, existing_req.extras, + ) + # Return the existing requirement for addition to the parent and + # scanning again. + return [existing_req], existing_req + + def has_requirement(self, name): + # type: (str) -> bool + project_name = canonicalize_name(name) + + return ( + project_name in self.requirements and + not self.requirements[project_name].constraint + ) + + def get_requirement(self, name): + # type: (str) -> InstallRequirement + project_name = canonicalize_name(name) + + if project_name in self.requirements: + return self.requirements[project_name] + + raise KeyError("No project with the name %r" % name) + + def cleanup_files(self): + # type: () -> None + """Clean up files, remove builds.""" + logger.debug('Cleaning up...') + with indent_log(): + for req in self.reqs_to_cleanup: + req.remove_temporary_source() diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_tracker.py b/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..84e0c0419fc7064b05b2de7507a38aeba3c2dfad --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_tracker.py @@ -0,0 +1,150 @@ +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False + +from __future__ import absolute_import + +import contextlib +import errno +import hashlib +import logging +import os + +from pip._vendor import contextlib2 + +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from types import TracebackType + from typing import Dict, Iterator, Optional, Set, Type, Union + from pip._internal.req.req_install import InstallRequirement + from pip._internal.models.link import Link + +logger = logging.getLogger(__name__) + + +@contextlib.contextmanager +def update_env_context_manager(**changes): + # type: (str) -> Iterator[None] + target = os.environ + + # Save values from the target and change them. + non_existent_marker = object() + saved_values = {} # type: Dict[str, Union[object, str]] + for name, new_value in changes.items(): + try: + saved_values[name] = target[name] + except KeyError: + saved_values[name] = non_existent_marker + target[name] = new_value + + try: + yield + finally: + # Restore original values in the target. + for name, original_value in saved_values.items(): + if original_value is non_existent_marker: + del target[name] + else: + assert isinstance(original_value, str) # for mypy + target[name] = original_value + + +@contextlib.contextmanager +def get_requirement_tracker(): + # type: () -> Iterator[RequirementTracker] + root = os.environ.get('PIP_REQ_TRACKER') + with contextlib2.ExitStack() as ctx: + if root is None: + root = ctx.enter_context( + TempDirectory(kind='req-tracker') + ).path + ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root)) + logger.debug("Initialized build tracking at %s", root) + + with RequirementTracker(root) as tracker: + yield tracker + + +class RequirementTracker(object): + + def __init__(self, root): + # type: (str) -> None + self._root = root + self._entries = set() # type: Set[InstallRequirement] + logger.debug("Created build tracker: %s", self._root) + + def __enter__(self): + # type: () -> RequirementTracker + logger.debug("Entered build tracker: %s", self._root) + return self + + def __exit__( + self, + exc_type, # type: Optional[Type[BaseException]] + exc_val, # type: Optional[BaseException] + exc_tb # type: Optional[TracebackType] + ): + # type: (...) -> None + self.cleanup() + + def _entry_path(self, link): + # type: (Link) -> str + hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest() + return os.path.join(self._root, hashed) + + def add(self, req): + # type: (InstallRequirement) -> None + """Add an InstallRequirement to build tracking. + """ + + # Get the file to write information about this requirement. + entry_path = self._entry_path(req.link) + + # Try reading from the file. If it exists and can be read from, a build + # is already in progress, so a LookupError is raised. + try: + with open(entry_path) as fp: + contents = fp.read() + except IOError as e: + # if the error is anything other than "file does not exist", raise. + if e.errno != errno.ENOENT: + raise + else: + message = '%s is already being built: %s' % (req.link, contents) + raise LookupError(message) + + # If we're here, req should really not be building already. + assert req not in self._entries + + # Start tracking this requirement. 
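+        # (Writing str(req) here lets a concurrent attempt to build the
+        # same link report, via the LookupError above, what is already
+        # being built.)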
+ with open(entry_path, 'w') as fp: + fp.write(str(req)) + self._entries.add(req) + + logger.debug('Added %s to build tracker %r', req, self._root) + + def remove(self, req): + # type: (InstallRequirement) -> None + """Remove an InstallRequirement from build tracking. + """ + + # Delete the created file and the corresponding entries. + os.unlink(self._entry_path(req.link)) + self._entries.remove(req) + + logger.debug('Removed %s from build tracker %r', req, self._root) + + def cleanup(self): + # type: () -> None + for req in set(self._entries): + self.remove(req) + + logger.debug("Removed build tracker: %r", self._root) + + @contextlib.contextmanager + def track(self, req): + # type: (InstallRequirement) -> Iterator[None] + self.add(req) + yield + self.remove(req) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_uninstall.py b/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_uninstall.py new file mode 100644 index 0000000000000000000000000000000000000000..5971b130ec029478d59ea1761630605deb4a8b39 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/req/req_uninstall.py @@ -0,0 +1,644 @@ +from __future__ import absolute_import + +import csv +import functools +import logging +import os +import sys +import sysconfig + +from pip._vendor import pkg_resources + +from pip._internal.exceptions import UninstallationError +from pip._internal.locations import bin_py, bin_user +from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ( + FakeFile, + ask, + dist_in_usersite, + dist_is_local, + egg_link_path, + is_local, + normalize_path, + renames, + rmtree, +) +from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple, + ) + from pip._vendor.pkg_resources import Distribution + +logger = logging.getLogger(__name__) + + +def _script_names(dist, script_name, is_gui): + # type: (Distribution, str, bool) -> List[str] + """Create the fully qualified name of the files created by + {console,gui}_scripts for the given ``dist``. + Returns the list of file names + """ + if dist_in_usersite(dist): + bin_dir = bin_user + else: + bin_dir = bin_py + exe_name = os.path.join(bin_dir, script_name) + paths_to_remove = [exe_name] + if WINDOWS: + paths_to_remove.append(exe_name + '.exe') + paths_to_remove.append(exe_name + '.exe.manifest') + if is_gui: + paths_to_remove.append(exe_name + '-script.pyw') + else: + paths_to_remove.append(exe_name + '-script.py') + return paths_to_remove + + +def _unique(fn): + # type: (Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]] + @functools.wraps(fn) + def unique(*args, **kw): + # type: (Any, Any) -> Iterator[Any] + seen = set() # type: Set[Any] + for item in fn(*args, **kw): + if item not in seen: + seen.add(item) + yield item + return unique + + +@_unique +def uninstallation_paths(dist): + # type: (Distribution) -> Iterator[str] + """ + Yield all the uninstallation paths for dist based on RECORD-without-.py[co] + + Yield paths to all the files in RECORD. For each .py file in RECORD, add + the .pyc and .pyo in the same directory. + + UninstallPathSet.add() takes care of the __pycache__ .py[co]. 
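+    (Each row of RECORD names one installed file; its first column is a
+    path relative to the distribution's location.)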
+    """
+    r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
+    for row in r:
+        path = os.path.join(dist.location, row[0])
+        yield path
+        if path.endswith('.py'):
+            dn, fn = os.path.split(path)
+            base = fn[:-3]
+            path = os.path.join(dn, base + '.pyc')
+            yield path
+            path = os.path.join(dn, base + '.pyo')
+            yield path
+
+
+def compact(paths):
+    # type: (Iterable[str]) -> Set[str]
+    """Compact a path set to contain the minimal number of paths
+    necessary to contain all paths in the set. If /a/path/ and
+    /a/path/to/a/file.txt are both in the set, leave only the
+    shorter path."""
+
+    sep = os.path.sep
+    short_paths = set()  # type: Set[str]
+    for path in sorted(paths, key=len):
+        should_skip = any(
+            path.startswith(shortpath.rstrip("*")) and
+            path[len(shortpath.rstrip("*").rstrip(sep))] == sep
+            for shortpath in short_paths
+        )
+        if not should_skip:
+            short_paths.add(path)
+    return short_paths
+
+
+def compress_for_rename(paths):
+    # type: (Iterable[str]) -> Set[str]
+    """Returns a set containing the paths that need to be renamed.
+
+    This set may include directories when the original sequence of paths
+    included every file on disk.
+    """
+    case_map = dict((os.path.normcase(p), p) for p in paths)
+    remaining = set(case_map)
+    unchecked = sorted(set(os.path.split(p)[0]
+                           for p in case_map.values()), key=len)
+    wildcards = set()  # type: Set[str]
+
+    def norm_join(*a):
+        # type: (str) -> str
+        return os.path.normcase(os.path.join(*a))
+
+    for root in unchecked:
+        if any(os.path.normcase(root).startswith(w)
+               for w in wildcards):
+            # This directory has already been handled.
+            continue
+
+        all_files = set()  # type: Set[str]
+        all_subdirs = set()  # type: Set[str]
+        for dirname, subdirs, files in os.walk(root):
+            all_subdirs.update(norm_join(root, dirname, d)
+                               for d in subdirs)
+            all_files.update(norm_join(root, dirname, f)
+                             for f in files)
+        # If all the files we found are in our remaining set of files to
+        # remove, then remove them from the latter set and add a wildcard
+        # for the directory.
+        if not (all_files - remaining):
+            remaining.difference_update(all_files)
+            wildcards.add(root + os.sep)
+
+    return set(map(case_map.__getitem__, remaining)) | wildcards
+
+
+def compress_for_output_listing(paths):
+    # type: (Iterable[str]) -> Tuple[Set[str], Set[str]]
+    """Returns a tuple of two sets of which paths to display to the user.
+
+    The first set contains paths that would be deleted. Files of a package
+    are not added, and the top-level directory of the package has a '*' added
+    at the end - to signify that all its contents are removed.
+
+    The second set contains files that would have been skipped in the above
+    folders.
+    """
+
+    will_remove = set(paths)
+    will_skip = set()
+
+    # Determine folders and files
+    folders = set()
+    files = set()
+    for path in will_remove:
+        if path.endswith(".pyc"):
+            continue
+        if path.endswith("__init__.py") or ".dist-info" in path:
+            folders.add(os.path.dirname(path))
+        files.add(path)
+
+    # probably this one https://github.com/python/mypy/issues/390
+    _normcased_files = set(map(os.path.normcase, files))  # type: ignore
+
+    folders = compact(folders)
+
+    # This walks the tree using os.walk to not miss extra folders
+    # that might get added.
+    for folder in folders:
+        for dirpath, _, dirfiles in os.walk(folder):
+            for fname in dirfiles:
+                if fname.endswith(".pyc"):
+                    continue
+
+                file_ = os.path.join(dirpath, fname)
+                if (os.path.isfile(file_) and
+                        os.path.normcase(file_) not in _normcased_files):
+                    # We are skipping this file. Add it to the set.
+ will_skip.add(file_) + + will_remove = files | { + os.path.join(folder, "*") for folder in folders + } + + return will_remove, will_skip + + +class StashedUninstallPathSet(object): + """A set of file rename operations to stash files while + tentatively uninstalling them.""" + def __init__(self): + # type: () -> None + # Mapping from source file root to [Adjacent]TempDirectory + # for files under that directory. + self._save_dirs = {} # type: Dict[str, TempDirectory] + # (old path, new path) tuples for each move that may need + # to be undone. + self._moves = [] # type: List[Tuple[str, str]] + + def _get_directory_stash(self, path): + # type: (str) -> str + """Stashes a directory. + + Directories are stashed adjacent to their original location if + possible, or else moved/copied into the user's temp dir.""" + + try: + save_dir = AdjacentTempDirectory(path) # type: TempDirectory + except OSError: + save_dir = TempDirectory(kind="uninstall") + self._save_dirs[os.path.normcase(path)] = save_dir + + return save_dir.path + + def _get_file_stash(self, path): + # type: (str) -> str + """Stashes a file. + + If no root has been provided, one will be created for the directory + in the user's temp directory.""" + path = os.path.normcase(path) + head, old_head = os.path.dirname(path), None + save_dir = None + + while head != old_head: + try: + save_dir = self._save_dirs[head] + break + except KeyError: + pass + head, old_head = os.path.dirname(head), head + else: + # Did not find any suitable root + head = os.path.dirname(path) + save_dir = TempDirectory(kind='uninstall') + self._save_dirs[head] = save_dir + + relpath = os.path.relpath(path, head) + if relpath and relpath != os.path.curdir: + return os.path.join(save_dir.path, relpath) + return save_dir.path + + def stash(self, path): + # type: (str) -> str + """Stashes the directory or file and returns its new location. + Handle symlinks as files to avoid modifying the symlink targets. + """ + path_is_dir = os.path.isdir(path) and not os.path.islink(path) + if path_is_dir: + new_path = self._get_directory_stash(path) + else: + new_path = self._get_file_stash(path) + + self._moves.append((path, new_path)) + if (path_is_dir and os.path.isdir(new_path)): + # If we're moving a directory, we need to + # remove the destination first or else it will be + # moved to inside the existing directory. + # We just created new_path ourselves, so it will + # be removable. 
+ os.rmdir(new_path) + renames(path, new_path) + return new_path + + def commit(self): + # type: () -> None + """Commits the uninstall by removing stashed files.""" + for _, save_dir in self._save_dirs.items(): + save_dir.cleanup() + self._moves = [] + self._save_dirs = {} + + def rollback(self): + # type: () -> None + """Undoes the uninstall by moving stashed files back.""" + for p in self._moves: + logger.info("Moving to %s\n from %s", *p) + + for new_path, path in self._moves: + try: + logger.debug('Replacing %s from %s', new_path, path) + if os.path.isfile(new_path) or os.path.islink(new_path): + os.unlink(new_path) + elif os.path.isdir(new_path): + rmtree(new_path) + renames(path, new_path) + except OSError as ex: + logger.error("Failed to restore %s", new_path) + logger.debug("Exception: %s", ex) + + self.commit() + + @property + def can_rollback(self): + # type: () -> bool + return bool(self._moves) + + +class UninstallPathSet(object): + """A set of file paths to be removed in the uninstallation of a + requirement.""" + def __init__(self, dist): + # type: (Distribution) -> None + self.paths = set() # type: Set[str] + self._refuse = set() # type: Set[str] + self.pth = {} # type: Dict[str, UninstallPthEntries] + self.dist = dist + self._moved_paths = StashedUninstallPathSet() + + def _permitted(self, path): + # type: (str) -> bool + """ + Return True if the given path is one we are permitted to + remove/modify, False otherwise. + + """ + return is_local(path) + + def add(self, path): + # type: (str) -> None + head, tail = os.path.split(path) + + # we normalize the head to resolve parent directory symlinks, but not + # the tail, since we only want to uninstall symlinks, not their targets + path = os.path.join(normalize_path(head), os.path.normcase(tail)) + + if not os.path.exists(path): + return + if self._permitted(path): + self.paths.add(path) + else: + self._refuse.add(path) + + # __pycache__ files can show up after 'installed-files.txt' is created, + # due to imports + if os.path.splitext(path)[1] == '.py' and uses_pycache: + self.add(cache_from_source(path)) + + def add_pth(self, pth_file, entry): + # type: (str, str) -> None + pth_file = normalize_path(pth_file) + if self._permitted(pth_file): + if pth_file not in self.pth: + self.pth[pth_file] = UninstallPthEntries(pth_file) + self.pth[pth_file].add(entry) + else: + self._refuse.add(pth_file) + + def remove(self, auto_confirm=False, verbose=False): + # type: (bool, bool) -> None + """Remove paths in ``self.paths`` with confirmation (unless + ``auto_confirm`` is True).""" + + if not self.paths: + logger.info( + "Can't uninstall '%s'. 
No files were found to uninstall.",
+                self.dist.project_name,
+            )
+            return
+
+        dist_name_version = (
+            self.dist.project_name + "-" + self.dist.version
+        )
+        logger.info('Uninstalling %s:', dist_name_version)
+
+        with indent_log():
+            if auto_confirm or self._allowed_to_proceed(verbose):
+                moved = self._moved_paths
+
+                for_rename = compress_for_rename(self.paths)
+
+                for path in sorted(compact(for_rename)):
+                    moved.stash(path)
+                    logger.debug('Removing file or directory %s', path)
+
+                for pth in self.pth.values():
+                    pth.remove()
+
+                logger.info('Successfully uninstalled %s', dist_name_version)
+
+    def _allowed_to_proceed(self, verbose):
+        # type: (bool) -> bool
+        """Display which files would be deleted and prompt for confirmation
+        """
+
+        def _display(msg, paths):
+            # type: (str, Iterable[str]) -> None
+            if not paths:
+                return
+
+            logger.info(msg)
+            with indent_log():
+                for path in sorted(compact(paths)):
+                    logger.info(path)
+
+        if not verbose:
+            will_remove, will_skip = compress_for_output_listing(self.paths)
+        else:
+            # In verbose mode, display all the files that are going to be
+            # deleted.
+            will_remove = set(self.paths)
+            will_skip = set()
+
+        _display('Would remove:', will_remove)
+        _display('Would not remove (might be manually added):', will_skip)
+        _display('Would not remove (outside of prefix):', self._refuse)
+        if verbose:
+            _display('Will actually move:', compress_for_rename(self.paths))
+
+        return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'
+
+    def rollback(self):
+        # type: () -> None
+        """Rollback the changes previously made by remove()."""
+        if not self._moved_paths.can_rollback:
+            logger.error(
+                "Can't roll back %s; was not uninstalled",
+                self.dist.project_name,
+            )
+            return
+        logger.info('Rolling back uninstall of %s', self.dist.project_name)
+        self._moved_paths.rollback()
+        for pth in self.pth.values():
+            pth.rollback()
+
+    def commit(self):
+        # type: () -> None
+        """Remove temporary save dir: rollback will no longer be possible."""
+        self._moved_paths.commit()
+
+    @classmethod
+    def from_dist(cls, dist):
+        # type: (Distribution) -> UninstallPathSet
+        dist_path = normalize_path(dist.location)
+        if not dist_is_local(dist):
+            logger.info(
+                "Not uninstalling %s at %s, outside environment %s",
+                dist.key,
+                dist_path,
+                sys.prefix,
+            )
+            return cls(dist)
+
+        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
+                                     sysconfig.get_path("platstdlib")}
+                         if p}:
+            logger.info(
+                "Not uninstalling %s at %s, as it is in the standard library.",
+                dist.key,
+                dist_path,
+            )
+            return cls(dist)
+
+        paths_to_remove = cls(dist)
+        develop_egg_link = egg_link_path(dist)
+        develop_egg_link_egg_info = '{}.egg-info'.format(
+            pkg_resources.to_filename(dist.project_name))
+        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
+        # Special case for distutils installed package
+        distutils_egg_info = getattr(dist._provider, 'path', None)
+
+        # The order of the uninstall cases matters: given 2 installs of the
+        # same package, pip needs to uninstall the currently detected version
+        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
+                not dist.egg_info.endswith(develop_egg_link_egg_info)):
+            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
+            # are in fact in the develop_egg_link case
+            paths_to_remove.add(dist.egg_info)
+            if dist.has_metadata('installed-files.txt'):
+                for installed_file in dist.get_metadata(
+                        'installed-files.txt').splitlines():
+                    path = os.path.normpath(
+                        os.path.join(dist.egg_info, installed_file)
+                    )
+                    paths_to_remove.add(path)
+        # FIXME:
need a test for this elif block + # occurs with --single-version-externally-managed/--record outside + # of pip + elif dist.has_metadata('top_level.txt'): + if dist.has_metadata('namespace_packages.txt'): + namespaces = dist.get_metadata('namespace_packages.txt') + else: + namespaces = [] + for top_level_pkg in [ + p for p + in dist.get_metadata('top_level.txt').splitlines() + if p and p not in namespaces]: + path = os.path.join(dist.location, top_level_pkg) + paths_to_remove.add(path) + paths_to_remove.add(path + '.py') + paths_to_remove.add(path + '.pyc') + paths_to_remove.add(path + '.pyo') + + elif distutils_egg_info: + raise UninstallationError( + "Cannot uninstall {!r}. It is a distutils installed project " + "and thus we cannot accurately determine which files belong " + "to it which would lead to only a partial uninstall.".format( + dist.project_name, + ) + ) + + elif dist.location.endswith('.egg'): + # package installed by easy_install + # We cannot match on dist.egg_name because it can slightly vary + # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg + paths_to_remove.add(dist.location) + easy_install_egg = os.path.split(dist.location)[1] + easy_install_pth = os.path.join(os.path.dirname(dist.location), + 'easy-install.pth') + paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg) + + elif egg_info_exists and dist.egg_info.endswith('.dist-info'): + for path in uninstallation_paths(dist): + paths_to_remove.add(path) + + elif develop_egg_link: + # develop egg + with open(develop_egg_link, 'r') as fh: + link_pointer = os.path.normcase(fh.readline().strip()) + assert (link_pointer == dist.location), ( + 'Egg-link %s does not match installed location of %s ' + '(at %s)' % (link_pointer, dist.project_name, dist.location) + ) + paths_to_remove.add(develop_egg_link) + easy_install_pth = os.path.join(os.path.dirname(develop_egg_link), + 'easy-install.pth') + paths_to_remove.add_pth(easy_install_pth, dist.location) + + else: + logger.debug( + 'Not sure how to uninstall: %s - Check: %s', + dist, dist.location, + ) + + # find distutils scripts= scripts + if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'): + for script in dist.metadata_listdir('scripts'): + if dist_in_usersite(dist): + bin_dir = bin_user + else: + bin_dir = bin_py + paths_to_remove.add(os.path.join(bin_dir, script)) + if WINDOWS: + paths_to_remove.add(os.path.join(bin_dir, script) + '.bat') + + # find console_scripts + _scripts_to_remove = [] + console_scripts = dist.get_entry_map(group='console_scripts') + for name in console_scripts.keys(): + _scripts_to_remove.extend(_script_names(dist, name, False)) + # find gui_scripts + gui_scripts = dist.get_entry_map(group='gui_scripts') + for name in gui_scripts.keys(): + _scripts_to_remove.extend(_script_names(dist, name, True)) + + for s in _scripts_to_remove: + paths_to_remove.add(s) + + return paths_to_remove + + +class UninstallPthEntries(object): + def __init__(self, pth_file): + # type: (str) -> None + if not os.path.isfile(pth_file): + raise UninstallationError( + "Cannot remove entries from nonexistent file %s" % pth_file + ) + self.file = pth_file + self.entries = set() # type: Set[str] + self._saved_lines = None # type: Optional[List[bytes]] + + def add(self, entry): + # type: (str) -> None + entry = os.path.normcase(entry) + # On Windows, os.path.normcase converts the entry to use + # backslashes. This is correct for entries that describe absolute + # paths outside of site-packages, but all the others use forward + # slashes. 
+        # os.path.splitdrive is used instead of os.path.isabs because isabs
+        # treats non-absolute paths with drive letter markings like c:foo\bar
+        # as absolute paths. It also does not recognize UNC paths if they don't
+        # have more than "\\server\share". Valid examples: "\\server\share\" or
+        # "\\server\share\folder". Python 2.7.8+ supports UNC in splitdrive.
+        if WINDOWS and not os.path.splitdrive(entry)[0]:
+            entry = entry.replace('\\', '/')
+        self.entries.add(entry)
+
+    def remove(self):
+        # type: () -> None
+        logger.debug('Removing pth entries from %s:', self.file)
+        with open(self.file, 'rb') as fh:
+            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
+            lines = fh.readlines()
+            self._saved_lines = lines
+        if any(b'\r\n' in line for line in lines):
+            endline = '\r\n'
+        else:
+            endline = '\n'
+        # handle missing trailing newline
+        if lines and not lines[-1].endswith(endline.encode("utf-8")):
+            lines[-1] = lines[-1] + endline.encode("utf-8")
+        for entry in self.entries:
+            try:
+                logger.debug('Removing entry: %s', entry)
+                lines.remove((entry + endline).encode("utf-8"))
+            except ValueError:
+                pass
+        with open(self.file, 'wb') as fh:
+            fh.writelines(lines)
+
+    def rollback(self):
+        # type: () -> bool
+        if self._saved_lines is None:
+            logger.error(
+                'Cannot roll back changes to %s, none were made', self.file
+            )
+            return False
+        logger.debug('Rolling %s back to previous state', self.file)
+        with open(self.file, 'wb') as fh:
+            fh.writelines(self._saved_lines)
+        return True
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/self_outdated_check.py b/backend/test/lib/python3.8/site-packages/pip/_internal/self_outdated_check.py
new file mode 100644
index 0000000000000000000000000000000000000000..8fc3c594acf96eb8dee7e69c9d835e16cd45cec3
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/pip/_internal/self_outdated_check.py
@@ -0,0 +1,242 @@
+# The following comment should be removed at some point in the future.
+# mypy: disallow-untyped-defs=False
+
+from __future__ import absolute_import
+
+import datetime
+import hashlib
+import json
+import logging
+import os.path
+import sys
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging import version as packaging_version
+from pip._vendor.six import ensure_binary
+
+from pip._internal.index.collector import LinkCollector
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.models.search_scope import SearchScope
+from pip._internal.models.selection_prefs import SelectionPreferences
+from pip._internal.utils.filesystem import (
+    adjacent_tmp_file,
+    check_path_owner,
+    replace,
+)
+from pip._internal.utils.misc import (
+    ensure_dir,
+    get_installed_version,
+    redact_auth_from_url,
+)
+from pip._internal.utils.packaging import get_installer
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+    import optparse
+    from optparse import Values
+    from typing import Any, Dict, Text, Union
+
+    from pip._internal.network.session import PipSession
+
+
+SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
+
+
+logger = logging.getLogger(__name__)
+
+
+def make_link_collector(
+    session,  # type: PipSession
+    options,  # type: Values
+    suppress_no_index=False,  # type: bool
+):
+    # type: (...) -> LinkCollector
+    """
+    :param session: The Session to use to make requests.
+    :param suppress_no_index: Whether to ignore the --no-index option
+        when constructing the SearchScope object.
+    """
+    index_urls = [options.index_url] + options.extra_index_urls
+    if options.no_index and not suppress_no_index:
+        logger.debug(
+            'Ignoring indexes: %s',
+            ','.join(redact_auth_from_url(url) for url in index_urls),
+        )
+        index_urls = []
+
+    # Make sure find_links is a list before passing to create().
+    find_links = options.find_links or []
+
+    search_scope = SearchScope.create(
+        find_links=find_links, index_urls=index_urls,
+    )
+
+    link_collector = LinkCollector(session=session, search_scope=search_scope)
+
+    return link_collector
+
+
+def _get_statefile_name(key):
+    # type: (Union[str, Text]) -> str
+    key_bytes = ensure_binary(key)
+    name = hashlib.sha224(key_bytes).hexdigest()
+    return name
+
+
+class SelfCheckState(object):
+    def __init__(self, cache_dir):
+        # type: (str) -> None
+        self.state = {}  # type: Dict[str, Any]
+        self.statefile_path = None
+
+        # Try to load the existing state
+        if cache_dir:
+            self.statefile_path = os.path.join(
+                cache_dir, "selfcheck", _get_statefile_name(self.key)
+            )
+            try:
+                with open(self.statefile_path) as statefile:
+                    self.state = json.load(statefile)
+            except (IOError, ValueError, KeyError):
+                # Explicitly suppressing exceptions, since we don't want to
+                # error out if the cache file is invalid.
+                pass
+
+    @property
+    def key(self):
+        return sys.prefix
+
+    def save(self, pypi_version, current_time):
+        # type: (str, datetime.datetime) -> None
+        # If we do not have a path to cache in, don't bother saving.
+        if not self.statefile_path:
+            return
+
+        # Check to make sure that we own the directory
+        if not check_path_owner(os.path.dirname(self.statefile_path)):
+            return
+
+        # Now that we've ensured the directory is owned by this user, we'll go
+        # ahead and make sure that all our directories are created.
+        ensure_dir(os.path.dirname(self.statefile_path))
+
+        state = {
+            # Include the key so it's easy to tell which pip wrote the
+            # file.
+            "key": self.key,
+            "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
+            "pypi_version": pypi_version,
+        }
+
+        text = json.dumps(state, sort_keys=True, separators=(",", ":"))
+
+        with adjacent_tmp_file(self.statefile_path) as f:
+            f.write(ensure_binary(text))
+
+        try:
+            # Since we have a prefix-specific state file, we can just
+            # overwrite whatever is there, no need to check.
+            replace(f.name, self.statefile_path)
+        except OSError:
+            # Best effort.
+            pass
+
+
+def was_installed_by_pip(pkg):
+    # type: (str) -> bool
+    """Checks whether pkg was installed by pip.
+
+    This is used to avoid displaying the upgrade message when pip is in fact
+    installed by a system package manager, such as dnf on Fedora.
+    """
+    try:
+        dist = pkg_resources.get_distribution(pkg)
+        return "pip" == get_installer(dist)
+    except pkg_resources.DistributionNotFound:
+        return False
+
+
+def pip_self_version_check(session, options):
+    # type: (PipSession, optparse.Values) -> None
+    """Check for an update for pip.
+
+    Limit the frequency of checks to once per week. State is stored either in
+    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
+    of the pip script path.
+    """
+    installed_version = get_installed_version("pip")
+    if not installed_version:
+        return
+
+    pip_version = packaging_version.parse(installed_version)
+    pypi_version = None
+
+    try:
+        state = SelfCheckState(cache_dir=options.cache_dir)
+
+        current_time = datetime.datetime.utcnow()
+        # Determine if we need to refresh the state
+        if "last_check" in state.state and "pypi_version" in state.state:
+            last_check = datetime.datetime.strptime(
+                state.state["last_check"],
+                SELFCHECK_DATE_FMT
+            )
+            if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
+                pypi_version = state.state["pypi_version"]
+
+        # Refresh the version if we need to or just see if we need to warn
+        if pypi_version is None:
+            # Let's use PackageFinder to see what the latest pip version is
+            link_collector = make_link_collector(
+                session,
+                options=options,
+                suppress_no_index=True,
+            )
+
+            # Pass allow_yanked=False so we don't suggest upgrading to a
+            # yanked version.
+            selection_prefs = SelectionPreferences(
+                allow_yanked=False,
+                allow_all_prereleases=False,  # Explicitly set to False
+            )
+
+            finder = PackageFinder.create(
+                link_collector=link_collector,
+                selection_prefs=selection_prefs,
+            )
+            best_candidate = finder.find_best_candidate("pip").best_candidate
+            if best_candidate is None:
+                return
+            pypi_version = str(best_candidate.version)
+
+            # save that we've performed a check
+            state.save(pypi_version, current_time)
+
+        remote_version = packaging_version.parse(pypi_version)
+
+        local_version_is_older = (
+            pip_version < remote_version and
+            pip_version.base_version != remote_version.base_version and
+            was_installed_by_pip('pip')
+        )
+
+        # Determine if our pypi_version is older
+        if not local_version_is_older:
+            return
+
+        # We cannot tell how the current pip is available in the current
+        # command context, so be pragmatic here and suggest the command
+        # that's always available. This does not accommodate spaces in
+        # `sys.executable`.
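+        # (With a typical CPython installation this renders as something
+        # like "/usr/bin/python3 -m pip".)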
+ pip_cmd = "{} -m pip".format(sys.executable) + logger.warning( + "You are using pip version %s; however, version %s is " + "available.\nYou should consider upgrading via the " + "'%s install --upgrade pip' command.", + pip_version, pypi_version, pip_cmd + ) + except Exception: + logger.debug( + "There was an error checking the latest version of pip", + exc_info=True, + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__init__.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..08d05084e7329d20cac14c72cd3d888630c9e665 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3a0353011c7ecc4aa1342349846f464f26056490 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/compat.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/compat.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..65a31f0d253bb252e72221500fe8ea13e3b2afac Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/compat.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4a097502660edcc82d3ddefed4ce40fdd90ebb32 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0e6f67a152823a502225ec6da576952dee6fc372 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1965def55e5b8c6bb99c9d6894f9afecff3d3c26 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-38.pyc 
b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..32914538eac26c5dff6db9bc7b1e596d70900e92 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9f8df22fcd2e51198ce91d6255579028df766bbf Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c38fff28121e3c7d6135348e9d63f1704e2dc247 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..39085b95e2bf272f01c09e6d54bfbaf152e9924a Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6111e0bbaf7147ee4c2e2bca1cff221f27f161fb Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c7b3c3e13ac158bc871ee3dd4f0eeb2abaaa18b4 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/inject_securetransport.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/logging.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/logging.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7f327e797cd76a96772a1f58fb789cc65c03025e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/logging.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/marker_files.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/marker_files.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a6270165185237f5a0afe75282ca6747e0d979cc Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/marker_files.cpython-38.pyc 
differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/misc.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/misc.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a1e927978c59eaa6c74c346879f2528a53c63030 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/misc.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/models.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/models.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..adca06df62b4c920976fea1865b0137565fb4963 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/models.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3c87213fc143d80372f2dc7d993c7936732054b2 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/pkg_resources.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/pkg_resources.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7f775ab1fe6e09930a04c0bbe69909b45a7bd8e4 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/pkg_resources.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..17cf89fcc277377f6f1e75dbca58dd3f319731fa Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..004b3f6ea0c544106e0cc0f820feb96992eb20d3 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9f19d2e5e79288549aaee2fe99cd5abd1042bf03 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/typing.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/typing.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c3ce0b76c5518271c44f8c6bbb01c2da13e4f799 Binary files /dev/null and 
b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/typing.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/ui.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/ui.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8e21d7c63dbe45d08b12d7b49433c49881b0be6a
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/ui.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..32ee4ec11a956ca77fd54b4ad77bcfe35968700d
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/urls.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/urls.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e9dc69ea426cb842e61b0f009c41962b22748a6a
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/urls.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5b06000576169bc680840a587d3dac6de0b74b1c
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0747234482784b88c670b9afee918696fc4069da
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/appdirs.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/appdirs.py
new file mode 100644
index 0000000000000000000000000000000000000000..93d17b5a81bdeb3077ba18834a47a37c8d7f4841
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/appdirs.py
@@ -0,0 +1,44 @@
+"""
+This code wraps the vendored appdirs module so that the return values are
+compatible with the current pip code base.
+
+The intention is to rewrite current usages gradually, keeping the tests
+passing, and eventually drop this after all usages are changed.
+""" + +from __future__ import absolute_import + +import os + +from pip._vendor import appdirs as _appdirs + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List + + +def user_cache_dir(appname): + # type: (str) -> str + return _appdirs.user_cache_dir(appname, appauthor=False) + + +def user_config_dir(appname, roaming=True): + # type: (str, bool) -> str + return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming) + + +def user_data_dir(appname, roaming=False): + # type: (str, bool) -> str + return _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming) + + +# for the discussion regarding site_config_dir locations +# see <https://github.com/pypa/pip/issues/1733> +def site_config_dirs(appname): + # type: (str) -> List[str] + dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True) + if _appdirs.system not in ["win32", "darwin"]: + # always look in /etc directly as well + return dirval.split(os.pathsep) + ['/etc'] + return [dirval] diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/compat.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/compat.py new file mode 100644 index 0000000000000000000000000000000000000000..6efa52ad2b8daece49acf69daa1196582220f4a3 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/compat.py @@ -0,0 +1,269 @@ +"""Stuff that differs in different Python versions and platform +distributions.""" + +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import, division + +import codecs +import locale +import logging +import os +import shutil +import sys + +from pip._vendor.six import PY2, text_type + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, Text, Tuple, Union + +try: + import ipaddress +except ImportError: + try: + from pip._vendor import ipaddress # type: ignore + except ImportError: + import ipaddr as ipaddress # type: ignore + ipaddress.ip_address = ipaddress.IPAddress # type: ignore + ipaddress.ip_network = ipaddress.IPNetwork # type: ignore + + +__all__ = [ + "ipaddress", "uses_pycache", "console_to_str", + "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size", +] + + +logger = logging.getLogger(__name__) + +if PY2: + import imp + + try: + cache_from_source = imp.cache_from_source # type: ignore + except AttributeError: + # does not use __pycache__ + cache_from_source = None + + uses_pycache = cache_from_source is not None +else: + uses_pycache = True + from importlib.util import cache_from_source + + +if PY2: + # In Python 2.7, backslashreplace exists + # but does not support use for decoding. + # We implement our own replace handler for this + # situation, so that we can consistently use + # backslash replacement for all versions. 
+ def backslashreplace_decode_fn(err): + raw_bytes = (err.object[i] for i in range(err.start, err.end)) + # Python 2 gave us characters - convert to numeric bytes + raw_bytes = (ord(b) for b in raw_bytes) + return u"".join(u"\\x%x" % c for c in raw_bytes), err.end + codecs.register_error( + "backslashreplace_decode", + backslashreplace_decode_fn, + ) + backslashreplace_decode = "backslashreplace_decode" +else: + backslashreplace_decode = "backslashreplace" + + +def has_tls(): + # type: () -> bool + try: + import _ssl # noqa: F401 # ignore unused + return True + except ImportError: + pass + + from pip._vendor.urllib3.util import IS_PYOPENSSL + return IS_PYOPENSSL + + +def str_to_display(data, desc=None): + # type: (Union[bytes, Text], Optional[str]) -> Text + """ + For display or logging purposes, convert a bytes object (or text) to + text (e.g. unicode in Python 2) safe for output. + + :param desc: An optional phrase describing the input data, for use in + the log message if a warning is logged. Defaults to "Bytes object". + + This function should never error out and so can take a best effort + approach. It is okay to be lossy if needed since the return value is + just for display. + + We assume the data is in the locale preferred encoding. If it won't + decode properly, we warn the user but decode as best we can. + + We also ensure that the output can be safely written to standard output + without encoding errors. + """ + if isinstance(data, text_type): + return data + + # Otherwise, data is a bytes object (str in Python 2). + # First, get the encoding we assume. This is the preferred + # encoding for the locale, unless that is not found, or + # it is ASCII, in which case assume UTF-8 + encoding = locale.getpreferredencoding() + if (not encoding) or codecs.lookup(encoding).name == "ascii": + encoding = "utf-8" + + # Now try to decode the data - if we fail, warn the user and + # decode with replacement. + try: + decoded_data = data.decode(encoding) + except UnicodeDecodeError: + if desc is None: + desc = 'Bytes object' + msg_format = '{} does not appear to be encoded as %s'.format(desc) + logger.warning(msg_format, encoding) + decoded_data = data.decode(encoding, errors=backslashreplace_decode) + + # Make sure we can print the output, by encoding it to the output + # encoding with replacement of unencodable characters, and then + # decoding again. + # We use stderr's encoding because it's less likely to be + # redirected and if we don't find an encoding we skip this + # step (on the assumption that output is wrapped by something + # that won't fail). + # The double getattr is to deal with the possibility that we're + # being called in a situation where sys.__stderr__ doesn't exist, + # or doesn't have an encoding attribute. Neither of these cases + # should occur in normal pip use, but there's no harm in checking + # in case people use pip in (unsupported) unusual situations. + output_encoding = getattr(getattr(sys, "__stderr__", None), + "encoding", None) + + if output_encoding: + output_encoded = decoded_data.encode( + output_encoding, + errors="backslashreplace" + ) + decoded_data = output_encoded.decode(output_encoding) + + return decoded_data + + +def console_to_str(data): + # type: (bytes) -> Text + """Return a string, safe for output, of subprocess output. + """ + return str_to_display(data, desc='Subprocess output') + + +def get_path_uid(path): + # type: (str) -> int + """ + Return path's uid. 
+ + Does not follow symlinks: + https://github.com/pypa/pip/pull/935#discussion_r5307003 + + Placed this function in compat due to differences on AIX and + Jython, that should eventually go away. + + :raises OSError: When path is a symlink or can't be read. + """ + if hasattr(os, 'O_NOFOLLOW'): + fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) + file_uid = os.fstat(fd).st_uid + os.close(fd) + else: # AIX and Jython + # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW + if not os.path.islink(path): + # older versions of Jython don't have `os.fstat` + file_uid = os.stat(path).st_uid + else: + # raise OSError for parity with os.O_NOFOLLOW above + raise OSError( + "%s is a symlink; Will not return uid for symlinks" % path + ) + return file_uid + + +def expanduser(path): + # type: (str) -> str + """ + Expand ~ and ~user constructions. + + Includes a workaround for https://bugs.python.org/issue14768 + """ + expanded = os.path.expanduser(path) + if path.startswith('~/') and expanded.startswith('//'): + expanded = expanded[1:] + return expanded + + +# packages in the stdlib that may have installation metadata, but should not be +# considered 'installed'. this theoretically could be determined based on +# dist.location (py27:`sysconfig.get_paths()['stdlib']`, +# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may +# make this ineffective, so hard-coding +stdlib_pkgs = {"python", "wsgiref", "argparse"} + + +# windows detection, covers cpython and ironpython +WINDOWS = (sys.platform.startswith("win") or + (sys.platform == 'cli' and os.name == 'nt')) + + +def samefile(file1, file2): + # type: (str, str) -> bool + """Provide an alternative for os.path.samefile on Windows/Python2""" + if hasattr(os.path, 'samefile'): + return os.path.samefile(file1, file2) + else: + path1 = os.path.normcase(os.path.abspath(file1)) + path2 = os.path.normcase(os.path.abspath(file2)) + return path1 == path2 + + +if hasattr(shutil, 'get_terminal_size'): + def get_terminal_size(): + # type: () -> Tuple[int, int] + """ + Returns a tuple (x, y) representing the width(x) and the height(y) + in characters of the terminal window. + """ + return tuple(shutil.get_terminal_size()) # type: ignore +else: + def get_terminal_size(): + # type: () -> Tuple[int, int] + """ + Returns a tuple (x, y) representing the width(x) and the height(y) + in characters of the terminal window. + """ + def ioctl_GWINSZ(fd): + try: + import fcntl + import termios + import struct + cr = struct.unpack_from( + 'hh', + fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678') + ) + except Exception: + return None + if cr == (0, 0): + return None + return cr + cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) + if not cr: + if sys.platform != "win32": + try: + fd = os.open(os.ctermid(), os.O_RDONLY) + cr = ioctl_GWINSZ(fd) + os.close(fd) + except Exception: + pass + if not cr: + cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) + return int(cr[1]), int(cr[0]) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/deprecation.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/deprecation.py new file mode 100644 index 0000000000000000000000000000000000000000..2f20cfd49d32f0bbab7b4719eb2dbdca971b751a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/deprecation.py @@ -0,0 +1,104 @@ +""" +A module that implements tooling to enable easy warnings about deprecations. +""" + +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import logging +import warnings + +from pip._vendor.packaging.version import parse + +from pip import __version__ as current_version +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any, Optional + + +DEPRECATION_MSG_PREFIX = "DEPRECATION: " + + +class PipDeprecationWarning(Warning): + pass + + +_original_showwarning = None # type: Any + + +# Warnings <-> Logging Integration +def _showwarning(message, category, filename, lineno, file=None, line=None): + if file is not None: + if _original_showwarning is not None: + _original_showwarning( + message, category, filename, lineno, file, line, + ) + elif issubclass(category, PipDeprecationWarning): + # We use a specially named logger which will handle all of the + # deprecation messages for pip. + logger = logging.getLogger("pip._internal.deprecations") + logger.warning(message) + else: + _original_showwarning( + message, category, filename, lineno, file, line, + ) + + +def install_warning_logger(): + # type: () -> None + # Enable our Deprecation Warnings + warnings.simplefilter("default", PipDeprecationWarning, append=True) + + global _original_showwarning + + if _original_showwarning is None: + _original_showwarning = warnings.showwarning + warnings.showwarning = _showwarning + + +def deprecated(reason, replacement, gone_in, issue=None): + # type: (str, Optional[str], Optional[str], Optional[int]) -> None + """Helper to deprecate existing functionality. + + reason: + Textual reason shown to the user about why this functionality has + been deprecated. + replacement: + Textual suggestion shown to the user about what alternative + functionality they can use. + gone_in: + The version of pip does this functionality should get removed in. + Raises errors if pip's current version is greater than or equal to + this. + issue: + Issue number on the tracker that would serve as a useful place for + users to find related discussion and provide feedback. + + Always pass replacement, gone_in and issue as keyword arguments for clarity + at the call site. + """ + + # Construct a nice message. + # This is eagerly formatted as we want it to get logged as if someone + # typed this entire message out. + sentences = [ + (reason, DEPRECATION_MSG_PREFIX + "{}"), + (gone_in, "pip {} will remove support for this functionality."), + (replacement, "A possible replacement is {}."), + (issue, ( + "You can find discussion regarding this at " + "https://github.com/pypa/pip/issues/{}." + )), + ] + message = " ".join( + template.format(val) for val, template in sentences if val is not None + ) + + # Raise as an error if it has to be removed. 
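    # A hypothetical call site, to show how the arguments combine (all
    # values below are made up for illustration):
    #
    #     deprecated(
    #         reason="This form of the option is no longer parsed.",
    #         replacement="the --example flag",
    #         gone_in=None,
    #         issue=1234,
    #     )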
+ if gone_in is not None and parse(current_version) >= parse(gone_in): + raise PipDeprecationWarning(message) + + warnings.warn(message, category=PipDeprecationWarning, stacklevel=2) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/distutils_args.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/distutils_args.py new file mode 100644 index 0000000000000000000000000000000000000000..e38e402d7330778385f65a440b5b39f7bcbdedb3 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/distutils_args.py @@ -0,0 +1,48 @@ +from distutils.errors import DistutilsArgError +from distutils.fancy_getopt import FancyGetopt + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict, List + + +_options = [ + ("exec-prefix=", None, ""), + ("home=", None, ""), + ("install-base=", None, ""), + ("install-data=", None, ""), + ("install-headers=", None, ""), + ("install-lib=", None, ""), + ("install-platlib=", None, ""), + ("install-purelib=", None, ""), + ("install-scripts=", None, ""), + ("prefix=", None, ""), + ("root=", None, ""), + ("user", None, ""), +] + + +# typeshed doesn't permit Tuple[str, None, str], see python/typeshed#3469. +_distutils_getopt = FancyGetopt(_options) # type: ignore + + +def parse_distutils_args(args): + # type: (List[str]) -> Dict[str, str] + """Parse provided arguments, returning an object that has the + matched arguments. + + Any unknown arguments are ignored. + """ + result = {} + for arg in args: + try: + _, match = _distutils_getopt.getopt(args=[arg]) + except DistutilsArgError: + # We don't care about any other options, which here may be + # considered unrecognized since our option list is not + # exhaustive. + pass + else: + result.update(match.__dict__) + return result diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/encoding.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/encoding.py new file mode 100644 index 0000000000000000000000000000000000000000..ab4d4b98e3e1bca6f28db1ae114e48933a36be4e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/encoding.py @@ -0,0 +1,42 @@ +# The following comment should be removed at some point in the future. 
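# For a feel of the precedence implemented by auto_decode() below (BOM
# first, then a PEP 263 coding comment, then the locale), an illustrative
# sketch:
#
#     >>> import codecs
#     >>> auto_decode(codecs.BOM_UTF8 + b"x = 1\n")
#     'x = 1\n'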
+# mypy: strict-optional=False + +import codecs +import locale +import re +import sys + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Tuple, Text + +BOMS = [ + (codecs.BOM_UTF8, 'utf-8'), + (codecs.BOM_UTF16, 'utf-16'), + (codecs.BOM_UTF16_BE, 'utf-16-be'), + (codecs.BOM_UTF16_LE, 'utf-16-le'), + (codecs.BOM_UTF32, 'utf-32'), + (codecs.BOM_UTF32_BE, 'utf-32-be'), + (codecs.BOM_UTF32_LE, 'utf-32-le'), +] # type: List[Tuple[bytes, Text]] + +ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)') + + +def auto_decode(data): + # type: (bytes) -> Text + """Check a bytes string for a BOM to correctly detect the encoding + + Fallback to locale.getpreferredencoding(False) like open() on Python3""" + for bom, encoding in BOMS: + if data.startswith(bom): + return data[len(bom):].decode(encoding) + # Lets check the first two lines as in PEP263 + for line in data.split(b'\n')[:2]: + if line[0:1] == b'#' and ENCODING_RE.search(line): + encoding = ENCODING_RE.search(line).groups()[0].decode('ascii') + return data.decode(encoding) + return data.decode( + locale.getpreferredencoding(False) or sys.getdefaultencoding(), + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/entrypoints.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/entrypoints.py new file mode 100644 index 0000000000000000000000000000000000000000..befd01c890184c74534bfefa1abd2376f234ac42 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/entrypoints.py @@ -0,0 +1,31 @@ +import sys + +from pip._internal.cli.main import main +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, List + + +def _wrapper(args=None): + # type: (Optional[List[str]]) -> int + """Central wrapper for all old entrypoints. + + Historically pip has had several entrypoints defined. Because of issues + arising from PATH, sys.path, multiple Pythons, their interactions, and most + of them having a pip installed, users suffer every time an entrypoint gets + moved. + + To alleviate this pain, and provide a mechanism for warning users and + directing them to an appropriate place for help, we now define all of + our old entrypoints as wrappers for the current one. + """ + sys.stderr.write( + "WARNING: pip is being invoked by an old script wrapper. This will " + "fail in a future version of pip.\n" + "Please see https://github.com/pypa/pip/issues/5599 for advice on " + "fixing the underlying issue.\n" + "To avoid this problem you can invoke Python with '-m pip' instead of " + "running pip directly.\n" + ) + return main(args) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/filesystem.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/filesystem.py new file mode 100644 index 0000000000000000000000000000000000000000..6f1537e4032617d294b26db09db1d85af4ad0dc2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/filesystem.py @@ -0,0 +1,171 @@ +import errno +import os +import os.path +import random +import shutil +import stat +import sys +from contextlib import contextmanager +from tempfile import NamedTemporaryFile + +# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is +# why we ignore the type on this import. 
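# For illustration, the decorator imported below is applied as in the
# _replace_retry helper further down in this module:
#
#     @retry(stop_max_delay=1000, wait_fixed=250)
#     def replace(src, dest):
#         ...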
+from pip._vendor.retrying import retry # type: ignore +from pip._vendor.six import PY2 + +from pip._internal.utils.compat import get_path_uid +from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast + +if MYPY_CHECK_RUNNING: + from typing import BinaryIO, Iterator + + class NamedTemporaryFileResult(BinaryIO): + @property + def file(self): + # type: () -> BinaryIO + pass + + +def check_path_owner(path): + # type: (str) -> bool + # If we don't have a way to check the effective uid of this process, then + # we'll just assume that we own the directory. + if sys.platform == "win32" or not hasattr(os, "geteuid"): + return True + + assert os.path.isabs(path) + + previous = None + while path != previous: + if os.path.lexists(path): + # Check if path is writable by current user. + if os.geteuid() == 0: + # Special handling for root user in order to handle properly + # cases where users use sudo without -H flag. + try: + path_uid = get_path_uid(path) + except OSError: + return False + return path_uid == 0 + else: + return os.access(path, os.W_OK) + else: + previous, path = path, os.path.dirname(path) + return False # assume we don't own the path + + +def copy2_fixed(src, dest): + # type: (str, str) -> None + """Wrap shutil.copy2() but map errors copying socket files to + SpecialFileError as expected. + + See also https://bugs.python.org/issue37700. + """ + try: + shutil.copy2(src, dest) + except (OSError, IOError): + for f in [src, dest]: + try: + is_socket_file = is_socket(f) + except OSError: + # An error has already occurred. Another error here is not + # a problem and we can ignore it. + pass + else: + if is_socket_file: + raise shutil.SpecialFileError("`%s` is a socket" % f) + + raise + + +def is_socket(path): + # type: (str) -> bool + return stat.S_ISSOCK(os.lstat(path).st_mode) + + +@contextmanager +def adjacent_tmp_file(path): + # type: (str) -> Iterator[NamedTemporaryFileResult] + """Given a path to a file, open a temp file next to it securely and ensure + it is written to disk after the context reaches its end. + """ + with NamedTemporaryFile( + delete=False, + dir=os.path.dirname(path), + prefix=os.path.basename(path), + suffix='.tmp', + ) as f: + result = cast('NamedTemporaryFileResult', f) + try: + yield result + finally: + result.file.flush() + os.fsync(result.file.fileno()) + + +_replace_retry = retry(stop_max_delay=1000, wait_fixed=250) + +if PY2: + @_replace_retry + def replace(src, dest): + # type: (str, str) -> None + try: + os.rename(src, dest) + except OSError: + os.remove(dest) + os.rename(src, dest) + +else: + replace = _replace_retry(os.replace) + + +# test_writable_dir and _test_writable_dir_win are copied from Flit, +# with the author's agreement to also place them under pip's license. +def test_writable_dir(path): + # type: (str) -> bool + """Check if a directory is writable. + + Uses os.access() on POSIX, tries creating files on Windows. + """ + # If the directory doesn't exist, find the closest parent that does. 
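    # e.g. for path == '/tmp/a/b/c' where only '/tmp' exists, the loop
    # below walks c -> b -> a and then probes '/tmp' for write access.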
+ while not os.path.isdir(path): + parent = os.path.dirname(path) + if parent == path: + break # Should never get here, but infinite loops are bad + path = parent + + if os.name == 'posix': + return os.access(path, os.W_OK) + + return _test_writable_dir_win(path) + + +def _test_writable_dir_win(path): + # type: (str) -> bool + # os.access doesn't work on Windows: http://bugs.python.org/issue2528 + # and we can't use tempfile: http://bugs.python.org/issue22107 + basename = 'accesstest_deleteme_fishfingers_custard_' + alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789' + for i in range(10): + name = basename + ''.join(random.choice(alphabet) for _ in range(6)) + file = os.path.join(path, name) + try: + fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL) + except OSError as e: + if e.errno == errno.EEXIST: + continue + if e.errno == errno.EPERM: + # This could be because there's a directory with the same name. + # But it's highly unlikely there's a directory called that, + # so we'll assume it's because the parent dir is not writable. + return False + raise + else: + os.close(fd) + os.unlink(file) + return True + + # This should never be reached + raise EnvironmentError( + 'Unexpected condition testing for writable directory' + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/filetypes.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/filetypes.py new file mode 100644 index 0000000000000000000000000000000000000000..daa0ca771b77a32bf498d07803f5bffc34b1abf9 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/filetypes.py @@ -0,0 +1,16 @@ +"""Filetype information. +""" +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Tuple + +WHEEL_EXTENSION = '.whl' +BZ2_EXTENSIONS = ('.tar.bz2', '.tbz') # type: Tuple[str, ...] +XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', + '.tar.lz', '.tar.lzma') # type: Tuple[str, ...] +ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION) # type: Tuple[str, ...] +TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar') # type: Tuple[str, ...] +ARCHIVE_EXTENSIONS = ( + ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS +) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/glibc.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/glibc.py new file mode 100644 index 0000000000000000000000000000000000000000..361042441384693dbeeb9424c78dedf3bdbb8a3d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/glibc.py @@ -0,0 +1,98 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +from __future__ import absolute_import + +import os +import sys + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, Tuple + + +def glibc_version_string(): + # type: () -> Optional[str] + "Returns glibc version string, or None if not using glibc." + return glibc_version_string_confstr() or glibc_version_string_ctypes() + + +def glibc_version_string_confstr(): + # type: () -> Optional[str] + "Primary implementation of glibc_version_string using os.confstr." + # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely + # to be broken or missing. 
This strategy is used in the standard library + # platform module: + # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 + if sys.platform == "win32": + return None + try: + # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": + _, version = os.confstr("CS_GNU_LIBC_VERSION").split() + except (AttributeError, OSError, ValueError): + # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... + return None + return version + + +def glibc_version_string_ctypes(): + # type: () -> Optional[str] + "Fallback implementation of glibc_version_string using ctypes." + + try: + import ctypes + except ImportError: + return None + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + process_namespace = ctypes.CDLL(None) + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +# platform.libc_ver regularly returns completely nonsensical glibc +# versions. E.g. on my computer, platform says: +# +# ~$ python2.7 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.7') +# ~$ python3.5 -c 'import platform; print(platform.libc_ver())' +# ('glibc', '2.9') +# +# But the truth is: +# +# ~$ ldd --version +# ldd (Debian GLIBC 2.22-11) 2.22 +# +# This is unfortunate, because it means that the linehaul data on libc +# versions that was generated by pip 8.1.2 and earlier is useless and +# misleading. Solution: instead of using platform, use our code that actually +# works. +def libc_ver(): + # type: () -> Tuple[str, str] + """Try to determine the glibc version + + Returns a tuple of strings (lib, version) which default to empty strings + in case the lookup fails. + """ + glibc_version = glibc_version_string() + if glibc_version is None: + return ("", "") + else: + return ("glibc", glibc_version) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/hashes.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/hashes.py new file mode 100644 index 0000000000000000000000000000000000000000..4c41551a25597aa646d480c7a896ab9f151fff96 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/hashes.py @@ -0,0 +1,131 @@ +from __future__ import absolute_import + +import hashlib + +from pip._vendor.six import iteritems, iterkeys, itervalues + +from pip._internal.exceptions import ( + HashMismatch, + HashMissing, + InstallationError, +) +from pip._internal.utils.misc import read_chunks +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import ( + Dict, List, BinaryIO, NoReturn, Iterator + ) + from pip._vendor.six import PY3 + if PY3: + from hashlib import _Hash + else: + from hashlib import _hash as _Hash + + +# The recommended hash algo of the moment. Change this whenever the state of +# the art changes; it won't hurt backward compatibility. 
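# An illustrative use of the Hashes helper defined below (sketch only):
#
#     >>> import hashlib
#     >>> good = {'sha256': [hashlib.sha256(b'data').hexdigest()]}
#     >>> Hashes(good).check_against_chunks(iter([b'data']))  # match: no error
#     >>> Hashes(good).is_hash_allowed('sha256', 'deadbeef')
#     False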
+FAVORITE_HASH = 'sha256' + + +# Names of hashlib algorithms allowed by the --hash option and ``pip hash`` +# Currently, those are the ones at least as collision-resistant as sha256. +STRONG_HASHES = ['sha256', 'sha384', 'sha512'] + + +class Hashes(object): + """A wrapper that builds multiple hashes at once and checks them against + known-good values + + """ + def __init__(self, hashes=None): + # type: (Dict[str, List[str]]) -> None + """ + :param hashes: A dict of algorithm names pointing to lists of allowed + hex digests + """ + self._allowed = {} if hashes is None else hashes + + @property + def digest_count(self): + # type: () -> int + return sum(len(digests) for digests in self._allowed.values()) + + def is_hash_allowed( + self, + hash_name, # type: str + hex_digest, # type: str + ): + # type: (...) -> bool + """Return whether the given hex digest is allowed.""" + return hex_digest in self._allowed.get(hash_name, []) + + def check_against_chunks(self, chunks): + # type: (Iterator[bytes]) -> None + """Check good hashes against ones built from iterable of chunks of + data. + + Raise HashMismatch if none match. + + """ + gots = {} + for hash_name in iterkeys(self._allowed): + try: + gots[hash_name] = hashlib.new(hash_name) + except (ValueError, TypeError): + raise InstallationError('Unknown hash name: %s' % hash_name) + + for chunk in chunks: + for hash in itervalues(gots): + hash.update(chunk) + + for hash_name, got in iteritems(gots): + if got.hexdigest() in self._allowed[hash_name]: + return + self._raise(gots) + + def _raise(self, gots): + # type: (Dict[str, _Hash]) -> NoReturn + raise HashMismatch(self._allowed, gots) + + def check_against_file(self, file): + # type: (BinaryIO) -> None + """Check good hashes against a file-like object + + Raise HashMismatch if none match. + + """ + return self.check_against_chunks(read_chunks(file)) + + def check_against_path(self, path): + # type: (str) -> None + with open(path, 'rb') as file: + return self.check_against_file(file) + + def __nonzero__(self): + # type: () -> bool + """Return whether I know any known-good hashes.""" + return bool(self._allowed) + + def __bool__(self): + # type: () -> bool + return self.__nonzero__() + + +class MissingHashes(Hashes): + """A workalike for Hashes used when we're missing a hash for a requirement + + It computes the actual hash of the requirement and raises a HashMissing + exception showing it to the user. + + """ + def __init__(self): + # type: () -> None + """Don't offer the ``hashes`` kwarg.""" + # Pass our favorite hash in to generate a "gotten hash". With the + # empty list, it will never match, so an error will always raise. + super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []}) + + def _raise(self, gots): + # type: (Dict[str, _Hash]) -> NoReturn + raise HashMissing(gots[FAVORITE_HASH].hexdigest()) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/inject_securetransport.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/inject_securetransport.py new file mode 100644 index 0000000000000000000000000000000000000000..5b93b1d6730518ec49afe78bdfbe74407825d8ee --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/inject_securetransport.py @@ -0,0 +1,36 @@ +"""A helper module that injects SecureTransport, on import. + +The import should be done as early as possible, to ensure all requests and +sessions (or whatever) are created after injecting SecureTransport. 
+ +Note that we only do the injection on macOS, when the linked OpenSSL is too +old to handle TLSv1.2. +""" + +import sys + + +def inject_securetransport(): + # type: () -> None + # Only relevant on macOS + if sys.platform != "darwin": + return + + try: + import ssl + except ImportError: + return + + # Checks for OpenSSL 1.0.1 + if ssl.OPENSSL_VERSION_NUMBER >= 0x1000100f: + return + + try: + from pip._vendor.urllib3.contrib import securetransport + except (ImportError, OSError): + return + + securetransport.inject_into_urllib3() + + +inject_securetransport() diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/logging.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/logging.py new file mode 100644 index 0000000000000000000000000000000000000000..7767111a6ba90278807dac5efd7a3ab59cc92fe1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/logging.py @@ -0,0 +1,398 @@ +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import contextlib +import errno +import logging +import logging.handlers +import os +import sys +from logging import Filter, getLogger + +from pip._vendor.six import PY2 + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX +from pip._internal.utils.misc import ensure_dir + +try: + import threading +except ImportError: + import dummy_threading as threading # type: ignore + + +try: + # Use "import as" and set colorama in the else clause to avoid mypy + # errors and get the following correct revealed type for colorama: + # `Union[_importlib_modulespec.ModuleType, None]` + # Otherwise, we get an error like the following in the except block: + # > Incompatible types in assignment (expression has type "None", + # variable has type Module) + # TODO: eliminate the need to use "import as" once mypy addresses some + # of its issues with conditional imports. Here is an umbrella issue: + # https://github.com/python/mypy/issues/1297 + from pip._vendor import colorama as _colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. +except Exception: + colorama = None +else: + # Import Fore explicitly rather than accessing below as colorama.Fore + # to avoid the following error running mypy: + # > Module has no attribute "Fore" + # TODO: eliminate the need to import Fore once mypy addresses some of its + # issues with conditional imports. This particular case could be an + # instance of the following issue (but also see the umbrella issue above): + # https://github.com/python/mypy/issues/3500 + from pip._vendor.colorama import Fore + + colorama = _colorama + + +_log_state = threading.local() +_log_state.indentation = 0 +subprocess_logger = getLogger('pip.subprocessor') + + +class BrokenStdoutLoggingError(Exception): + """ + Raised if BrokenPipeError occurs for the stdout stream while logging. + """ + pass + + +# BrokenPipeError does not exist in Python 2 and, in addition, manifests +# differently in Windows and non-Windows. 
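# e.g. on non-Windows Python 3 the predicate defined below reduces to an
# identity check on the exception class:
#
#     >>> _is_broken_pipe_error(BrokenPipeError, BrokenPipeError())
#     True
#     >>> _is_broken_pipe_error(OSError, OSError())
#     False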
+if WINDOWS: + # In Windows, a broken pipe can show up as EINVAL rather than EPIPE: + # https://bugs.python.org/issue19612 + # https://bugs.python.org/issue30418 + if PY2: + def _is_broken_pipe_error(exc_class, exc): + """See the docstring for non-Windows Python 3 below.""" + return (exc_class is IOError and + exc.errno in (errno.EINVAL, errno.EPIPE)) + else: + # In Windows, a broken pipe IOError became OSError in Python 3. + def _is_broken_pipe_error(exc_class, exc): + """See the docstring for non-Windows Python 3 below.""" + return ((exc_class is BrokenPipeError) or # noqa: F821 + (exc_class is OSError and + exc.errno in (errno.EINVAL, errno.EPIPE))) +elif PY2: + def _is_broken_pipe_error(exc_class, exc): + """See the docstring for non-Windows Python 3 below.""" + return (exc_class is IOError and exc.errno == errno.EPIPE) +else: + # Then we are in the non-Windows Python 3 case. + def _is_broken_pipe_error(exc_class, exc): + """ + Return whether an exception is a broken pipe error. + + Args: + exc_class: an exception class. + exc: an exception instance. + """ + return (exc_class is BrokenPipeError) # noqa: F821 + + +@contextlib.contextmanager +def indent_log(num=2): + """ + A context manager which will cause the log output to be indented for any + log messages emitted inside it. + """ + _log_state.indentation += num + try: + yield + finally: + _log_state.indentation -= num + + +def get_indentation(): + return getattr(_log_state, 'indentation', 0) + + +class IndentingFormatter(logging.Formatter): + + def __init__(self, *args, **kwargs): + """ + A logging.Formatter that obeys the indent_log() context manager. + + :param add_timestamp: A bool indicating output lines should be prefixed + with their record's timestamp. + """ + self.add_timestamp = kwargs.pop("add_timestamp", False) + super(IndentingFormatter, self).__init__(*args, **kwargs) + + def get_message_start(self, formatted, levelno): + """ + Return the start of the formatted log message (not counting the + prefix to add to each line). + """ + if levelno < logging.WARNING: + return '' + if formatted.startswith(DEPRECATION_MSG_PREFIX): + # Then the message already has a prefix. We don't want it to + # look like "WARNING: DEPRECATION: ...." + return '' + if levelno < logging.ERROR: + return 'WARNING: ' + + return 'ERROR: ' + + def format(self, record): + """ + Calls the standard formatter, but will indent all of the log message + lines by our current indentation level. + """ + formatted = super(IndentingFormatter, self).format(record) + message_start = self.get_message_start(formatted, record.levelno) + formatted = message_start + formatted + + prefix = '' + if self.add_timestamp: + # TODO: Use Formatter.default_time_format after dropping PY2. + t = self.formatTime(record, "%Y-%m-%dT%H:%M:%S") + prefix = '%s,%03d ' % (t, record.msecs) + prefix += " " * get_indentation() + formatted = "".join([ + prefix + line + for line in formatted.splitlines(True) + ]) + return formatted + + +def _color_wrap(*colors): + def wrapped(inp): + return "".join(list(colors) + [inp, colorama.Style.RESET_ALL]) + return wrapped + + +class ColorizedStreamHandler(logging.StreamHandler): + + # Don't build up a list of colors if we don't have colorama + if colorama: + COLORS = [ + # This needs to be in order from highest logging level to lowest. 
+ (logging.ERROR, _color_wrap(Fore.RED)), + (logging.WARNING, _color_wrap(Fore.YELLOW)), + ] + else: + COLORS = [] + + def __init__(self, stream=None, no_color=None): + logging.StreamHandler.__init__(self, stream) + self._no_color = no_color + + if WINDOWS and colorama: + self.stream = colorama.AnsiToWin32(self.stream) + + def _using_stdout(self): + """ + Return whether the handler is using sys.stdout. + """ + if WINDOWS and colorama: + # Then self.stream is an AnsiToWin32 object. + return self.stream.wrapped is sys.stdout + + return self.stream is sys.stdout + + def should_color(self): + # Don't colorize things if we do not have colorama or if told not to + if not colorama or self._no_color: + return False + + real_stream = ( + self.stream if not isinstance(self.stream, colorama.AnsiToWin32) + else self.stream.wrapped + ) + + # If the stream is a tty we should color it + if hasattr(real_stream, "isatty") and real_stream.isatty(): + return True + + # If we have an ANSI term we should color it + if os.environ.get("TERM") == "ANSI": + return True + + # If anything else we should not color it + return False + + def format(self, record): + msg = logging.StreamHandler.format(self, record) + + if self.should_color(): + for level, color in self.COLORS: + if record.levelno >= level: + msg = color(msg) + break + + return msg + + # The logging module says handleError() can be customized. + def handleError(self, record): + exc_class, exc = sys.exc_info()[:2] + # If a broken pipe occurred while calling write() or flush() on the + # stdout stream in logging's Handler.emit(), then raise our special + # exception so we can handle it in main() instead of logging the + # broken pipe error and continuing. + if (exc_class and self._using_stdout() and + _is_broken_pipe_error(exc_class, exc)): + raise BrokenStdoutLoggingError() + + return super(ColorizedStreamHandler, self).handleError(record) + + +class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): + + def _open(self): + ensure_dir(os.path.dirname(self.baseFilename)) + return logging.handlers.RotatingFileHandler._open(self) + + +class MaxLevelFilter(Filter): + + def __init__(self, level): + self.level = level + + def filter(self, record): + return record.levelno < self.level + + +class ExcludeLoggerFilter(Filter): + + """ + A logging Filter that excludes records from a logger (or its children). + """ + + def filter(self, record): + # The base Filter class allows only records from a logger (or its + # children). + return not super(ExcludeLoggerFilter, self).filter(record) + + +def setup_logging(verbosity, no_color, user_log_file): + """Configures and sets up all of the logging + + Returns the requested logging level, as its integer value. + """ + + # Determine the level to be logging at. + if verbosity >= 1: + level = "DEBUG" + elif verbosity == -1: + level = "WARNING" + elif verbosity == -2: + level = "ERROR" + elif verbosity <= -3: + level = "CRITICAL" + else: + level = "INFO" + + level_number = getattr(logging, level) + + # The "root" logger should match the "console" level *unless* we also need + # to log to a user log file. + include_user_log = user_log_file is not None + if include_user_log: + additional_log_file = user_log_file + root_level = "DEBUG" + else: + additional_log_file = "/dev/null" + root_level = level + + # Disable any logging besides WARNING unless we have DEBUG level logging + # enabled for vendored libraries. 
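    # So, e.g., `pip -v` (verbosity=1) selects DEBUG and a plain `pip`
    # invocation (verbosity=0) selects INFO; the function returns the
    # numeric form, logging.DEBUG == 10 and logging.INFO == 20.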
+ vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG" + + # Shorthands for clarity + log_streams = { + "stdout": "ext://sys.stdout", + "stderr": "ext://sys.stderr", + } + handler_classes = { + "stream": "pip._internal.utils.logging.ColorizedStreamHandler", + "file": "pip._internal.utils.logging.BetterRotatingFileHandler", + } + handlers = ["console", "console_errors", "console_subprocess"] + ( + ["user_log"] if include_user_log else [] + ) + + logging.config.dictConfig({ + "version": 1, + "disable_existing_loggers": False, + "filters": { + "exclude_warnings": { + "()": "pip._internal.utils.logging.MaxLevelFilter", + "level": logging.WARNING, + }, + "restrict_to_subprocess": { + "()": "logging.Filter", + "name": subprocess_logger.name, + }, + "exclude_subprocess": { + "()": "pip._internal.utils.logging.ExcludeLoggerFilter", + "name": subprocess_logger.name, + }, + }, + "formatters": { + "indent": { + "()": IndentingFormatter, + "format": "%(message)s", + }, + "indent_with_timestamp": { + "()": IndentingFormatter, + "format": "%(message)s", + "add_timestamp": True, + }, + }, + "handlers": { + "console": { + "level": level, + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stdout"], + "filters": ["exclude_subprocess", "exclude_warnings"], + "formatter": "indent", + }, + "console_errors": { + "level": "WARNING", + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stderr"], + "filters": ["exclude_subprocess"], + "formatter": "indent", + }, + # A handler responsible for logging to the console messages + # from the "subprocessor" logger. + "console_subprocess": { + "level": level, + "class": handler_classes["stream"], + "no_color": no_color, + "stream": log_streams["stderr"], + "filters": ["restrict_to_subprocess"], + "formatter": "indent", + }, + "user_log": { + "level": "DEBUG", + "class": handler_classes["file"], + "filename": additional_log_file, + "delay": True, + "formatter": "indent_with_timestamp", + }, + }, + "root": { + "level": root_level, + "handlers": handlers, + }, + "loggers": { + "pip._vendor": { + "level": vendored_log_level + } + }, + }) + + return level_number diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/marker_files.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/marker_files.py new file mode 100644 index 0000000000000000000000000000000000000000..42ea81405085a0000c587ad563fee30c7f37a026 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/marker_files.py @@ -0,0 +1,25 @@ +import os.path + +DELETE_MARKER_MESSAGE = '''\ +This file is placed here by pip to indicate the source was put +here by pip. + +Once this package is successfully installed this source code will be +deleted (unless you remove this file). +''' +PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt' + + +def has_delete_marker_file(directory): + # type: (str) -> bool + return os.path.exists(os.path.join(directory, PIP_DELETE_MARKER_FILENAME)) + + +def write_delete_marker_file(directory): + # type: (str) -> None + """ + Write the pip delete marker file into this directory. 
+ """ + filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME) + with open(filepath, 'w') as marker_fp: + marker_fp.write(DELETE_MARKER_MESSAGE) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/misc.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..554af0bf7b9b8c03de1b2cd3f3eda09a31c60a41 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/misc.py @@ -0,0 +1,904 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False +# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import contextlib +import errno +import getpass +import hashlib +import io +import logging +import os +import posixpath +import shutil +import stat +import sys +from collections import deque + +from pip._vendor import pkg_resources +# NOTE: retrying is not annotated in typeshed as on 2017-07-17, which is +# why we ignore the type on this import. +from pip._vendor.retrying import retry # type: ignore +from pip._vendor.six import PY2, text_type +from pip._vendor.six.moves import input +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib.parse import unquote as urllib_unquote + +from pip import __version__ +from pip._internal.exceptions import CommandError +from pip._internal.locations import ( + get_major_minor_version, + site_packages, + user_site, +) +from pip._internal.utils.compat import ( + WINDOWS, + expanduser, + stdlib_pkgs, + str_to_display, +) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast +from pip._internal.utils.virtualenv import ( + running_under_virtualenv, + virtualenv_no_global, +) + +if PY2: + from io import BytesIO as StringIO +else: + from io import StringIO + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, AnyStr, Container, Iterable, List, Optional, Text, + Tuple, Union, + ) + from pip._vendor.pkg_resources import Distribution + + VersionInfo = Tuple[int, int, int] + + +__all__ = ['rmtree', 'display_path', 'backup_dir', + 'ask', 'splitext', + 'format_size', 'is_installable_dir', + 'normalize_path', + 'renames', 'get_prog', + 'captured_stdout', 'ensure_dir', + 'get_installed_version', 'remove_auth_from_url'] + + +logger = logging.getLogger(__name__) + + +def get_pip_version(): + # type: () -> str + pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..") + pip_pkg_dir = os.path.abspath(pip_pkg_dir) + + return ( + 'pip {} from {} (python {})'.format( + __version__, pip_pkg_dir, get_major_minor_version(), + ) + ) + + +def normalize_version_info(py_version_info): + # type: (Tuple[int, ...]) -> Tuple[int, int, int] + """ + Convert a tuple of ints representing a Python version to one of length + three. + + :param py_version_info: a tuple of ints representing a Python version, + or None to specify no version. The tuple can have any length. + + :return: a tuple of length three if `py_version_info` is non-None. + Otherwise, return `py_version_info` unchanged (i.e. None). + """ + if len(py_version_info) < 3: + py_version_info += (3 - len(py_version_info)) * (0,) + elif len(py_version_info) > 3: + py_version_info = py_version_info[:3] + + return cast('VersionInfo', py_version_info) + + +def ensure_dir(path): + # type: (AnyStr) -> None + """os.path.makedirs without EEXIST.""" + try: + os.makedirs(path) + except OSError as e: + # Windows can raise spurious ENOTEMPTY errors. See #6426. 
+ if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY: + raise + + +def get_prog(): + # type: () -> str + try: + prog = os.path.basename(sys.argv[0]) + if prog in ('__main__.py', '-c'): + return "%s -m pip" % sys.executable + else: + return prog + except (AttributeError, TypeError, IndexError): + pass + return 'pip' + + +# Retry every half second for up to 3 seconds +@retry(stop_max_delay=3000, wait_fixed=500) +def rmtree(dir, ignore_errors=False): + # type: (str, bool) -> None + shutil.rmtree(dir, ignore_errors=ignore_errors, + onerror=rmtree_errorhandler) + + +def rmtree_errorhandler(func, path, exc_info): + """On Windows, the files in .svn are read-only, so when rmtree() tries to + remove them, an exception is thrown. We catch that here, remove the + read-only attribute, and hopefully continue without problems.""" + try: + has_attr_readonly = not (os.stat(path).st_mode & stat.S_IWRITE) + except (IOError, OSError): + # it's equivalent to os.path.exists + return + + if has_attr_readonly: + # convert to read/write + os.chmod(path, stat.S_IWRITE) + # use the original function to repeat the operation + func(path) + return + else: + raise + + +def path_to_display(path): + # type: (Optional[Union[str, Text]]) -> Optional[Text] + """ + Convert a bytes (or text) path to text (unicode in Python 2) for display + and logging purposes. + + This function should never error out. Also, this function is mainly needed + for Python 2 since in Python 3 str paths are already text. + """ + if path is None: + return None + if isinstance(path, text_type): + return path + # Otherwise, path is a bytes object (str in Python 2). + try: + display_path = path.decode(sys.getfilesystemencoding(), 'strict') + except UnicodeDecodeError: + # Include the full bytes to make troubleshooting easier, even though + # it may not be very human readable. + if PY2: + # Convert the bytes to a readable str representation using + # repr(), and then convert the str to unicode. + # Also, we add the prefix "b" to the repr() return value both + # to make the Python 2 output look like the Python 3 output, and + # to signal to the user that this is a bytes representation. + display_path = str_to_display('b{!r}'.format(path)) + else: + # Silence the "F821 undefined name 'ascii'" flake8 error since + # in Python 3 ascii() is a built-in. + display_path = ascii(path) # noqa: F821 + + return display_path + + +def display_path(path): + # type: (Union[str, Text]) -> str + """Gives the display value for a given path, making it relative to cwd + if possible.""" + path = os.path.normcase(os.path.abspath(path)) + if sys.version_info[0] == 2: + path = path.decode(sys.getfilesystemencoding(), 'replace') + path = path.encode(sys.getdefaultencoding(), 'replace') + if path.startswith(os.getcwd() + os.path.sep): + path = '.' 
+ path[len(os.getcwd()):] + return path + + +def backup_dir(dir, ext='.bak'): + # type: (str, str) -> str + """Figure out the name of a directory to back up the given dir to + (adding .bak, .bak2, etc)""" + n = 1 + extension = ext + while os.path.exists(dir + extension): + n += 1 + extension = ext + str(n) + return dir + extension + + +def ask_path_exists(message, options): + # type: (str, Iterable[str]) -> str + for action in os.environ.get('PIP_EXISTS_ACTION', '').split(): + if action in options: + return action + return ask(message, options) + + +def _check_no_input(message): + # type: (str) -> None + """Raise an error if no input is allowed.""" + if os.environ.get('PIP_NO_INPUT'): + raise Exception( + 'No input was expected ($PIP_NO_INPUT set); question: %s' % + message + ) + + +def ask(message, options): + # type: (str, Iterable[str]) -> str + """Ask the message interactively, with the given possible responses""" + while 1: + _check_no_input(message) + response = input(message) + response = response.strip().lower() + if response not in options: + print( + 'Your response (%r) was not one of the expected responses: ' + '%s' % (response, ', '.join(options)) + ) + else: + return response + + +def ask_input(message): + # type: (str) -> str + """Ask for input interactively.""" + _check_no_input(message) + return input(message) + + +def ask_password(message): + # type: (str) -> str + """Ask for a password interactively.""" + _check_no_input(message) + return getpass.getpass(message) + + +def format_size(bytes): + # type: (float) -> str + if bytes > 1000 * 1000: + return '%.1f MB' % (bytes / 1000.0 / 1000) + elif bytes > 10 * 1000: + return '%i kB' % (bytes / 1000) + elif bytes > 1000: + return '%.1f kB' % (bytes / 1000.0) + else: + return '%i bytes' % bytes + + +def is_installable_dir(path): + # type: (str) -> bool + """Is path is a directory containing setup.py or pyproject.toml? + """ + if not os.path.isdir(path): + return False + setup_py = os.path.join(path, 'setup.py') + if os.path.isfile(setup_py): + return True + pyproject_toml = os.path.join(path, 'pyproject.toml') + if os.path.isfile(pyproject_toml): + return True + return False + + +def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): + """Yield pieces of data from a file-like object until EOF.""" + while True: + chunk = file.read(size) + if not chunk: + break + yield chunk + + +def normalize_path(path, resolve_symlinks=True): + # type: (str, bool) -> str + """ + Convert a path to its canonical, case-normalized, absolute version. + + """ + path = expanduser(path) + if resolve_symlinks: + path = os.path.realpath(path) + else: + path = os.path.abspath(path) + return os.path.normcase(path) + + +def splitext(path): + # type: (str) -> Tuple[str, str] + """Like os.path.splitext, but take off .tar too""" + base, ext = posixpath.splitext(path) + if base.lower().endswith('.tar'): + ext = base[-4:] + ext + base = base[:-4] + return base, ext + + +def renames(old, new): + # type: (str, str) -> None + """Like os.renames(), but handles renaming across devices.""" + # Implementation borrowed from os.renames(). + head, tail = os.path.split(new) + if head and tail and not os.path.exists(head): + os.makedirs(head) + + shutil.move(old, new) + + head, tail = os.path.split(old) + if head and tail: + try: + os.removedirs(head) + except OSError: + pass + + +def is_local(path): + # type: (str) -> bool + """ + Return True if this is a path pip is allowed to modify. 
+ + If we're in a virtualenv, sys.prefix points to the virtualenv's + prefix; only sys.prefix is considered local. + + If we're not in a virtualenv, in general we can modify anything. + However, if the OS vendor has configured distutils to install + somewhere other than sys.prefix (which could be a subdirectory of + sys.prefix, e.g. /usr/local), we consider sys.prefix itself nonlocal + and the domain of the OS vendor. (In other words, everything _other + than_ sys.prefix is considered local.) + + Caution: this function assumes the head of path has been normalized + with normalize_path. + """ + + path = normalize_path(path) + prefix = normalize_path(sys.prefix) + + if running_under_virtualenv(): + return path.startswith(normalize_path(sys.prefix)) + else: + from pip._internal.locations import distutils_scheme + if path.startswith(prefix): + for local_path in distutils_scheme("").values(): + if path.startswith(normalize_path(local_path)): + return True + return False + else: + return True + + +def dist_is_local(dist): + # type: (Distribution) -> bool + """ + Return True if given Distribution object is installed somewhere pip + is allowed to modify. + + """ + return is_local(dist_location(dist)) + + +def dist_in_usersite(dist): + # type: (Distribution) -> bool + """ + Return True if given Distribution is installed in user site. + """ + return dist_location(dist).startswith(normalize_path(user_site)) + + +def dist_in_site_packages(dist): + # type: (Distribution) -> bool + """ + Return True if given Distribution is installed in + sysconfig.get_python_lib(). + """ + return dist_location(dist).startswith(normalize_path(site_packages)) + + +def dist_is_editable(dist): + # type: (Distribution) -> bool + """ + Return True if given Distribution is an editable install. + """ + for path_item in sys.path: + egg_link = os.path.join(path_item, dist.project_name + '.egg-link') + if os.path.isfile(egg_link): + return True + return False + + +def get_installed_distributions( + local_only=True, # type: bool + skip=stdlib_pkgs, # type: Container[str] + include_editables=True, # type: bool + editables_only=False, # type: bool + user_only=False, # type: bool + paths=None # type: Optional[List[str]] +): + # type: (...) -> List[Distribution] + """ + Return a list of installed Distribution objects. + + If ``local_only`` is True (default), only return installations + local to the current virtualenv, if in a virtualenv. + + ``skip`` argument is an iterable of lower-case project names to + ignore; defaults to stdlib_pkgs + + If ``include_editables`` is False, don't report editables. + + If ``editables_only`` is True , only report editables. + + If ``user_only`` is True , only report installations in the user + site directory. + + If ``paths`` is set, only report the distributions present at the + specified list of locations. 
+ """ + if paths: + working_set = pkg_resources.WorkingSet(paths) + else: + working_set = pkg_resources.working_set + + if local_only: + local_test = dist_is_local + else: + def local_test(d): + return True + + if include_editables: + def editable_test(d): + return True + else: + def editable_test(d): + return not dist_is_editable(d) + + if editables_only: + def editables_only_test(d): + return dist_is_editable(d) + else: + def editables_only_test(d): + return True + + if user_only: + user_test = dist_in_usersite + else: + def user_test(d): + return True + + return [d for d in working_set + if local_test(d) and + d.key not in skip and + editable_test(d) and + editables_only_test(d) and + user_test(d) + ] + + +def egg_link_path(dist): + # type: (Distribution) -> Optional[str] + """ + Return the path for the .egg-link file if it exists, otherwise, None. + + There's 3 scenarios: + 1) not in a virtualenv + try to find in site.USER_SITE, then site_packages + 2) in a no-global virtualenv + try to find in site_packages + 3) in a yes-global virtualenv + try to find in site_packages, then site.USER_SITE + (don't look in global location) + + For #1 and #3, there could be odd cases, where there's an egg-link in 2 + locations. + + This method will just return the first one found. + """ + sites = [] + if running_under_virtualenv(): + sites.append(site_packages) + if not virtualenv_no_global() and user_site: + sites.append(user_site) + else: + if user_site: + sites.append(user_site) + sites.append(site_packages) + + for site in sites: + egglink = os.path.join(site, dist.project_name) + '.egg-link' + if os.path.isfile(egglink): + return egglink + return None + + +def dist_location(dist): + # type: (Distribution) -> str + """ + Get the site-packages location of this distribution. Generally + this is dist.location, except in the case of develop-installed + packages, where dist.location is the source code location, and we + want to know where the egg-link file is. + + The returned location is normalized (in particular, with symlinks removed). + """ + egg_link = egg_link_path(dist) + if egg_link: + return normalize_path(egg_link) + return normalize_path(dist.location) + + +def write_output(msg, *args): + # type: (str, str) -> None + logger.info(msg, *args) + + +class FakeFile(object): + """Wrap a list of lines in an object with readline() to make + ConfigParser happy.""" + def __init__(self, lines): + self._gen = (l for l in lines) + + def readline(self): + try: + try: + return next(self._gen) + except NameError: + return self._gen.next() + except StopIteration: + return '' + + def __iter__(self): + return self._gen + + +class StreamWrapper(StringIO): + + @classmethod + def from_stream(cls, orig_stream): + cls.orig_stream = orig_stream + return cls() + + # compileall.compile_dir() needs stdout.encoding to print to stdout + @property + def encoding(self): + return self.orig_stream.encoding + + +@contextlib.contextmanager +def captured_output(stream_name): + """Return a context manager used by captured_stdout/stdin/stderr + that temporarily replaces the sys stream *stream_name* with a StringIO. + + Taken from Lib/support/__init__.py in the CPython repo. 
+ """ + orig_stdout = getattr(sys, stream_name) + setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout)) + try: + yield getattr(sys, stream_name) + finally: + setattr(sys, stream_name, orig_stdout) + + +def captured_stdout(): + """Capture the output of sys.stdout: + + with captured_stdout() as stdout: + print('hello') + self.assertEqual(stdout.getvalue(), 'hello\n') + + Taken from Lib/support/__init__.py in the CPython repo. + """ + return captured_output('stdout') + + +def captured_stderr(): + """ + See captured_stdout(). + """ + return captured_output('stderr') + + +class cached_property(object): + """A property that is only computed once per instance and then replaces + itself with an ordinary attribute. Deleting the attribute resets the + property. + + Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175 + """ + + def __init__(self, func): + self.__doc__ = getattr(func, '__doc__') + self.func = func + + def __get__(self, obj, cls): + if obj is None: + # We're being accessed from the class itself, not from an object + return self + value = obj.__dict__[self.func.__name__] = self.func(obj) + return value + + +def get_installed_version(dist_name, working_set=None): + """Get the installed version of dist_name avoiding pkg_resources cache""" + # Create a requirement that we'll look for inside of setuptools. + req = pkg_resources.Requirement.parse(dist_name) + + if working_set is None: + # We want to avoid having this cached, so we need to construct a new + # working set each time. + working_set = pkg_resources.WorkingSet() + + # Get the installed distribution from our working set + dist = working_set.find(req) + + # Check to see if we got an installed distribution or not, if we did + # we want to return it's version. + return dist.version if dist else None + + +def consume(iterator): + """Consume an iterable at C speed.""" + deque(iterator, maxlen=0) + + +# Simulates an enum +def enum(*sequential, **named): + enums = dict(zip(sequential, range(len(sequential))), **named) + reverse = {value: key for key, value in enums.items()} + enums['reverse_mapping'] = reverse + return type('Enum', (), enums) + + +def build_netloc(host, port): + # type: (str, Optional[int]) -> str + """ + Build a netloc from a host-port pair + """ + if port is None: + return host + if ':' in host: + # Only wrap host with square brackets when it is IPv6 + host = '[{}]'.format(host) + return '{}:{}'.format(host, port) + + +def build_url_from_netloc(netloc, scheme='https'): + # type: (str, str) -> str + """ + Build a full URL from a netloc. + """ + if netloc.count(':') >= 2 and '@' not in netloc and '[' not in netloc: + # It must be a bare IPv6 address, so wrap it with brackets. + netloc = '[{}]'.format(netloc) + return '{}://{}'.format(scheme, netloc) + + +def parse_netloc(netloc): + # type: (str) -> Tuple[str, Optional[int]] + """ + Return the host-port pair from a netloc. + """ + url = build_url_from_netloc(netloc) + parsed = urllib_parse.urlparse(url) + return parsed.hostname, parsed.port + + +def split_auth_from_netloc(netloc): + """ + Parse out and remove the auth information from a netloc. + + Returns: (netloc, (username, password)). + """ + if '@' not in netloc: + return netloc, (None, None) + + # Split from the right because that's how urllib.parse.urlsplit() + # behaves if more than one @ is present (which can be checked using + # the password attribute of urlsplit()'s return value). 
+ auth, netloc = netloc.rsplit('@', 1) + if ':' in auth: + # Split from the left because that's how urllib.parse.urlsplit() + # behaves if more than one : is present (which again can be checked + # using the password attribute of the return value) + user_pass = auth.split(':', 1) + else: + user_pass = auth, None + + user_pass = tuple( + None if x is None else urllib_unquote(x) for x in user_pass + ) + + return netloc, user_pass + + +def redact_netloc(netloc): + # type: (str) -> str + """ + Replace the sensitive data in a netloc with "****", if it exists. + + For example: + - "user:pass@example.com" returns "user:****@example.com" + - "accesstoken@example.com" returns "****@example.com" + """ + netloc, (user, password) = split_auth_from_netloc(netloc) + if user is None: + return netloc + if password is None: + user = '****' + password = '' + else: + user = urllib_parse.quote(user) + password = ':****' + return '{user}{password}@{netloc}'.format(user=user, + password=password, + netloc=netloc) + + +def _transform_url(url, transform_netloc): + """Transform and replace netloc in a url. + + transform_netloc is a function taking the netloc and returning a + tuple. The first element of this tuple is the new netloc. The + entire tuple is returned. + + Returns a tuple containing the transformed url as item 0 and the + original tuple returned by transform_netloc as item 1. + """ + purl = urllib_parse.urlsplit(url) + netloc_tuple = transform_netloc(purl.netloc) + # stripped url + url_pieces = ( + purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment + ) + surl = urllib_parse.urlunsplit(url_pieces) + return surl, netloc_tuple + + +def _get_netloc(netloc): + return split_auth_from_netloc(netloc) + + +def _redact_netloc(netloc): + return (redact_netloc(netloc),) + + +def split_auth_netloc_from_url(url): + # type: (str) -> Tuple[str, str, Tuple[str, str]] + """ + Parse a url into separate netloc, auth, and url with no auth. + + Returns: (url_without_auth, netloc, (username, password)) + """ + url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc) + return url_without_auth, netloc, auth + + +def remove_auth_from_url(url): + # type: (str) -> str + """Return a copy of url with 'username:password@' removed.""" + # username/pass params are passed to subversion through flags + # and are not recognized in the url. + return _transform_url(url, _get_netloc)[0] + + +def redact_auth_from_url(url): + # type: (str) -> str + """Replace the password in a given url with ****.""" + return _transform_url(url, _redact_netloc)[0] + + +class HiddenText(object): + def __init__( + self, + secret, # type: str + redacted, # type: str + ): + # type: (...) -> None + self.secret = secret + self.redacted = redacted + + def __repr__(self): + # type: (...) -> str + return '<HiddenText {!r}>'.format(str(self)) + + def __str__(self): + # type: (...) -> str + return self.redacted + + # This is useful for testing. + def __eq__(self, other): + # type: (Any) -> bool + if type(self) != type(other): + return False + + # The string being used for redaction doesn't also have to match, + # just the raw, original string. + return (self.secret == other.secret) + + # We need to provide an explicit __ne__ implementation for Python 2. + # TODO: remove this when we drop PY2 support. 
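+    # (Python 3 derives __ne__ from __eq__ automatically; Python 2 does
+    # not, hence the explicit definition below.)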
+ def __ne__(self, other): + # type: (Any) -> bool + return not self == other + + +def hide_value(value): + # type: (str) -> HiddenText + return HiddenText(value, redacted='****') + + +def hide_url(url): + # type: (str) -> HiddenText + redacted = redact_auth_from_url(url) + return HiddenText(url, redacted=redacted) + + +def protect_pip_from_modification_on_windows(modifying_pip): + # type: (bool) -> None + """Protection of pip.exe from modification on Windows + + On Windows, any operation modifying pip should be run as: + python -m pip ... + """ + pip_names = [ + "pip.exe", + "pip{}.exe".format(sys.version_info[0]), + "pip{}.{}.exe".format(*sys.version_info[:2]) + ] + + # See https://github.com/pypa/pip/issues/1299 for more discussion + should_show_use_python_msg = ( + modifying_pip and + WINDOWS and + os.path.basename(sys.argv[0]) in pip_names + ) + + if should_show_use_python_msg: + new_command = [ + sys.executable, "-m", "pip" + ] + sys.argv[1:] + raise CommandError( + 'To modify pip, please run the following command:\n{}' + .format(" ".join(new_command)) + ) + + +def is_console_interactive(): + # type: () -> bool + """Is this console interactive? + """ + return sys.stdin is not None and sys.stdin.isatty() + + +def hash_file(path, blocksize=1 << 20): + # type: (str, int) -> Tuple[Any, int] + """Return (hash, length) for path using hashlib.sha256() + """ + + h = hashlib.sha256() + length = 0 + with open(path, 'rb') as f: + for block in read_chunks(f, size=blocksize): + length += len(block) + h.update(block) + return h, length + + +def is_wheel_installed(): + """ + Return whether the wheel package is installed. + """ + try: + import wheel # noqa: F401 + except ImportError: + return False + + return True diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/models.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/models.py new file mode 100644 index 0000000000000000000000000000000000000000..29e1441153b63446220a5e1867e691183e0d22d7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/models.py @@ -0,0 +1,42 @@ +"""Utilities for defining models +""" +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False
+
+import operator
+
+
+class KeyBasedCompareMixin(object):
+    """Provides comparison capabilities that are based on a key
+    """
+
+    def __init__(self, key, defining_class):
+        self._compare_key = key
+        self._defining_class = defining_class
+
+    def __hash__(self):
+        return hash(self._compare_key)
+
+    def __lt__(self, other):
+        return self._compare(other, operator.__lt__)
+
+    def __le__(self, other):
+        return self._compare(other, operator.__le__)
+
+    def __gt__(self, other):
+        return self._compare(other, operator.__gt__)
+
+    def __ge__(self, other):
+        return self._compare(other, operator.__ge__)
+
+    def __eq__(self, other):
+        return self._compare(other, operator.__eq__)
+
+    def __ne__(self, other):
+        return self._compare(other, operator.__ne__)
+
+    def _compare(self, other, method):
+        if not isinstance(other, self._defining_class):
+            return NotImplemented
+
+        return method(self._compare_key, other._compare_key)
diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/packaging.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/packaging.py
new file mode 100644
index 0000000000000000000000000000000000000000..68aa86edbf012c68ceadbe67e21e5d6c9ebbc0ab
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/packaging.py
@@ -0,0 +1,94 @@
+from __future__ import absolute_import
+
+import logging
+from email.parser import FeedParser
+
+from pip._vendor import pkg_resources
+from pip._vendor.packaging import specifiers, version
+
+from pip._internal.exceptions import NoneMetadataError
+from pip._internal.utils.misc import display_path
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+    from typing import Optional, Tuple
+    from email.message import Message
+    from pip._vendor.pkg_resources import Distribution
+
+
+logger = logging.getLogger(__name__)
+
+
+def check_requires_python(requires_python, version_info):
+    # type: (Optional[str], Tuple[int, ...]) -> bool
+    """
+    Check if the given Python version matches a "Requires-Python" specifier.
+
+    :param version_info: A 3-tuple of ints representing a Python
+        major-minor-micro version to check (e.g. `sys.version_info[:3]`).
+
+    :return: `True` if the given Python version satisfies the requirement.
+        Otherwise, return `False`.
+
+    :raises InvalidSpecifier: If `requires_python` has an invalid format.
+    """
+    if requires_python is None:
+        # The package provides no information
+        return True
+    requires_python_specifier = specifiers.SpecifierSet(requires_python)
+
+    python_version = version.parse('.'.join(map(str, version_info)))
+    return python_version in requires_python_specifier
+
+
+def get_metadata(dist):
+    # type: (Distribution) -> Message
+    """
+    :raises NoneMetadataError: if the distribution reports `has_metadata()`
+        True but `get_metadata()` returns None.
+    """
+    metadata_name = 'METADATA'
+    if (isinstance(dist, pkg_resources.DistInfoDistribution) and
+            dist.has_metadata(metadata_name)):
+        metadata = dist.get_metadata(metadata_name)
+    elif dist.has_metadata('PKG-INFO'):
+        metadata_name = 'PKG-INFO'
+        metadata = dist.get_metadata(metadata_name)
+    else:
+        logger.warning("No metadata found in %s", display_path(dist.location))
+        metadata = ''
+
+    if metadata is None:
+        raise NoneMetadataError(dist, metadata_name)
+
+    feed_parser = FeedParser()
+    # The following line errors out with a "NoneType" TypeError if
+    # passed metadata=None.
+ feed_parser.feed(metadata) + return feed_parser.close() + + +def get_requires_python(dist): + # type: (pkg_resources.Distribution) -> Optional[str] + """ + Return the "Requires-Python" metadata for a distribution, or None + if not present. + """ + pkg_info_dict = get_metadata(dist) + requires_python = pkg_info_dict.get('Requires-Python') + + if requires_python is not None: + # Convert to a str to satisfy the type checker, since requires_python + # can be a Header object. + requires_python = str(requires_python) + + return requires_python + + +def get_installer(dist): + # type: (Distribution) -> str + if dist.has_metadata('INSTALLER'): + for line in dist.get_metadata_lines('INSTALLER'): + if line.strip(): + return line.strip() + return '' diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/pkg_resources.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/pkg_resources.py new file mode 100644 index 0000000000000000000000000000000000000000..0bc129acc6ab582eb087be7ee186c554dc5feba1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/pkg_resources.py @@ -0,0 +1,44 @@ +from pip._vendor.pkg_resources import yield_lines +from pip._vendor.six import ensure_str + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Dict, Iterable, List + + +class DictMetadata(object): + """IMetadataProvider that reads metadata files from a dictionary. + """ + def __init__(self, metadata): + # type: (Dict[str, bytes]) -> None + self._metadata = metadata + + def has_metadata(self, name): + # type: (str) -> bool + return name in self._metadata + + def get_metadata(self, name): + # type: (str) -> str + try: + return ensure_str(self._metadata[name]) + except UnicodeDecodeError as e: + # Mirrors handling done in pkg_resources.NullProvider. + e.reason += " in {} file".format(name) + raise + + def get_metadata_lines(self, name): + # type: (str) -> Iterable[str] + return yield_lines(self.get_metadata(name)) + + def metadata_isdir(self, name): + # type: (str) -> bool + return False + + def metadata_listdir(self, name): + # type: (str) -> List[str] + return [] + + def run_script(self, script_name, namespace): + # type: (str, str) -> None + pass diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/setuptools_build.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/setuptools_build.py new file mode 100644 index 0000000000000000000000000000000000000000..4147a650dca185dcd4491b805d0bdb0775eff924 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/setuptools_build.py @@ -0,0 +1,181 @@ +import sys + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional, Sequence + +# Shim to wrap setup.py invocation with setuptools +# +# We set sys.argv[0] to the path to the underlying setup.py file so +# setuptools / distutils don't take the path to the setup.py to be "-c" when +# invoking via the shim. This avoids e.g. the following manifest_maker +# warning: "warning: manifest_maker: standard file '-c' not found". 
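+#
+# Roughly (an illustrative sketch, not the literal string), for
+# setup_py_path='/path/to/setup.py' the resulting call looks like:
+#
+#   python -c "import sys, setuptools, tokenize; sys.argv[0] = '/path/to/setup.py'; ..."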
+_SETUPTOOLS_SHIM = ( + "import sys, setuptools, tokenize; sys.argv[0] = {0!r}; __file__={0!r};" + "f=getattr(tokenize, 'open', open)(__file__);" + "code=f.read().replace('\\r\\n', '\\n');" + "f.close();" + "exec(compile(code, __file__, 'exec'))" +) + + +def make_setuptools_shim_args( + setup_py_path, # type: str + global_options=None, # type: Sequence[str] + no_user_config=False, # type: bool + unbuffered_output=False # type: bool +): + # type: (...) -> List[str] + """ + Get setuptools command arguments with shim wrapped setup file invocation. + + :param setup_py_path: The path to setup.py to be wrapped. + :param global_options: Additional global options. + :param no_user_config: If True, disables personal user configuration. + :param unbuffered_output: If True, adds the unbuffered switch to the + argument list. + """ + args = [sys.executable] + if unbuffered_output: + args += ["-u"] + args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)] + if global_options: + args += global_options + if no_user_config: + args += ["--no-user-cfg"] + return args + + +def make_setuptools_bdist_wheel_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + build_options, # type: Sequence[str] + destination_dir, # type: str +): + # type: (...) -> List[str] + # NOTE: Eventually, we'd want to also -S to the flags here, when we're + # isolating. Currently, it breaks Python in virtualenvs, because it + # relies on site.py to find parts of the standard library outside the + # virtualenv. + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + unbuffered_output=True + ) + args += ["bdist_wheel", "-d", destination_dir] + args += build_options + return args + + +def make_setuptools_clean_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] +): + # type: (...) -> List[str] + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + unbuffered_output=True + ) + args += ["clean", "--all"] + return args + + +def make_setuptools_develop_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + install_options, # type: Sequence[str] + no_user_config, # type: bool + prefix, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool +): + # type: (...) -> List[str] + assert not (use_user_site and prefix) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + ) + + args += ["develop", "--no-deps"] + + args += install_options + + if prefix: + args += ["--prefix", prefix] + if home is not None: + args += ["--home", home] + + if use_user_site: + args += ["--user", "--prefix="] + + return args + + +def make_setuptools_egg_info_args( + setup_py_path, # type: str + egg_info_dir, # type: Optional[str] + no_user_config, # type: bool +): + # type: (...) -> List[str] + args = make_setuptools_shim_args(setup_py_path) + if no_user_config: + args += ["--no-user-cfg"] + + args += ["egg_info"] + + if egg_info_dir: + args += ["--egg-base", egg_info_dir] + + return args + + +def make_setuptools_install_args( + setup_py_path, # type: str + global_options, # type: Sequence[str] + install_options, # type: Sequence[str] + record_filename, # type: str + root, # type: Optional[str] + prefix, # type: Optional[str] + header_dir, # type: Optional[str] + home, # type: Optional[str] + use_user_site, # type: bool + no_user_config, # type: bool + pycompile # type: bool +): + # type: (...) 
-> List[str] + assert not (use_user_site and prefix) + assert not (use_user_site and root) + + args = make_setuptools_shim_args( + setup_py_path, + global_options=global_options, + no_user_config=no_user_config, + unbuffered_output=True + ) + args += ["install", "--record", record_filename] + args += ["--single-version-externally-managed"] + + if root is not None: + args += ["--root", root] + if prefix is not None: + args += ["--prefix", prefix] + if home is not None: + args += ["--home", home] + if use_user_site: + args += ["--user", "--prefix="] + + if pycompile: + args += ["--compile"] + else: + args += ["--no-compile"] + + if header_dir: + args += ["--install-headers", header_dir] + + args += install_options + + return args diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/subprocess.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/subprocess.py new file mode 100644 index 0000000000000000000000000000000000000000..ea0176d341ec037e72399b43709aaa837f9c4744 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/subprocess.py @@ -0,0 +1,278 @@ +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +from __future__ import absolute_import + +import logging +import os +import subprocess + +from pip._vendor.six.moves import shlex_quote + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.compat import console_to_str, str_to_display +from pip._internal.utils.logging import subprocess_logger +from pip._internal.utils.misc import HiddenText, path_to_display +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.ui import open_spinner + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, Callable, Iterable, List, Mapping, Optional, Text, Union, + ) + from pip._internal.utils.ui import SpinnerInterface + + CommandArgs = List[Union[str, HiddenText]] + + +LOG_DIVIDER = '----------------------------------------' + + +def make_command(*args): + # type: (Union[str, HiddenText, CommandArgs]) -> CommandArgs + """ + Create a CommandArgs object. + """ + command_args = [] # type: CommandArgs + for arg in args: + # Check for list instead of CommandArgs since CommandArgs is + # only known during type-checking. + if isinstance(arg, list): + command_args.extend(arg) + else: + # Otherwise, arg is str or HiddenText. + command_args.append(arg) + + return command_args + + +def format_command_args(args): + # type: (Union[List[str], CommandArgs]) -> str + """ + Format command arguments for display. + """ + # For HiddenText arguments, display the redacted form by calling str(). + # Also, we don't apply str() to arguments that aren't HiddenText since + # this can trigger a UnicodeDecodeError in Python 2 if the argument + # has type unicode and includes a non-ascii character. (The type + # checker doesn't ensure the annotations are correct in all cases.) + return ' '.join( + shlex_quote(str(arg)) if isinstance(arg, HiddenText) + else shlex_quote(arg) for arg in args + ) + + +def reveal_command_args(args): + # type: (Union[List[str], CommandArgs]) -> List[str] + """ + Return the arguments in their raw, unredacted form. + """ + return [ + arg.secret if isinstance(arg, HiddenText) else arg for arg in args + ] + + +def make_subprocess_output_error( + cmd_args, # type: Union[List[str], CommandArgs] + cwd, # type: Optional[str] + lines, # type: List[Text] + exit_status, # type: int +): + # type: (...) 
-> Text + """ + Create and return the error message to use to log a subprocess error + with command output. + + :param lines: A list of lines, each ending with a newline. + """ + command = format_command_args(cmd_args) + # Convert `command` and `cwd` to text (unicode in Python 2) so we can use + # them as arguments in the unicode format string below. This avoids + # "UnicodeDecodeError: 'ascii' codec can't decode byte ..." in Python 2 + # if either contains a non-ascii character. + command_display = str_to_display(command, desc='command bytes') + cwd_display = path_to_display(cwd) + + # We know the joined output value ends in a newline. + output = ''.join(lines) + msg = ( + # Use a unicode string to avoid "UnicodeEncodeError: 'ascii' + # codec can't encode character ..." in Python 2 when a format + # argument (e.g. `output`) has a non-ascii character. + u'Command errored out with exit status {exit_status}:\n' + ' command: {command_display}\n' + ' cwd: {cwd_display}\n' + 'Complete output ({line_count} lines):\n{output}{divider}' + ).format( + exit_status=exit_status, + command_display=command_display, + cwd_display=cwd_display, + line_count=len(lines), + output=output, + divider=LOG_DIVIDER, + ) + return msg + + +def call_subprocess( + cmd, # type: Union[List[str], CommandArgs] + show_stdout=False, # type: bool + cwd=None, # type: Optional[str] + on_returncode='raise', # type: str + extra_ok_returncodes=None, # type: Optional[Iterable[int]] + command_desc=None, # type: Optional[str] + extra_environ=None, # type: Optional[Mapping[str, Any]] + unset_environ=None, # type: Optional[Iterable[str]] + spinner=None, # type: Optional[SpinnerInterface] + log_failed_cmd=True # type: Optional[bool] +): + # type: (...) -> Text + """ + Args: + show_stdout: if true, use INFO to log the subprocess's stderr and + stdout streams. Otherwise, use DEBUG. Defaults to False. + extra_ok_returncodes: an iterable of integer return codes that are + acceptable, in addition to 0. Defaults to None, which means []. + unset_environ: an iterable of environment variable names to unset + prior to calling subprocess.Popen(). + log_failed_cmd: if false, failed commands are not logged, only raised. + """ + if extra_ok_returncodes is None: + extra_ok_returncodes = [] + if unset_environ is None: + unset_environ = [] + # Most places in pip use show_stdout=False. What this means is-- + # + # - We connect the child's output (combined stderr and stdout) to a + # single pipe, which we read. + # - We log this output to stderr at DEBUG level as it is received. + # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't + # requested), then we show a spinner so the user can still see the + # subprocess is in progress. + # - If the subprocess exits with an error, we log the output to stderr + # at ERROR level if it hasn't already been displayed to the console + # (e.g. if --verbose logging wasn't enabled). This way we don't log + # the output to the console twice. + # + # If show_stdout=True, then the above is still done, but with DEBUG + # replaced by INFO. + if show_stdout: + # Then log the subprocess output at INFO level. + log_subprocess = subprocess_logger.info + used_level = logging.INFO + else: + # Then log the subprocess output using DEBUG. This also ensures + # it will be logged to the log file (aka user_log), if enabled. + log_subprocess = subprocess_logger.debug + used_level = logging.DEBUG + + # Whether the subprocess will be visible in the console. 
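+    # (getEffectiveLevel() walks up the logger hierarchy, so this reflects
+    # whatever level pip's --verbose/--quiet options configured.)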
+ showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level + + # Only use the spinner if we're not showing the subprocess output + # and we have a spinner. + use_spinner = not showing_subprocess and spinner is not None + + if command_desc is None: + command_desc = format_command_args(cmd) + + log_subprocess("Running command %s", command_desc) + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + for name in unset_environ: + env.pop(name, None) + try: + proc = subprocess.Popen( + # Convert HiddenText objects to the underlying str. + reveal_command_args(cmd), + stderr=subprocess.STDOUT, stdin=subprocess.PIPE, + stdout=subprocess.PIPE, cwd=cwd, env=env, + ) + proc.stdin.close() + except Exception as exc: + if log_failed_cmd: + subprocess_logger.critical( + "Error %s while executing command %s", exc, command_desc, + ) + raise + all_output = [] + while True: + # The "line" value is a unicode string in Python 2. + line = console_to_str(proc.stdout.readline()) + if not line: + break + line = line.rstrip() + all_output.append(line + '\n') + + # Show the line immediately. + log_subprocess(line) + # Update the spinner. + if use_spinner: + spinner.spin() + try: + proc.wait() + finally: + if proc.stdout: + proc.stdout.close() + proc_had_error = ( + proc.returncode and proc.returncode not in extra_ok_returncodes + ) + if use_spinner: + if proc_had_error: + spinner.finish("error") + else: + spinner.finish("done") + if proc_had_error: + if on_returncode == 'raise': + if not showing_subprocess and log_failed_cmd: + # Then the subprocess streams haven't been logged to the + # console yet. + msg = make_subprocess_output_error( + cmd_args=cmd, + cwd=cwd, + lines=all_output, + exit_status=proc.returncode, + ) + subprocess_logger.error(msg) + exc_msg = ( + 'Command errored out with exit status {}: {} ' + 'Check the logs for full command output.' + ).format(proc.returncode, command_desc) + raise InstallationError(exc_msg) + elif on_returncode == 'warn': + subprocess_logger.warning( + 'Command "%s" had error code %s in %s', + command_desc, proc.returncode, cwd, + ) + elif on_returncode == 'ignore': + pass + else: + raise ValueError('Invalid value: on_returncode=%s' % + repr(on_returncode)) + return ''.join(all_output) + + +def runner_with_spinner_message(message): + # type: (str) -> Callable[..., None] + """Provide a subprocess_runner that shows a spinner message. + + Intended for use with for pep517's Pep517HookCaller. Thus, the runner has + an API that matches what's expected by Pep517HookCaller.subprocess_runner. + """ + + def runner( + cmd, # type: List[str] + cwd=None, # type: Optional[str] + extra_environ=None # type: Optional[Mapping[str, Any]] + ): + # type: (...) 
-> None + with open_spinner(message) as spinner: + call_subprocess( + cmd, + cwd=cwd, + extra_environ=extra_environ, + spinner=spinner, + ) + + return runner diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/temp_dir.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/temp_dir.py new file mode 100644 index 0000000000000000000000000000000000000000..65e41bc70e2d8184b8917b539726556c84f2c0df --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/temp_dir.py @@ -0,0 +1,250 @@ +from __future__ import absolute_import + +import errno +import itertools +import logging +import os.path +import tempfile +from contextlib import contextmanager + +from pip._vendor.contextlib2 import ExitStack + +from pip._internal.utils.misc import rmtree +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any, Dict, Iterator, Optional, TypeVar + + _T = TypeVar('_T', bound='TempDirectory') + + +logger = logging.getLogger(__name__) + + +_tempdir_manager = None # type: Optional[ExitStack] + + +@contextmanager +def global_tempdir_manager(): + # type: () -> Iterator[None] + global _tempdir_manager + with ExitStack() as stack: + old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack + try: + yield + finally: + _tempdir_manager = old_tempdir_manager + + +class TempDirectoryTypeRegistry(object): + """Manages temp directory behavior + """ + + def __init__(self): + # type: () -> None + self._should_delete = {} # type: Dict[str, bool] + + def set_delete(self, kind, value): + # type: (str, bool) -> None + """Indicate whether a TempDirectory of the given kind should be + auto-deleted. + """ + self._should_delete[kind] = value + + def get_delete(self, kind): + # type: (str) -> bool + """Get configured auto-delete flag for a given TempDirectory type, + default True. + """ + return self._should_delete.get(kind, True) + + +_tempdir_registry = None # type: Optional[TempDirectoryTypeRegistry] + + +@contextmanager +def tempdir_registry(): + # type: () -> Iterator[TempDirectoryTypeRegistry] + """Provides a scoped global tempdir registry that can be used to dictate + whether directories should be deleted. + """ + global _tempdir_registry + old_tempdir_registry = _tempdir_registry + _tempdir_registry = TempDirectoryTypeRegistry() + try: + yield _tempdir_registry + finally: + _tempdir_registry = old_tempdir_registry + + +class TempDirectory(object): + """Helper class that owns and cleans up a temporary directory. + + This class can be used as a context manager or as an OO representation of a + temporary directory. + + Attributes: + path + Location to the created temporary directory + delete + Whether the directory should be deleted when exiting + (when used as a contextmanager) + + Methods: + cleanup() + Deletes the temporary directory + + When used as a context manager, if the delete attribute is True, on + exiting the context the temporary directory is deleted. + """ + + def __init__( + self, + path=None, # type: Optional[str] + delete=None, # type: Optional[bool] + kind="temp", # type: str + globally_managed=False, # type: bool + ): + super(TempDirectory, self).__init__() + + # If we were given an explicit directory, resolve delete option now. + # Otherwise we wait until cleanup and see what tempdir_registry says. 
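+        # (That is, an explicit path with `delete` left unset defaults to
+        # keeping the directory; see the branch below.)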
+ if path is not None and delete is None: + delete = False + + if path is None: + path = self._create(kind) + + self._path = path + self._deleted = False + self.delete = delete + self.kind = kind + + if globally_managed: + assert _tempdir_manager is not None + _tempdir_manager.enter_context(self) + + @property + def path(self): + # type: () -> str + assert not self._deleted, ( + "Attempted to access deleted path: {}".format(self._path) + ) + return self._path + + def __repr__(self): + # type: () -> str + return "<{} {!r}>".format(self.__class__.__name__, self.path) + + def __enter__(self): + # type: (_T) -> _T + return self + + def __exit__(self, exc, value, tb): + # type: (Any, Any, Any) -> None + if self.delete is not None: + delete = self.delete + elif _tempdir_registry: + delete = _tempdir_registry.get_delete(self.kind) + else: + delete = True + + if delete: + self.cleanup() + + def _create(self, kind): + # type: (str) -> str + """Create a temporary directory and store its path in self.path + """ + # We realpath here because some systems have their default tmpdir + # symlinked to another directory. This tends to confuse build + # scripts, so we canonicalize the path by traversing potential + # symlinks here. + path = os.path.realpath( + tempfile.mkdtemp(prefix="pip-{}-".format(kind)) + ) + logger.debug("Created temporary directory: {}".format(path)) + return path + + def cleanup(self): + # type: () -> None + """Remove the temporary directory created and reset state + """ + self._deleted = True + if os.path.exists(self._path): + rmtree(self._path) + + +class AdjacentTempDirectory(TempDirectory): + """Helper class that creates a temporary directory adjacent to a real one. + + Attributes: + original + The original directory to create a temp directory for. + path + After calling create() or entering, contains the full + path to the temporary directory. + delete + Whether the directory should be deleted when exiting + (when used as a contextmanager) + + """ + # The characters that may be used to name the temp directory + # We always prepend a ~ and then rotate through these until + # a usable name is found. + # pkg_resources raises a different error for .dist-info folder + # with leading '-' and invalid metadata + LEADING_CHARS = "-~.=%0123456789" + + def __init__(self, original, delete=None): + # type: (str, Optional[bool]) -> None + self.original = original.rstrip('/\\') + super(AdjacentTempDirectory, self).__init__(delete=delete) + + @classmethod + def _generate_names(cls, name): + # type: (str) -> Iterator[str] + """Generates a series of temporary names. + + The algorithm replaces the leading characters in the name + with ones that are valid filesystem characters, but are not + valid package names (for both Python and pip definitions of + package). 
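+
+        For example (a sketch): name='foo' yields '~oo' first, then
+        '~-o', '~~o', ..., and finally longer fallbacks such as '~foo',
+        '~-foo', and so on.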
+ """ + for i in range(1, len(name)): + for candidate in itertools.combinations_with_replacement( + cls.LEADING_CHARS, i - 1): + new_name = '~' + ''.join(candidate) + name[i:] + if new_name != name: + yield new_name + + # If we make it this far, we will have to make a longer name + for i in range(len(cls.LEADING_CHARS)): + for candidate in itertools.combinations_with_replacement( + cls.LEADING_CHARS, i): + new_name = '~' + ''.join(candidate) + name + if new_name != name: + yield new_name + + def _create(self, kind): + # type: (str) -> str + root, name = os.path.split(self.original) + for candidate in self._generate_names(name): + path = os.path.join(root, candidate) + try: + os.mkdir(path) + except OSError as ex: + # Continue if the name exists already + if ex.errno != errno.EEXIST: + raise + else: + path = os.path.realpath(path) + break + else: + # Final fallback on the default behavior. + path = os.path.realpath( + tempfile.mkdtemp(prefix="pip-{}-".format(kind)) + ) + + logger.debug("Created temporary directory: {}".format(path)) + return path diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/typing.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/typing.py new file mode 100644 index 0000000000000000000000000000000000000000..8505a29b15d5f8a3565a52796c4e39cc6b826ffc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/typing.py @@ -0,0 +1,38 @@ +"""For neatly implementing static typing in pip. + +`mypy` - the static type analysis tool we use - uses the `typing` module, which +provides core functionality fundamental to mypy's functioning. + +Generally, `typing` would be imported at runtime and used in that fashion - +it acts as a no-op at runtime and does not have any run-time overhead by +design. + +As it turns out, `typing` is not vendorable - it uses separate sources for +Python 2/Python 3. Thus, this codebase can not expect it to be present. +To work around this, mypy allows the typing import to be behind a False-y +optional to prevent it from running at runtime and type-comments can be used +to remove the need for the types to be accessible directly during runtime. + +This module provides the False-y guard in a nicely named fashion so that a +curious maintainer can reach here to read this. + +In pip, all static-typing related imports should be guarded as follows: + + from pip._internal.utils.typing import MYPY_CHECK_RUNNING + + if MYPY_CHECK_RUNNING: + from typing import ... + +Ref: https://github.com/python/mypy/issues/3216 +""" + +MYPY_CHECK_RUNNING = False + + +if MYPY_CHECK_RUNNING: + from typing import cast +else: + # typing's cast() is needed at runtime, but we don't want to import typing. + # Thus, we use a dummy no-op version, which we tell mypy to ignore. + def cast(type_, value): # type: ignore + return value diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/ui.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/ui.py new file mode 100644 index 0000000000000000000000000000000000000000..87782aa641d5dfe4845f751c8fcc05658da91501 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/ui.py @@ -0,0 +1,428 @@ +# The following comment should be removed at some point in the future. 
+# mypy: strict-optional=False +# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import, division + +import contextlib +import itertools +import logging +import sys +import time +from signal import SIGINT, default_int_handler, signal + +from pip._vendor import six +from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR +from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar +from pip._vendor.progress.spinner import Spinner + +from pip._internal.utils.compat import WINDOWS +from pip._internal.utils.logging import get_indentation +from pip._internal.utils.misc import format_size +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Any, Iterator, IO + +try: + from pip._vendor import colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. +except Exception: + colorama = None + +logger = logging.getLogger(__name__) + + +def _select_progress_class(preferred, fallback): + encoding = getattr(preferred.file, "encoding", None) + + # If we don't know what encoding this file is in, then we'll just assume + # that it doesn't support unicode and use the ASCII bar. + if not encoding: + return fallback + + # Collect all of the possible characters we want to use with the preferred + # bar. + characters = [ + getattr(preferred, "empty_fill", six.text_type()), + getattr(preferred, "fill", six.text_type()), + ] + characters += list(getattr(preferred, "phases", [])) + + # Try to decode the characters we're using for the bar using the encoding + # of the given file, if this works then we'll assume that we can use the + # fancier bar and if not we'll fall back to the plaintext bar. + try: + six.text_type().join(characters).encode(encoding) + except UnicodeEncodeError: + return fallback + else: + return preferred + + +_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any + + +class InterruptibleMixin(object): + """ + Helper to ensure that self.finish() gets called on keyboard interrupt. + + This allows downloads to be interrupted without leaving temporary state + (like hidden cursors) behind. + + This class is similar to the progress library's existing SigIntMixin + helper, but as of version 1.2, that helper has the following problems: + + 1. It calls sys.exit(). + 2. It discards the existing SIGINT handler completely. + 3. It leaves its own handler in place even after an uninterrupted finish, + which will have unexpected delayed effects if the user triggers an + unrelated keyboard interrupt some time after a progress-displaying + download has already completed, for example. + """ + + def __init__(self, *args, **kwargs): + """ + Save the original SIGINT handler for later. + """ + super(InterruptibleMixin, self).__init__(*args, **kwargs) + + self.original_handler = signal(SIGINT, self.handle_sigint) + + # If signal() returns None, the previous handler was not installed from + # Python, and we cannot restore it. This probably should not happen, + # but if it does, we must restore something sensible instead, at least. + # The least bad option should be Python's default SIGINT handler, which + # just raises KeyboardInterrupt. + if self.original_handler is None: + self.original_handler = default_int_handler + + def finish(self): + """ + Restore the original SIGINT handler after finishing. + + This should happen regardless of whether the progress display finishes + normally, or gets interrupted. 
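+
+        (Otherwise the handler installed in __init__ would stay active
+        and fire on an unrelated Ctrl-C long after this bar finished.)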
+ """ + super(InterruptibleMixin, self).finish() + signal(SIGINT, self.original_handler) + + def handle_sigint(self, signum, frame): + """ + Call self.finish() before delegating to the original SIGINT handler. + + This handler should only be in place while the progress display is + active. + """ + self.finish() + self.original_handler(signum, frame) + + +class SilentBar(Bar): + + def update(self): + pass + + +class BlueEmojiBar(IncrementalBar): + + suffix = "%(percent)d%%" + bar_prefix = " " + bar_suffix = " " + phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535") # type: Any + + +class DownloadProgressMixin(object): + + def __init__(self, *args, **kwargs): + super(DownloadProgressMixin, self).__init__(*args, **kwargs) + self.message = (" " * (get_indentation() + 2)) + self.message + + @property + def downloaded(self): + return format_size(self.index) + + @property + def download_speed(self): + # Avoid zero division errors... + if self.avg == 0.0: + return "..." + return format_size(1 / self.avg) + "/s" + + @property + def pretty_eta(self): + if self.eta: + return "eta %s" % self.eta_td + return "" + + def iter(self, it): + for x in it: + yield x + self.next(len(x)) + self.finish() + + +class WindowsMixin(object): + + def __init__(self, *args, **kwargs): + # The Windows terminal does not support the hide/show cursor ANSI codes + # even with colorama. So we'll ensure that hide_cursor is False on + # Windows. + # This call needs to go before the super() call, so that hide_cursor + # is set in time. The base progress bar class writes the "hide cursor" + # code to the terminal in its init, so if we don't set this soon + # enough, we get a "hide" with no corresponding "show"... + if WINDOWS and self.hide_cursor: + self.hide_cursor = False + + super(WindowsMixin, self).__init__(*args, **kwargs) + + # Check if we are running on Windows and we have the colorama module, + # if we do then wrap our file with it. + if WINDOWS and colorama: + self.file = colorama.AnsiToWin32(self.file) + # The progress code expects to be able to call self.file.isatty() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. + self.file.isatty = lambda: self.file.wrapped.isatty() + # The progress code expects to be able to call self.file.flush() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. 
+ self.file.flush = lambda: self.file.wrapped.flush() + + +class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, + DownloadProgressMixin): + + file = sys.stdout + message = "%(percent)d%%" + suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s" + +# NOTE: The "type: ignore" comments on the following classes are there to +# work around https://github.com/python/typing/issues/241 + + +class DefaultDownloadProgressBar(BaseDownloadProgressBar, + _BaseBar): + pass + + +class DownloadSilentBar(BaseDownloadProgressBar, SilentBar): # type: ignore + pass + + +class DownloadBar(BaseDownloadProgressBar, # type: ignore + Bar): + pass + + +class DownloadFillingCirclesBar(BaseDownloadProgressBar, # type: ignore + FillingCirclesBar): + pass + + +class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, # type: ignore + BlueEmojiBar): + pass + + +class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin, + DownloadProgressMixin, Spinner): + + file = sys.stdout + suffix = "%(downloaded)s %(download_speed)s" + + def next_phase(self): + if not hasattr(self, "_phaser"): + self._phaser = itertools.cycle(self.phases) + return next(self._phaser) + + def update(self): + message = self.message % self + phase = self.next_phase() + suffix = self.suffix % self + line = ''.join([ + message, + " " if message else "", + phase, + " " if suffix else "", + suffix, + ]) + + self.writeln(line) + + +BAR_TYPES = { + "off": (DownloadSilentBar, DownloadSilentBar), + "on": (DefaultDownloadProgressBar, DownloadProgressSpinner), + "ascii": (DownloadBar, DownloadProgressSpinner), + "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner), + "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner) +} + + +def DownloadProgressProvider(progress_bar, max=None): + if max is None or max == 0: + return BAR_TYPES[progress_bar][1]().iter + else: + return BAR_TYPES[progress_bar][0](max=max).iter + + +################################################################ +# Generic "something is happening" spinners +# +# We don't even try using progress.spinner.Spinner here because it's actually +# simpler to reimplement from scratch than to coerce their code into doing +# what we need. +################################################################ + +@contextlib.contextmanager +def hidden_cursor(file): + # type: (IO[Any]) -> Iterator[None] + # The Windows terminal does not support the hide/show cursor ANSI codes, + # even via colorama. So don't even try. + if WINDOWS: + yield + # We don't want to clutter the output with control characters if we're + # writing to a file, or if the user is running with --quiet. 
+ # See https://github.com/pypa/pip/issues/3418 + elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO: + yield + else: + file.write(HIDE_CURSOR) + try: + yield + finally: + file.write(SHOW_CURSOR) + + +class RateLimiter(object): + def __init__(self, min_update_interval_seconds): + # type: (float) -> None + self._min_update_interval_seconds = min_update_interval_seconds + self._last_update = 0 # type: float + + def ready(self): + # type: () -> bool + now = time.time() + delta = now - self._last_update + return delta >= self._min_update_interval_seconds + + def reset(self): + # type: () -> None + self._last_update = time.time() + + +class SpinnerInterface(object): + def spin(self): + # type: () -> None + raise NotImplementedError() + + def finish(self, final_status): + # type: (str) -> None + raise NotImplementedError() + + +class InteractiveSpinner(SpinnerInterface): + def __init__(self, message, file=None, spin_chars="-\\|/", + # Empirically, 8 updates/second looks nice + min_update_interval_seconds=0.125): + self._message = message + if file is None: + file = sys.stdout + self._file = file + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._finished = False + + self._spin_cycle = itertools.cycle(spin_chars) + + self._file.write(" " * get_indentation() + self._message + " ... ") + self._width = 0 + + def _write(self, status): + assert not self._finished + # Erase what we wrote before by backspacing to the beginning, writing + # spaces to overwrite the old text, and then backspacing again + backup = "\b" * self._width + self._file.write(backup + " " * self._width + backup) + # Now we have a blank slate to add our status + self._file.write(status) + self._width = len(status) + self._file.flush() + self._rate_limiter.reset() + + def spin(self): + # type: () -> None + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._write(next(self._spin_cycle)) + + def finish(self, final_status): + # type: (str) -> None + if self._finished: + return + self._write(final_status) + self._file.write("\n") + self._file.flush() + self._finished = True + + +# Used for dumb terminals, non-interactive installs (no tty), etc. +# We still print updates occasionally (once every 60 seconds by default) to +# act as a keep-alive for systems like Travis-CI that take lack-of-output as +# an indication that a task has frozen. +class NonInteractiveSpinner(SpinnerInterface): + def __init__(self, message, min_update_interval_seconds=60): + # type: (str, float) -> None + self._message = message + self._finished = False + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._update("started") + + def _update(self, status): + assert not self._finished + self._rate_limiter.reset() + logger.info("%s: %s", self._message, status) + + def spin(self): + # type: () -> None + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._update("still running...") + + def finish(self, final_status): + # type: (str) -> None + if self._finished: + return + self._update("finished with status '%s'" % (final_status,)) + self._finished = True + + +@contextlib.contextmanager +def open_spinner(message): + # type: (str) -> Iterator[SpinnerInterface] + # Interactive spinner goes directly to sys.stdout rather than being routed + # through the logging system, but it acts like it has level INFO, + # i.e. it's only displayed if we're at level INFO or better. 
+ # Non-interactive spinner goes through the logging system, so it is always + # in sync with logging configuration. + if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: + spinner = InteractiveSpinner(message) # type: SpinnerInterface + else: + spinner = NonInteractiveSpinner(message) + try: + with hidden_cursor(sys.stdout): + yield spinner + except KeyboardInterrupt: + spinner.finish("canceled") + raise + except Exception: + spinner.finish("error") + raise + else: + spinner.finish("done") diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/unpacking.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/unpacking.py new file mode 100644 index 0000000000000000000000000000000000000000..7252dc217bfaece6fedbaf835cecbb2a06cdcbb0 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/unpacking.py @@ -0,0 +1,272 @@ +"""Utilities related archives. +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False +# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import logging +import os +import shutil +import stat +import tarfile +import zipfile + +from pip._internal.exceptions import InstallationError +from pip._internal.utils.filetypes import ( + BZ2_EXTENSIONS, + TAR_EXTENSIONS, + XZ_EXTENSIONS, + ZIP_EXTENSIONS, +) +from pip._internal.utils.misc import ensure_dir +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Iterable, List, Optional, Text, Union + + +logger = logging.getLogger(__name__) + + +SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS + +try: + import bz2 # noqa + SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS +except ImportError: + logger.debug('bz2 module is not available') + +try: + # Only for Python 3.3+ + import lzma # noqa + SUPPORTED_EXTENSIONS += XZ_EXTENSIONS +except ImportError: + logger.debug('lzma module is not available') + + +def current_umask(): + """Get the current umask which involves having to set it temporarily.""" + mask = os.umask(0) + os.umask(mask) + return mask + + +def split_leading_dir(path): + # type: (Union[str, Text]) -> List[Union[str, Text]] + path = path.lstrip('/').lstrip('\\') + if ( + '/' in path and ( + ('\\' in path and path.find('/') < path.find('\\')) or + '\\' not in path + ) + ): + return path.split('/', 1) + elif '\\' in path: + return path.split('\\', 1) + else: + return [path, ''] + + +def has_leading_dir(paths): + # type: (Iterable[Union[str, Text]]) -> bool + """Returns true if all the paths have the same leading path name + (i.e., everything is in one subdirectory in an archive)""" + common_prefix = None + for path in paths: + prefix, rest = split_leading_dir(path) + if not prefix: + return False + elif common_prefix is None: + common_prefix = prefix + elif prefix != common_prefix: + return False + return True + + +def is_within_directory(directory, target): + # type: ((Union[str, Text]), (Union[str, Text])) -> bool + """ + Return true if the absolute path of target is within the directory + """ + abs_directory = os.path.abspath(directory) + abs_target = os.path.abspath(target) + + prefix = os.path.commonprefix([abs_directory, abs_target]) + return prefix == abs_directory + + +def unzip_file(filename, location, flatten=True): + # type: (str, str, bool) -> None + """ + Unzip the file (with path `filename`) to the destination `location`. All + files are written based on system defaults and umask (i.e. 
permissions are + not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. + """ + ensure_dir(location) + zipfp = open(filename, 'rb') + try: + zip = zipfile.ZipFile(zipfp, allowZip64=True) + leading = has_leading_dir(zip.namelist()) and flatten + for info in zip.infolist(): + name = info.filename + fn = name + if leading: + fn = split_leading_dir(name)[1] + fn = os.path.join(location, fn) + dir = os.path.dirname(fn) + if not is_within_directory(location, fn): + message = ( + 'The zip file ({}) has a file ({}) trying to install ' + 'outside target directory ({})' + ) + raise InstallationError(message.format(filename, fn, location)) + if fn.endswith('/') or fn.endswith('\\'): + # A directory + ensure_dir(fn) + else: + ensure_dir(dir) + # Don't use read() to avoid allocating an arbitrarily large + # chunk of memory for the file's content + fp = zip.open(name) + try: + with open(fn, 'wb') as destfp: + shutil.copyfileobj(fp, destfp) + finally: + fp.close() + mode = info.external_attr >> 16 + # if mode and regular file and any execute permissions for + # user/group/world? + if mode and stat.S_ISREG(mode) and mode & 0o111: + # make dest file have execute for user/group/world + # (chmod +x) no-op on windows per python docs + os.chmod(fn, (0o777 - current_umask() | 0o111)) + finally: + zipfp.close() + + +def untar_file(filename, location): + # type: (str, str) -> None + """ + Untar the file (with path `filename`) to the destination `location`. + All files are written based on system defaults and umask (i.e. permissions + are not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. 
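+
+    Usage sketch (hypothetical paths):
+
+        untar_file('/tmp/example-1.0.tar.gz', '/tmp/build/example')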
+ """ + ensure_dir(location) + if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'): + mode = 'r:gz' + elif filename.lower().endswith(BZ2_EXTENSIONS): + mode = 'r:bz2' + elif filename.lower().endswith(XZ_EXTENSIONS): + mode = 'r:xz' + elif filename.lower().endswith('.tar'): + mode = 'r' + else: + logger.warning( + 'Cannot determine compression type for file %s', filename, + ) + mode = 'r:*' + tar = tarfile.open(filename, mode) + try: + leading = has_leading_dir([ + member.name for member in tar.getmembers() + ]) + for member in tar.getmembers(): + fn = member.name + if leading: + # https://github.com/python/mypy/issues/1174 + fn = split_leading_dir(fn)[1] # type: ignore + path = os.path.join(location, fn) + if not is_within_directory(location, path): + message = ( + 'The tar file ({}) has a file ({}) trying to install ' + 'outside target directory ({})' + ) + raise InstallationError( + message.format(filename, path, location) + ) + if member.isdir(): + ensure_dir(path) + elif member.issym(): + try: + # https://github.com/python/typeshed/issues/2673 + tar._extract_member(member, path) # type: ignore + except Exception as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + 'In the tar file %s the member %s is invalid: %s', + filename, member.name, exc, + ) + continue + else: + try: + fp = tar.extractfile(member) + except (KeyError, AttributeError) as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + 'In the tar file %s the member %s is invalid: %s', + filename, member.name, exc, + ) + continue + ensure_dir(os.path.dirname(path)) + with open(path, 'wb') as destfp: + shutil.copyfileobj(fp, destfp) + fp.close() + # Update the timestamp (useful for cython compiled files) + # https://github.com/python/typeshed/issues/2673 + tar.utime(member, path) # type: ignore + # member have any execute permissions for user/group/world? + if member.mode & 0o111: + # make dest file have execute for user/group/world + # no-op on windows per python docs + os.chmod(path, (0o777 - current_umask() | 0o111)) + finally: + tar.close() + + +def unpack_file( + filename, # type: str + location, # type: str + content_type=None, # type: Optional[str] +): + # type: (...) -> None + filename = os.path.realpath(filename) + if ( + content_type == 'application/zip' or + filename.lower().endswith(ZIP_EXTENSIONS) or + zipfile.is_zipfile(filename) + ): + unzip_file( + filename, + location, + flatten=not filename.endswith('.whl') + ) + elif ( + content_type == 'application/x-gzip' or + tarfile.is_tarfile(filename) or + filename.lower().endswith( + TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS + ) + ): + untar_file(filename, location) + else: + # FIXME: handle? + # FIXME: magic signatures? 
+ logger.critical( + 'Cannot unpack file %s (downloaded from %s, content-type: %s); ' + 'cannot detect archive format', + filename, location, content_type, + ) + raise InstallationError( + 'Cannot determine archive format of {}'.format(location) + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/urls.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..9ad40feb345423ea76d86cc0e4541e3de84bae34 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/urls.py @@ -0,0 +1,54 @@ +import os +import sys + +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, Text, Union + + +def get_url_scheme(url): + # type: (Union[str, Text]) -> Optional[Text] + if ':' not in url: + return None + return url.split(':', 1)[0].lower() + + +def path_to_url(path): + # type: (Union[str, Text]) -> str + """ + Convert a path to a file: URL. The path will be made absolute and have + quoted path parts. + """ + path = os.path.normpath(os.path.abspath(path)) + url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path)) + return url + + +def url_to_path(url): + # type: (str) -> str + """ + Convert a file: URL to a path. + """ + assert url.startswith('file:'), ( + "You can only turn file: urls into filenames (not %r)" % url) + + _, netloc, path, _, _ = urllib_parse.urlsplit(url) + + if not netloc or netloc == 'localhost': + # According to RFC 8089, same as empty authority. + netloc = '' + elif sys.platform == 'win32': + # If we have a UNC path, prepend UNC share notation. + netloc = '\\\\' + netloc + else: + raise ValueError( + 'non-local file URIs are not supported on this platform: %r' + % url + ) + + path = urllib_request.url2pathname(netloc + path) + return path diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/virtualenv.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/virtualenv.py new file mode 100644 index 0000000000000000000000000000000000000000..d81e6ac54bb13a898295923126a934b9ea76f641 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/virtualenv.py @@ -0,0 +1,115 @@ +from __future__ import absolute_import + +import logging +import os +import re +import site +import sys + +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Optional + +logger = logging.getLogger(__name__) +_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile( + r"include-system-site-packages\s*=\s*(?P<value>true|false)" +) + + +def _running_under_venv(): + # type: () -> bool + """Checks if sys.base_prefix and sys.prefix match. + + This handles PEP 405 compliant virtual environments. + """ + return sys.prefix != getattr(sys, "base_prefix", sys.prefix) + + +def _running_under_regular_virtualenv(): + # type: () -> bool + """Checks if sys.real_prefix is set. + + This handles virtual environments created with pypa's virtualenv. + """ + # pypa/virtualenv case + return hasattr(sys, 'real_prefix') + + +def running_under_virtualenv(): + # type: () -> bool + """Return True if we're running inside a virtualenv, False otherwise. 
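+
+    This covers both PEP 405 venvs (sys.base_prefix != sys.prefix) and
+    pypa/virtualenv environments (sys.real_prefix is set).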
+ """ + return _running_under_venv() or _running_under_regular_virtualenv() + + +def _get_pyvenv_cfg_lines(): + # type: () -> Optional[List[str]] + """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines + + Returns None, if it could not read/access the file. + """ + pyvenv_cfg_file = os.path.join(sys.prefix, 'pyvenv.cfg') + try: + with open(pyvenv_cfg_file) as f: + return f.read().splitlines() # avoids trailing newlines + except IOError: + return None + + +def _no_global_under_venv(): + # type: () -> bool + """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion + + PEP 405 specifies that when system site-packages are not supposed to be + visible from a virtual environment, `pyvenv.cfg` must contain the following + line: + + include-system-site-packages = false + + Additionally, log a warning if accessing the file fails. + """ + cfg_lines = _get_pyvenv_cfg_lines() + if cfg_lines is None: + # We're not in a "sane" venv, so assume there is no system + # site-packages access (since that's PEP 405's default state). + logger.warning( + "Could not access 'pyvenv.cfg' despite a virtual environment " + "being active. Assuming global site-packages is not accessible " + "in this environment." + ) + return True + + for line in cfg_lines: + match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line) + if match is not None and match.group('value') == 'false': + return True + return False + + +def _no_global_under_regular_virtualenv(): + # type: () -> bool + """Check if "no-global-site-packages.txt" exists beside site.py + + This mirrors logic in pypa/virtualenv for determining whether system + site-packages are visible in the virtual environment. + """ + site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) + no_global_site_packages_file = os.path.join( + site_mod_dir, 'no-global-site-packages.txt', + ) + return os.path.exists(no_global_site_packages_file) + + +def virtualenv_no_global(): + # type: () -> bool + """Returns a boolean, whether running in venv with no system site-packages. + """ + + if _running_under_regular_virtualenv(): + return _no_global_under_regular_virtualenv() + + if _running_under_venv(): + return _no_global_under_venv() + + return False diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/utils/wheel.py b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/wheel.py new file mode 100644 index 0000000000000000000000000000000000000000..837e0afd7e5ca32666ffd0acdc33549d03626bcd --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/utils/wheel.py @@ -0,0 +1,225 @@ +"""Support functions for working with wheel files. 
+""" + +from __future__ import absolute_import + +import logging +from email.parser import Parser +from zipfile import ZipFile + +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.pkg_resources import DistInfoDistribution +from pip._vendor.six import PY2, ensure_str + +from pip._internal.exceptions import UnsupportedWheel +from pip._internal.utils.pkg_resources import DictMetadata +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from email.message import Message + from typing import Dict, Tuple + + from pip._vendor.pkg_resources import Distribution + +if PY2: + from zipfile import BadZipfile as BadZipFile +else: + from zipfile import BadZipFile + + +VERSION_COMPATIBLE = (1, 0) + + +logger = logging.getLogger(__name__) + + +class WheelMetadata(DictMetadata): + """Metadata provider that maps metadata decoding exceptions to our + internal exception type. + """ + def __init__(self, metadata, wheel_name): + # type: (Dict[str, bytes], str) -> None + super(WheelMetadata, self).__init__(metadata) + self._wheel_name = wheel_name + + def get_metadata(self, name): + # type: (str) -> str + try: + return super(WheelMetadata, self).get_metadata(name) + except UnicodeDecodeError as e: + # Augment the default error with the origin of the file. + raise UnsupportedWheel( + "Error decoding metadata for {}: {}".format( + self._wheel_name, e + ) + ) + + +def pkg_resources_distribution_for_wheel(wheel_zip, name, location): + # type: (ZipFile, str, str) -> Distribution + """Get a pkg_resources distribution given a wheel. + + :raises UnsupportedWheel: on any errors + """ + info_dir, _ = parse_wheel(wheel_zip, name) + + metadata_files = [ + p for p in wheel_zip.namelist() if p.startswith("{}/".format(info_dir)) + ] + + metadata_text = {} # type: Dict[str, bytes] + for path in metadata_files: + # If a flag is set, namelist entries may be unicode in Python 2. + # We coerce them to native str type to match the types used in the rest + # of the code. This cannot fail because unicode can always be encoded + # with UTF-8. + full_path = ensure_str(path) + _, metadata_name = full_path.split("/", 1) + + try: + metadata_text[metadata_name] = read_wheel_metadata_file( + wheel_zip, full_path + ) + except UnsupportedWheel as e: + raise UnsupportedWheel( + "{} has an invalid wheel, {}".format(name, str(e)) + ) + + metadata = WheelMetadata(metadata_text, location) + + return DistInfoDistribution( + location=location, metadata=metadata, project_name=name + ) + + +def parse_wheel(wheel_zip, name): + # type: (ZipFile, str) -> Tuple[str, Message] + """Extract information from the provided wheel, ensuring it meets basic + standards. + + Returns the name of the .dist-info directory and the parsed WHEEL metadata. + """ + try: + info_dir = wheel_dist_info_dir(wheel_zip, name) + metadata = wheel_metadata(wheel_zip, info_dir) + version = wheel_version(metadata) + except UnsupportedWheel as e: + raise UnsupportedWheel( + "{} has an invalid wheel, {}".format(name, str(e)) + ) + + check_compatibility(version, name) + + return info_dir, metadata + + +def wheel_dist_info_dir(source, name): + # type: (ZipFile, str) -> str + """Returns the name of the contained .dist-info directory. + + Raises AssertionError or UnsupportedWheel if not found, >1 found, or + it doesn't match the provided name. 
+ """ + # Zip file path separators must be / + subdirs = list(set(p.split("/")[0] for p in source.namelist())) + + info_dirs = [s for s in subdirs if s.endswith('.dist-info')] + + if not info_dirs: + raise UnsupportedWheel(".dist-info directory not found") + + if len(info_dirs) > 1: + raise UnsupportedWheel( + "multiple .dist-info directories found: {}".format( + ", ".join(info_dirs) + ) + ) + + info_dir = info_dirs[0] + + info_dir_name = canonicalize_name(info_dir) + canonical_name = canonicalize_name(name) + if not info_dir_name.startswith(canonical_name): + raise UnsupportedWheel( + ".dist-info directory {!r} does not start with {!r}".format( + info_dir, canonical_name + ) + ) + + # Zip file paths can be unicode or str depending on the zip entry flags, + # so normalize it. + return ensure_str(info_dir) + + +def read_wheel_metadata_file(source, path): + # type: (ZipFile, str) -> bytes + try: + return source.read(path) + # BadZipFile for general corruption, KeyError for missing entry, + # and RuntimeError for password-protected files + except (BadZipFile, KeyError, RuntimeError) as e: + raise UnsupportedWheel( + "could not read {!r} file: {!r}".format(path, e) + ) + + +def wheel_metadata(source, dist_info_dir): + # type: (ZipFile, str) -> Message + """Return the WHEEL metadata of an extracted wheel, if possible. + Otherwise, raise UnsupportedWheel. + """ + path = "{}/WHEEL".format(dist_info_dir) + # Zip file path separators must be / + wheel_contents = read_wheel_metadata_file(source, path) + + try: + wheel_text = ensure_str(wheel_contents) + except UnicodeDecodeError as e: + raise UnsupportedWheel("error decoding {!r}: {!r}".format(path, e)) + + # FeedParser (used by Parser) does not raise any exceptions. The returned + # message may have .defects populated, but for backwards-compatibility we + # currently ignore them. + return Parser().parsestr(wheel_text) + + +def wheel_version(wheel_data): + # type: (Message) -> Tuple[int, ...] + """Given WHEEL metadata, return the parsed Wheel-Version. + Otherwise, raise UnsupportedWheel. + """ + version_text = wheel_data["Wheel-Version"] + if version_text is None: + raise UnsupportedWheel("WHEEL is missing Wheel-Version") + + version = version_text.strip() + + try: + return tuple(map(int, version.split('.'))) + except ValueError: + raise UnsupportedWheel("invalid Wheel-Version: {!r}".format(version)) + + +def check_compatibility(version, name): + # type: (Tuple[int, ...], str) -> None + """Raises errors or warns if called with an incompatible Wheel-Version. + + Pip should refuse to install a Wheel-Version that's a major series + ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when + installing a version only minor version ahead (e.g 1.2 > 1.1). 
+ + version: a 2-tuple representing a Wheel-Version (Major, Minor) + name: name of wheel or package to raise exception about + + :raises UnsupportedWheel: when an incompatible Wheel-Version is given + """ + if version[0] > VERSION_COMPATIBLE[0]: + raise UnsupportedWheel( + "%s's Wheel-Version (%s) is not compatible with this version " + "of pip" % (name, '.'.join(map(str, version))) + ) + elif version > VERSION_COMPATIBLE: + logger.warning( + 'Installing from a newer Wheel-Version (%s)', + '.'.join(map(str, version)), + ) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__init__.py b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2a4eb1375763fa3287d171a2a1b0766d1d9d1224 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__init__.py @@ -0,0 +1,15 @@ +# Expose a limited set of classes and functions so callers outside of +# the vcs package don't need to import deeper than `pip._internal.vcs`. +# (The test directory and imports protected by MYPY_CHECK_RUNNING may +# still need to import from a vcs sub-package.) +# Import all vcs modules to register each VCS in the VcsSupport object. +import pip._internal.vcs.bazaar +import pip._internal.vcs.git +import pip._internal.vcs.mercurial +import pip._internal.vcs.subversion # noqa: F401 +from pip._internal.vcs.versioncontrol import ( # noqa: F401 + RemoteNotFoundError, + is_url, + make_vcs_requirement_url, + vcs, +) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fb7e777d49b02a53366eec594dfc6a31047c71eb Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0f34d722ac6270fab71d5ff0ac12230664559ea9 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/git.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/git.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c047e5632459659056255749f800395e4f65afd3 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/git.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a4bce055dd6f5cf99d1ddee3712da51966cd76be Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-38.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..741e6af29a199554cbbc572df9a66e71df2717e2 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..46e1217d0e8abf68245609f08d25e27d9d1dbdef Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/bazaar.py b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/bazaar.py new file mode 100644 index 0000000000000000000000000000000000000000..347c06f9dc7c882299bf1a829049849a06328fe5 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/bazaar.py @@ -0,0 +1,120 @@ +# The following comment should be removed at some point in the future. +# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import logging +import os + +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.utils.misc import display_path, rmtree +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import VersionControl, vcs + +if MYPY_CHECK_RUNNING: + from typing import Optional, Tuple + from pip._internal.utils.misc import HiddenText + from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions + + +logger = logging.getLogger(__name__) + + +class Bazaar(VersionControl): + name = 'bzr' + dirname = '.bzr' + repo_name = 'branch' + schemes = ( + 'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp', + 'bzr+lp', + ) + + def __init__(self, *args, **kwargs): + super(Bazaar, self).__init__(*args, **kwargs) + # This is only needed for python <2.7.5 + # Register lp but do not expose as a scheme to support bzr+lp. 
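+ # (Illustrative: a Launchpad short URL such as 'bzr+lp:pip' relies on
+ # this registration to parse correctly on those older Pythons.)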
+ if getattr(urllib_parse, 'uses_fragment', None): + urllib_parse.uses_fragment.extend(['lp']) + + @staticmethod + def get_base_rev_args(rev): + return ['-r', rev] + + def export(self, location, url): + # type: (str, HiddenText) -> None + """ + Export the Bazaar repository at the url to the destination location + """ + # Remove the location to make sure Bazaar can export it correctly + if os.path.exists(location): + rmtree(location) + + url, rev_options = self.get_url_rev_options(url) + self.run_command( + make_command('export', location, url, rev_options.to_args()), + show_stdout=False, + ) + + def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + rev_display = rev_options.to_display() + logger.info( + 'Checking out %s%s to %s', + url, + rev_display, + display_path(dest), + ) + cmd_args = ( + make_command('branch', '-q', rev_options.to_args(), url, dest) + ) + self.run_command(cmd_args) + + def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + self.run_command(make_command('switch', url), cwd=dest) + + def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + cmd_args = make_command('pull', '-q', rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + @classmethod + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] + # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it + url, rev, user_pass = super(Bazaar, cls).get_url_rev_and_auth(url) + if url.startswith('ssh://'): + url = 'bzr+' + url + return url, rev, user_pass + + @classmethod + def get_remote_url(cls, location): + urls = cls.run_command(['info'], show_stdout=False, cwd=location) + for line in urls.splitlines(): + line = line.strip() + for x in ('checkout of branch: ', + 'parent branch: '): + if line.startswith(x): + repo = line.split(x)[1] + if cls._is_local_repository(repo): + return path_to_url(repo) + return repo + return None + + @classmethod + def get_revision(cls, location): + revision = cls.run_command( + ['revno'], show_stdout=False, cwd=location, + ) + return revision.splitlines()[-1] + + @classmethod + def is_commit_id_equal(cls, dest, name): + """Always assume the versions don't match""" + return False + + +vcs.register(Bazaar) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/git.py b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/git.py new file mode 100644 index 0000000000000000000000000000000000000000..d706064e75b6639338e197124e75dadda5811332 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/git.py @@ -0,0 +1,395 @@ +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import logging +import os.path +import re + +from pip._vendor.packaging.version import parse as parse_version +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request + +from pip._internal.exceptions import BadCommand +from pip._internal.utils.misc import display_path, hide_url +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.vcs.versioncontrol import ( + RemoteNotFoundError, + VersionControl, + find_path_to_setup_from_repo_root, + vcs, +) + +if MYPY_CHECK_RUNNING: + from typing import Optional, Tuple + from pip._internal.utils.misc import HiddenText + from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions + + +urlsplit = urllib_parse.urlsplit +urlunsplit = urllib_parse.urlunsplit + + +logger = logging.getLogger(__name__) + + +HASH_REGEX = re.compile('^[a-fA-F0-9]{40}$') + + +def looks_like_hash(sha): + return bool(HASH_REGEX.match(sha)) + + +class Git(VersionControl): + name = 'git' + dirname = '.git' + repo_name = 'clone' + schemes = ( + 'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file', + ) + # Prevent the user's environment variables from interfering with pip: + # https://github.com/pypa/pip/issues/1130 + unset_environ = ('GIT_DIR', 'GIT_WORK_TREE') + default_arg_rev = 'HEAD' + + @staticmethod + def get_base_rev_args(rev): + return [rev] + + def is_immutable_rev_checkout(self, url, dest): + # type: (str, str) -> bool + _, rev_options = self.get_url_rev_options(hide_url(url)) + if not rev_options.rev: + return False + if not self.is_commit_id_equal(dest, rev_options.rev): + # the current commit is different from rev, + # which means rev was something else than a commit hash + return False + # return False in the rare case rev is both a commit hash + # and a tag or a branch; we don't want to cache in that case + # because that branch/tag could point to something else in the future + is_tag_or_branch = bool( + self.get_revision_sha(dest, rev_options.rev)[0] + ) + return not is_tag_or_branch + + def get_git_version(self): + VERSION_PFX = 'git version ' + version = self.run_command(['version'], show_stdout=False) + if version.startswith(VERSION_PFX): + version = version[len(VERSION_PFX):].split()[0] + else: + version = '' + # get first 3 positions of the git version because + # on windows it is x.y.z.windows.t, and this parses as + # LegacyVersion which always smaller than a Version. + version = '.'.join(version.split('.')[:3]) + return parse_version(version) + + @classmethod + def get_current_branch(cls, location): + """ + Return the current branch, or None if HEAD isn't at a branch + (e.g. detached HEAD). + """ + # git-symbolic-ref exits with empty stdout if "HEAD" is a detached + # HEAD rather than a symbolic ref. In addition, the -q causes the + # command to exit with status code 1 instead of 128 in this case + # and to suppress the message to stderr. 
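+ # For example (illustrative): on a checked-out branch the command
+ # prints 'refs/heads/master' and exits 0; with a detached HEAD it
+ # prints nothing and, thanks to -q, exits 1 instead of 128.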
+ args = ['symbolic-ref', '-q', 'HEAD'] + output = cls.run_command( + args, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location, + ) + ref = output.strip() + + if ref.startswith('refs/heads/'): + return ref[len('refs/heads/'):] + + return None + + def export(self, location, url): + # type: (str, HiddenText) -> None + """Export the Git repository at the url to the destination location""" + if not location.endswith('/'): + location = location + '/' + + with TempDirectory(kind="export") as temp_dir: + self.unpack(temp_dir.path, url=url) + self.run_command( + ['checkout-index', '-a', '-f', '--prefix', location], + show_stdout=False, cwd=temp_dir.path + ) + + @classmethod + def get_revision_sha(cls, dest, rev): + """ + Return (sha_or_none, is_branch), where sha_or_none is a commit hash + if the revision names a remote branch or tag, otherwise None. + + Args: + dest: the repository directory. + rev: the revision name. + """ + # Pass rev to pre-filter the list. + output = cls.run_command(['show-ref', rev], cwd=dest, + show_stdout=False, on_returncode='ignore') + refs = {} + # NOTE: We do not use splitlines here since that would split on other + # unicode separators, which can be maliciously used to install a + # different revision. + for line in output.strip().split("\n"): + line = line.rstrip("\r") + if not line: + continue + try: + sha, ref = line.split(" ", maxsplit=2) + except ValueError: + # Include the offending line to simplify troubleshooting if + # this error ever occurs. + raise ValueError('unexpected show-ref line: {!r}'.format(line)) + + refs[ref] = sha + + branch_ref = 'refs/remotes/origin/{}'.format(rev) + tag_ref = 'refs/tags/{}'.format(rev) + + sha = refs.get(branch_ref) + if sha is not None: + return (sha, True) + + sha = refs.get(tag_ref) + + return (sha, False) + + @classmethod + def resolve_revision(cls, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> RevOptions + """ + Resolve a revision to a new RevOptions object with the SHA1 of the + branch, tag, or ref if found. + + Args: + rev_options: a RevOptions object. + """ + rev = rev_options.arg_rev + # The arg_rev property's implementation for Git ensures that the + # rev return value is always non-None. + assert rev is not None + + sha, is_branch = cls.get_revision_sha(dest, rev) + + if sha is not None: + rev_options = rev_options.make_new(sha) + rev_options.branch_name = rev if is_branch else None + + return rev_options + + # Do not show a warning for the common case of something that has + # the form of a Git commit hash. + if not looks_like_hash(rev): + logger.warning( + "Did not find branch or tag '%s', assuming revision or ref.", + rev, + ) + + if not rev.startswith('refs/'): + return rev_options + + # If it looks like a ref, we have to fetch it explicitly. + cls.run_command( + make_command('fetch', '-q', url, rev_options.to_args()), + cwd=dest, + ) + # Change the revision to the SHA of the ref we fetched + sha = cls.get_revision(dest, rev='FETCH_HEAD') + rev_options = rev_options.make_new(sha) + + return rev_options + + @classmethod + def is_commit_id_equal(cls, dest, name): + """ + Return whether the current commit hash equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + if not name: + # Then avoid an unnecessary subprocess call. 
+ return False + + return cls.get_revision(dest) == name + + def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + rev_display = rev_options.to_display() + logger.info('Cloning %s%s to %s', url, rev_display, display_path(dest)) + self.run_command(make_command('clone', '-q', url, dest)) + + if rev_options.rev: + # Then a specific revision was requested. + rev_options = self.resolve_revision(dest, url, rev_options) + branch_name = getattr(rev_options, 'branch_name', None) + if branch_name is None: + # Only do a checkout if the current commit id doesn't match + # the requested revision. + if not self.is_commit_id_equal(dest, rev_options.rev): + cmd_args = make_command( + 'checkout', '-q', rev_options.to_args(), + ) + self.run_command(cmd_args, cwd=dest) + elif self.get_current_branch(dest) != branch_name: + # Then a specific branch was requested, and that branch + # is not yet checked out. + track_branch = 'origin/{}'.format(branch_name) + cmd_args = [ + 'checkout', '-b', branch_name, '--track', track_branch, + ] + self.run_command(cmd_args, cwd=dest) + + #: repo may contain submodules + self.update_submodules(dest) + + def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + self.run_command( + make_command('config', 'remote.origin.url', url), + cwd=dest, + ) + cmd_args = make_command('checkout', '-q', rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + self.update_submodules(dest) + + def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + # First fetch changes from the default remote + if self.get_git_version() >= parse_version('1.9.0'): + # fetch tags in addition to everything else + self.run_command(['fetch', '-q', '--tags'], cwd=dest) + else: + self.run_command(['fetch', '-q'], cwd=dest) + # Then reset to wanted revision (maybe even origin/master) + rev_options = self.resolve_revision(dest, url, rev_options) + cmd_args = make_command('reset', '--hard', '-q', rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + #: update submodules + self.update_submodules(dest) + + @classmethod + def get_remote_url(cls, location): + """ + Return URL of the first remote encountered. + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. + """ + # We need to pass 1 for extra_ok_returncodes since the command + # exits with return code 1 if there are no matching lines. + stdout = cls.run_command( + ['config', '--get-regexp', r'remote\..*\.url'], + extra_ok_returncodes=(1, ), show_stdout=False, cwd=location, + ) + remotes = stdout.splitlines() + try: + found_remote = remotes[0] + except IndexError: + raise RemoteNotFoundError + + for remote in remotes: + if remote.startswith('remote.origin.url '): + found_remote = remote + break + url = found_remote.split(' ')[1] + return url.strip() + + @classmethod + def get_revision(cls, location, rev=None): + if rev is None: + rev = 'HEAD' + current_rev = cls.run_command( + ['rev-parse', rev], show_stdout=False, cwd=location, + ) + return current_rev.strip() + + @classmethod + def get_subdirectory(cls, location): + """ + Return the path to setup.py, relative to the repo root. + Return None if setup.py is in the repo root. 
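+
+ For example (illustrative): if setup.py lives at
+ /src/repo/pkg/setup.py and location is /src/repo/pkg, this
+ returns 'pkg'.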
+ """ + # find the repo root + git_dir = cls.run_command( + ['rev-parse', '--git-dir'], + show_stdout=False, cwd=location).strip() + if not os.path.isabs(git_dir): + git_dir = os.path.join(location, git_dir) + repo_root = os.path.abspath(os.path.join(git_dir, '..')) + return find_path_to_setup_from_repo_root(location, repo_root) + + @classmethod + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] + """ + Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. + That's required because although they use SSH they sometimes don't + work with a ssh:// scheme (e.g. GitHub). But we need a scheme for + parsing. Hence we remove it again afterwards and return it as a stub. + """ + # Works around an apparent Git bug + # (see https://article.gmane.org/gmane.comp.version-control.git/146500) + scheme, netloc, path, query, fragment = urlsplit(url) + if scheme.endswith('file'): + initial_slashes = path[:-len(path.lstrip('/'))] + newpath = ( + initial_slashes + + urllib_request.url2pathname(path) + .replace('\\', '/').lstrip('/') + ) + url = urlunsplit((scheme, netloc, newpath, query, fragment)) + after_plus = scheme.find('+') + 1 + url = scheme[:after_plus] + urlunsplit( + (scheme[after_plus:], netloc, newpath, query, fragment), + ) + + if '://' not in url: + assert 'file:' not in url + url = url.replace('git+', 'git+ssh://') + url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url) + url = url.replace('ssh://', '') + else: + url, rev, user_pass = super(Git, cls).get_url_rev_and_auth(url) + + return url, rev, user_pass + + @classmethod + def update_submodules(cls, location): + if not os.path.exists(os.path.join(location, '.gitmodules')): + return + cls.run_command( + ['submodule', 'update', '--init', '--recursive', '-q'], + cwd=location, + ) + + @classmethod + def controls_location(cls, location): + if super(Git, cls).controls_location(location): + return True + try: + r = cls.run_command(['rev-parse'], + cwd=location, + show_stdout=False, + on_returncode='ignore', + log_failed_cmd=False) + return not r + except BadCommand: + logger.debug("could not determine if %s is under git control " + "because git is not available", location) + return False + + +vcs.register(Git) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/mercurial.py b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/mercurial.py new file mode 100644 index 0000000000000000000000000000000000000000..d9b58cfe9a4b7a437e0899945243f7e9be5215e9 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/mercurial.py @@ -0,0 +1,155 @@ +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import logging +import os + +from pip._vendor.six.moves import configparser + +from pip._internal.exceptions import BadCommand, InstallationError +from pip._internal.utils.misc import display_path +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs.versioncontrol import ( + VersionControl, + find_path_to_setup_from_repo_root, + vcs, +) + +if MYPY_CHECK_RUNNING: + from pip._internal.utils.misc import HiddenText + from pip._internal.vcs.versioncontrol import RevOptions + + +logger = logging.getLogger(__name__) + + +class Mercurial(VersionControl): + name = 'hg' + dirname = '.hg' + repo_name = 'clone' + schemes = ( + 'hg', 'hg+file', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http', + ) + + @staticmethod + def get_base_rev_args(rev): + return [rev] + + def export(self, location, url): + # type: (str, HiddenText) -> None + """Export the Hg repository at the url to the destination location""" + with TempDirectory(kind="export") as temp_dir: + self.unpack(temp_dir.path, url=url) + + self.run_command( + ['archive', location], show_stdout=False, cwd=temp_dir.path + ) + + def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + rev_display = rev_options.to_display() + logger.info( + 'Cloning hg %s%s to %s', + url, + rev_display, + display_path(dest), + ) + self.run_command(make_command('clone', '--noupdate', '-q', url, dest)) + self.run_command( + make_command('update', '-q', rev_options.to_args()), + cwd=dest, + ) + + def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + repo_config = os.path.join(dest, self.dirname, 'hgrc') + config = configparser.RawConfigParser() + try: + config.read(repo_config) + config.set('paths', 'default', url.secret) + with open(repo_config, 'w') as config_file: + config.write(config_file) + except (OSError, configparser.NoSectionError) as exc: + logger.warning( + 'Could not switch Mercurial repository to %s: %s', url, exc, + ) + else: + cmd_args = make_command('update', '-q', rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + self.run_command(['pull', '-q'], cwd=dest) + cmd_args = make_command('update', '-q', rev_options.to_args()) + self.run_command(cmd_args, cwd=dest) + + @classmethod + def get_remote_url(cls, location): + url = cls.run_command( + ['showconfig', 'paths.default'], + show_stdout=False, cwd=location).strip() + if cls._is_local_repository(url): + url = path_to_url(url) + return url.strip() + + @classmethod + def get_revision(cls, location): + """ + Return the repository-local changeset revision number, as an integer. 
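+
+ Note that this number (hg's '{rev}' template keyword) is only
+ meaningful within a single clone; get_requirement_revision() below
+ returns the globally unique '{node}' hash instead.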
+ """ + current_revision = cls.run_command( + ['parents', '--template={rev}'], + show_stdout=False, cwd=location).strip() + return current_revision + + @classmethod + def get_requirement_revision(cls, location): + """ + Return the changeset identification hash, as a 40-character + hexadecimal string + """ + current_rev_hash = cls.run_command( + ['parents', '--template={node}'], + show_stdout=False, cwd=location).strip() + return current_rev_hash + + @classmethod + def is_commit_id_equal(cls, dest, name): + """Always assume the versions don't match""" + return False + + @classmethod + def get_subdirectory(cls, location): + """ + Return the path to setup.py, relative to the repo root. + Return None if setup.py is in the repo root. + """ + # find the repo root + repo_root = cls.run_command( + ['root'], show_stdout=False, cwd=location).strip() + if not os.path.isabs(repo_root): + repo_root = os.path.abspath(os.path.join(location, repo_root)) + return find_path_to_setup_from_repo_root(location, repo_root) + + @classmethod + def controls_location(cls, location): + if super(Mercurial, cls).controls_location(location): + return True + try: + cls.run_command( + ['identify'], + cwd=location, + show_stdout=False, + on_returncode='raise', + log_failed_cmd=False) + return True + except (BadCommand, InstallationError): + return False + + +vcs.register(Mercurial) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/subversion.py b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/subversion.py new file mode 100644 index 0000000000000000000000000000000000000000..6c76d1ad435ab4718c95a09aafa8a8e69a996452 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/subversion.py @@ -0,0 +1,333 @@ +# The following comment should be removed at some point in the future. 
+# mypy: disallow-untyped-defs=False + +from __future__ import absolute_import + +import logging +import os +import re + +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ( + display_path, + is_console_interactive, + rmtree, + split_auth_from_netloc, +) +from pip._internal.utils.subprocess import make_command +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.vcs.versioncontrol import VersionControl, vcs + +_svn_xml_url_re = re.compile('url="([^"]+)"') +_svn_rev_re = re.compile(r'committed-rev="(\d+)"') +_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"') +_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>') + + +if MYPY_CHECK_RUNNING: + from typing import Optional, Tuple + from pip._internal.utils.subprocess import CommandArgs + from pip._internal.utils.misc import HiddenText + from pip._internal.vcs.versioncontrol import AuthInfo, RevOptions + + +logger = logging.getLogger(__name__) + + +class Subversion(VersionControl): + name = 'svn' + dirname = '.svn' + repo_name = 'checkout' + schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn') + + @classmethod + def should_add_vcs_url_prefix(cls, remote_url): + return True + + @staticmethod + def get_base_rev_args(rev): + return ['-r', rev] + + @classmethod + def get_revision(cls, location): + """ + Return the maximum revision for all files under a given location + """ + # Note: taken from setuptools.command.egg_info + revision = 0 + + for base, dirs, files in os.walk(location): + if cls.dirname not in dirs: + dirs[:] = [] + continue # no sense walking uncontrolled subdirs + dirs.remove(cls.dirname) + entries_fn = os.path.join(base, cls.dirname, 'entries') + if not os.path.exists(entries_fn): + # FIXME: should we warn? + continue + + dirurl, localrev = cls._get_svn_url_rev(base) + + if base == location: + base = dirurl + '/' # save the root url + elif not dirurl or not dirurl.startswith(base): + dirs[:] = [] + continue # not part of the same svn tree, skip it + revision = max(revision, localrev) + return revision + + @classmethod + def get_netloc_and_auth(cls, netloc, scheme): + """ + This override allows the auth information to be passed to svn via the + --username and --password options instead of via the URL. + """ + if scheme == 'ssh': + # The --username and --password options can't be used for + # svn+ssh URLs, so keep the auth information in the URL. 
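+ # Example (illustrative): a netloc of 'user:pass@svn.example.com'
+ # becomes ('svn.example.com', ('user', 'pass')) for svn+http(s),
+ # but is returned untouched for svn+ssh.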
+ return super(Subversion, cls).get_netloc_and_auth(netloc, scheme) + + return split_auth_from_netloc(netloc) + + @classmethod + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] + # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it + url, rev, user_pass = super(Subversion, cls).get_url_rev_and_auth(url) + if url.startswith('ssh://'): + url = 'svn+' + url + return url, rev, user_pass + + @staticmethod + def make_rev_args(username, password): + # type: (Optional[str], Optional[HiddenText]) -> CommandArgs + extra_args = [] # type: CommandArgs + if username: + extra_args += ['--username', username] + if password: + extra_args += ['--password', password] + + return extra_args + + @classmethod + def get_remote_url(cls, location): + # In cases where the source is in a subdirectory, not alongside + # setup.py we have to look up in the location until we find a real + # setup.py + orig_location = location + while not os.path.exists(os.path.join(location, 'setup.py')): + last_location = location + location = os.path.dirname(location) + if location == last_location: + # We've traversed up to the root of the filesystem without + # finding setup.py + logger.warning( + "Could not find setup.py for directory %s (tried all " + "parent directories)", + orig_location, + ) + return None + + return cls._get_svn_url_rev(location)[0] + + @classmethod + def _get_svn_url_rev(cls, location): + from pip._internal.exceptions import InstallationError + + entries_path = os.path.join(location, cls.dirname, 'entries') + if os.path.exists(entries_path): + with open(entries_path) as f: + data = f.read() + else: # subversion >= 1.7 does not have the 'entries' file + data = '' + + if (data.startswith('8') or + data.startswith('9') or + data.startswith('10')): + data = list(map(str.splitlines, data.split('\n\x0c\n'))) + del data[0][0] # get rid of the '8' + url = data[0][3] + revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0] + elif data.startswith('<?xml'): + match = _svn_xml_url_re.search(data) + if not match: + raise ValueError('Badly formatted data: %r' % data) + url = match.group(1) # get repository URL + revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0] + else: + try: + # subversion >= 1.7 + # Note that using get_remote_call_options is not necessary here + # because `svn info` is being run against a local directory. + # We don't need to worry about making sure interactive mode + # is being used to prompt for passwords, because passwords + # are only potentially needed for remote server requests. + xml = cls.run_command( + ['info', '--xml', location], + show_stdout=False, + ) + url = _svn_info_xml_url_re.search(xml).group(1) + revs = [ + int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml) + ] + except InstallationError: + url, revs = None, [] + + if revs: + rev = max(revs) + else: + rev = 0 + + return url, rev + + @classmethod + def is_commit_id_equal(cls, dest, name): + """Always assume the versions don't match""" + return False + + def __init__(self, use_interactive=None): + # type: (bool) -> None + if use_interactive is None: + use_interactive = is_console_interactive() + self.use_interactive = use_interactive + + # This member is used to cache the fetched version of the current + # ``svn`` client. + # Special value definitions: + # None: Not evaluated yet. + # Empty tuple: Could not parse version. 
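+ # A tuple such as (1, 10, 3): Parsed from output like
+ # 'svn, version 1.10.3 (r1842928)'.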
+ self._vcs_version = None # type: Optional[Tuple[int, ...]] + + super(Subversion, self).__init__() + + def call_vcs_version(self): + # type: () -> Tuple[int, ...] + """Query the version of the currently installed Subversion client. + + :return: A tuple containing the parts of the version information or + ``()`` if the version returned from ``svn`` could not be parsed. + :raises: BadCommand: If ``svn`` is not installed. + """ + # Example versions: + # svn, version 1.10.3 (r1842928) + # compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0 + # svn, version 1.7.14 (r1542130) + # compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu + version_prefix = 'svn, version ' + version = self.run_command(['--version'], show_stdout=False) + if not version.startswith(version_prefix): + return () + + version = version[len(version_prefix):].split()[0] + version_list = version.split('.') + try: + parsed_version = tuple(map(int, version_list)) + except ValueError: + return () + + return parsed_version + + def get_vcs_version(self): + # type: () -> Tuple[int, ...] + """Return the version of the currently installed Subversion client. + + If the version of the Subversion client has already been queried, + a cached value will be used. + + :return: A tuple containing the parts of the version information or + ``()`` if the version returned from ``svn`` could not be parsed. + :raises: BadCommand: If ``svn`` is not installed. + """ + if self._vcs_version is not None: + # Use cached version, if available. + # If parsing the version failed previously (empty tuple), + # do not attempt to parse it again. + return self._vcs_version + + vcs_version = self.call_vcs_version() + self._vcs_version = vcs_version + return vcs_version + + def get_remote_call_options(self): + # type: () -> CommandArgs + """Return options to be used on calls to Subversion that contact the server. + + These options are applicable for the following ``svn`` subcommands used + in this class. + + - checkout + - export + - switch + - update + + :return: A list of command line arguments to pass to ``svn``. + """ + if not self.use_interactive: + # --non-interactive switch is available since Subversion 0.14.4. + # Subversion < 1.8 runs in interactive mode by default. + return ['--non-interactive'] + + svn_version = self.get_vcs_version() + # By default, Subversion >= 1.8 runs in non-interactive mode if + # stdin is not a TTY. Since that is how pip invokes SVN, in + # call_subprocess(), pip must pass --force-interactive to ensure + # the user can be prompted for a password, if required. + # SVN added the --force-interactive option in SVN 1.8. Since + # e.g. RHEL/CentOS 7, which is supported until 2024, ships with + # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip + # can't safely add the option if the SVN version is < 1.8 (or unknown). 
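+ # An unparseable version is the empty tuple, and () >= (1, 8) is
+ # False, so unknown versions conservatively get no extra option.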
+ if svn_version >= (1, 8): + return ['--force-interactive'] + + return [] + + def export(self, location, url): + # type: (str, HiddenText) -> None + """Export the svn repository at the url to the destination location""" + url, rev_options = self.get_url_rev_options(url) + + logger.info('Exporting svn repository %s to %s', url, location) + with indent_log(): + if os.path.exists(location): + # Subversion doesn't like to check out over an existing + # directory --force fixes this, but was only added in svn 1.5 + rmtree(location) + cmd_args = make_command( + 'export', self.get_remote_call_options(), + rev_options.to_args(), url, location, + ) + self.run_command(cmd_args, show_stdout=False) + + def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + rev_display = rev_options.to_display() + logger.info( + 'Checking out %s%s to %s', + url, + rev_display, + display_path(dest), + ) + cmd_args = make_command( + 'checkout', '-q', self.get_remote_call_options(), + rev_options.to_args(), url, dest, + ) + self.run_command(cmd_args) + + def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + cmd_args = make_command( + 'switch', self.get_remote_call_options(), rev_options.to_args(), + url, dest, + ) + self.run_command(cmd_args) + + def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + cmd_args = make_command( + 'update', self.get_remote_call_options(), rev_options.to_args(), + dest, + ) + self.run_command(cmd_args) + + +vcs.register(Subversion) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/versioncontrol.py b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/versioncontrol.py new file mode 100644 index 0000000000000000000000000000000000000000..7cfd568829f27f188cf7a3cec86c3c840b8463cb --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/vcs/versioncontrol.py @@ -0,0 +1,700 @@ +"""Handles all VCS (version control) support""" + +from __future__ import absolute_import + +import errno +import logging +import os +import shutil +import sys + +from pip._vendor import pkg_resources +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip._internal.exceptions import BadCommand +from pip._internal.utils.compat import samefile +from pip._internal.utils.misc import ( + ask_path_exists, + backup_dir, + display_path, + hide_url, + hide_value, + rmtree, +) +from pip._internal.utils.subprocess import call_subprocess, make_command +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import get_url_scheme + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, Dict, Iterable, Iterator, List, Mapping, Optional, Text, Tuple, + Type, Union + ) + from pip._internal.utils.ui import SpinnerInterface + from pip._internal.utils.misc import HiddenText + from pip._internal.utils.subprocess import CommandArgs + + AuthInfo = Tuple[Optional[str], Optional[str]] + + +__all__ = ['vcs'] + + +logger = logging.getLogger(__name__) + + +def is_url(name): + # type: (Union[str, Text]) -> bool + """ + Return true if the name looks like a URL. + """ + scheme = get_url_scheme(name) + if scheme is None: + return False + return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes + + +def make_vcs_requirement_url(repo_url, rev, project_name, subdir=None): + # type: (str, str, str, Optional[str]) -> str + """ + Return the URL for a VCS requirement. 
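+
+ For example (illustrative): given repo_url
+ 'git+https://example.com/repo.git', rev 'abc123', project name
+ 'my-project' and subdir 'pkg', this returns
+ 'git+https://example.com/repo.git@abc123#egg=my_project&subdirectory=pkg'.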
+ + Args: + repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+"). + project_name: the (unescaped) project name. + """ + egg_project_name = pkg_resources.to_filename(project_name) + req = '{}@{}#egg={}'.format(repo_url, rev, egg_project_name) + if subdir: + req += '&subdirectory={}'.format(subdir) + + return req + + +def find_path_to_setup_from_repo_root(location, repo_root): + # type: (str, str) -> Optional[str] + """ + Find the path to `setup.py` by searching up the filesystem from `location`. + Return the path to `setup.py` relative to `repo_root`. + Return None if `setup.py` is in `repo_root` or cannot be found. + """ + # find setup.py + orig_location = location + while not os.path.exists(os.path.join(location, 'setup.py')): + last_location = location + location = os.path.dirname(location) + if location == last_location: + # We've traversed up to the root of the filesystem without + # finding setup.py + logger.warning( + "Could not find setup.py for directory %s (tried all " + "parent directories)", + orig_location, + ) + return None + + if samefile(repo_root, location): + return None + + return os.path.relpath(location, repo_root) + + +class RemoteNotFoundError(Exception): + pass + + +class RevOptions(object): + + """ + Encapsulates a VCS-specific revision to install, along with any VCS + install options. + + Instances of this class should be treated as if immutable. + """ + + def __init__( + self, + vc_class, # type: Type[VersionControl] + rev=None, # type: Optional[str] + extra_args=None, # type: Optional[CommandArgs] + ): + # type: (...) -> None + """ + Args: + vc_class: a VersionControl subclass. + rev: the name of the revision to install. + extra_args: a list of extra options. + """ + if extra_args is None: + extra_args = [] + + self.extra_args = extra_args + self.rev = rev + self.vc_class = vc_class + self.branch_name = None # type: Optional[str] + + def __repr__(self): + # type: () -> str + return '<RevOptions {}: rev={!r}>'.format(self.vc_class.name, self.rev) + + @property + def arg_rev(self): + # type: () -> Optional[str] + if self.rev is None: + return self.vc_class.default_arg_rev + + return self.rev + + def to_args(self): + # type: () -> CommandArgs + """ + Return the VCS-specific command arguments. + """ + args = [] # type: CommandArgs + rev = self.arg_rev + if rev is not None: + args += self.vc_class.get_base_rev_args(rev) + args += self.extra_args + + return args + + def to_display(self): + # type: () -> str + if not self.rev: + return '' + + return ' (to revision {})'.format(self.rev) + + def make_new(self, rev): + # type: (str) -> RevOptions + """ + Make a copy of the current instance, but with a new rev. + + Args: + rev: the name of the revision for the new object. 
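+
+ The extra_args of the current instance are carried over to the new
+ object unchanged.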
+ """ + return self.vc_class.make_rev_options(rev, extra_args=self.extra_args) + + +class VcsSupport(object): + _registry = {} # type: Dict[str, VersionControl] + schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] + + def __init__(self): + # type: () -> None + # Register more schemes with urlparse for various version control + # systems + urllib_parse.uses_netloc.extend(self.schemes) + # Python >= 2.7.4, 3.3 doesn't have uses_fragment + if getattr(urllib_parse, 'uses_fragment', None): + urllib_parse.uses_fragment.extend(self.schemes) + super(VcsSupport, self).__init__() + + def __iter__(self): + # type: () -> Iterator[str] + return self._registry.__iter__() + + @property + def backends(self): + # type: () -> List[VersionControl] + return list(self._registry.values()) + + @property + def dirnames(self): + # type: () -> List[str] + return [backend.dirname for backend in self.backends] + + @property + def all_schemes(self): + # type: () -> List[str] + schemes = [] # type: List[str] + for backend in self.backends: + schemes.extend(backend.schemes) + return schemes + + def register(self, cls): + # type: (Type[VersionControl]) -> None + if not hasattr(cls, 'name'): + logger.warning('Cannot register VCS %s', cls.__name__) + return + if cls.name not in self._registry: + self._registry[cls.name] = cls() + logger.debug('Registered VCS backend: %s', cls.name) + + def unregister(self, name): + # type: (str) -> None + if name in self._registry: + del self._registry[name] + + def get_backend_for_dir(self, location): + # type: (str) -> Optional[VersionControl] + """ + Return a VersionControl object if a repository of that type is found + at the given directory. + """ + for vcs_backend in self._registry.values(): + if vcs_backend.controls_location(location): + logger.debug('Determine that %s uses VCS: %s', + location, vcs_backend.name) + return vcs_backend + return None + + def get_backend_for_scheme(self, scheme): + # type: (str) -> Optional[VersionControl] + """ + Return a VersionControl object or None. + """ + for vcs_backend in self._registry.values(): + if scheme in vcs_backend.schemes: + return vcs_backend + return None + + def get_backend(self, name): + # type: (str) -> Optional[VersionControl] + """ + Return a VersionControl object or None. + """ + name = name.lower() + return self._registry.get(name) + + +vcs = VcsSupport() + + +class VersionControl(object): + name = '' + dirname = '' + repo_name = '' + # List of supported schemes for this Version Control + schemes = () # type: Tuple[str, ...] + # Iterable of environment variable names to pass to call_subprocess(). + unset_environ = () # type: Tuple[str, ...] + default_arg_rev = None # type: Optional[str] + + @classmethod + def should_add_vcs_url_prefix(cls, remote_url): + # type: (str) -> bool + """ + Return whether the vcs prefix (e.g. "git+") should be added to a + repository's remote url when used in a requirement. + """ + return not remote_url.lower().startswith('{}:'.format(cls.name)) + + @classmethod + def get_subdirectory(cls, location): + # type: (str) -> Optional[str] + """ + Return the path to setup.py, relative to the repo root. + Return None if setup.py is in the repo root. + """ + return None + + @classmethod + def get_requirement_revision(cls, repo_dir): + # type: (str) -> str + """ + Return the revision string that should be used in a requirement. 
+ """ + return cls.get_revision(repo_dir) + + @classmethod + def get_src_requirement(cls, repo_dir, project_name): + # type: (str, str) -> Optional[str] + """ + Return the requirement string to use to redownload the files + currently at the given repository directory. + + Args: + project_name: the (unescaped) project name. + + The return value has a form similar to the following: + + {repository_url}@{revision}#egg={project_name} + """ + repo_url = cls.get_remote_url(repo_dir) + if repo_url is None: + return None + + if cls.should_add_vcs_url_prefix(repo_url): + repo_url = '{}+{}'.format(cls.name, repo_url) + + revision = cls.get_requirement_revision(repo_dir) + subdir = cls.get_subdirectory(repo_dir) + req = make_vcs_requirement_url(repo_url, revision, project_name, + subdir=subdir) + + return req + + @staticmethod + def get_base_rev_args(rev): + # type: (str) -> List[str] + """ + Return the base revision arguments for a vcs command. + + Args: + rev: the name of a revision to install. Cannot be None. + """ + raise NotImplementedError + + def is_immutable_rev_checkout(self, url, dest): + # type: (str, str) -> bool + """ + Return true if the commit hash checked out at dest matches + the revision in url. + + Always return False, if the VCS does not support immutable commit + hashes. + + This method does not check if there are local uncommitted changes + in dest after checkout, as pip currently has no use case for that. + """ + return False + + @classmethod + def make_rev_options(cls, rev=None, extra_args=None): + # type: (Optional[str], Optional[CommandArgs]) -> RevOptions + """ + Return a RevOptions object. + + Args: + rev: the name of a revision to install. + extra_args: a list of extra options. + """ + return RevOptions(cls, rev, extra_args=extra_args) + + @classmethod + def _is_local_repository(cls, repo): + # type: (str) -> bool + """ + posix absolute paths start with os.path.sep, + win32 ones start with drive (like c:\\folder) + """ + drive, tail = os.path.splitdrive(repo) + return repo.startswith(os.path.sep) or bool(drive) + + def export(self, location, url): + # type: (str, HiddenText) -> None + """ + Export the repository at the url to the destination location + i.e. only download the files, without vcs informations + + :param url: the repository URL starting with a vcs prefix. + """ + raise NotImplementedError + + @classmethod + def get_netloc_and_auth(cls, netloc, scheme): + # type: (str, str) -> Tuple[str, Tuple[Optional[str], Optional[str]]] + """ + Parse the repository URL's netloc, and return the new netloc to use + along with auth information. + + Args: + netloc: the original repository URL netloc. + scheme: the repository URL's scheme without the vcs prefix. + + This is mainly for the Subversion class to override, so that auth + information can be provided via the --username and --password options + instead of through the URL. For other subclasses like Git without + such an option, auth information must stay in the URL. + + Returns: (netloc, (username, password)). + """ + return netloc, (None, None) + + @classmethod + def get_url_rev_and_auth(cls, url): + # type: (str) -> Tuple[str, Optional[str], AuthInfo] + """ + Parse the repository URL to use, and return the URL, revision, + and auth info to use. + + Returns: (url, rev, (username, password)). + """ + scheme, netloc, path, query, frag = urllib_parse.urlsplit(url) + if '+' not in scheme: + raise ValueError( + "Sorry, {!r} is a malformed VCS url. " + "The format is <vcs>+<protocol>://<url>, " + "e.g. 
svn+http://myrepo/svn/MyApp#egg=MyApp".format(url) + ) + # Remove the vcs prefix. + scheme = scheme.split('+', 1)[1] + netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme) + rev = None + if '@' in path: + path, rev = path.rsplit('@', 1) + url = urllib_parse.urlunsplit((scheme, netloc, path, query, '')) + return url, rev, user_pass + + @staticmethod + def make_rev_args(username, password): + # type: (Optional[str], Optional[HiddenText]) -> CommandArgs + """ + Return the RevOptions "extra arguments" to use in obtain(). + """ + return [] + + def get_url_rev_options(self, url): + # type: (HiddenText) -> Tuple[HiddenText, RevOptions] + """ + Return the URL and RevOptions object to use in obtain() and in + some cases export(), as a tuple (url, rev_options). + """ + secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret) + username, secret_password = user_pass + password = None # type: Optional[HiddenText] + if secret_password is not None: + password = hide_value(secret_password) + extra_args = self.make_rev_args(username, password) + rev_options = self.make_rev_options(rev, extra_args=extra_args) + + return hide_url(secret_url), rev_options + + @staticmethod + def normalize_url(url): + # type: (str) -> str + """ + Normalize a URL for comparison by unquoting it and removing any + trailing slash. + """ + return urllib_parse.unquote(url).rstrip('/') + + @classmethod + def compare_urls(cls, url1, url2): + # type: (str, str) -> bool + """ + Compare two repo URLs for identity, ignoring incidental differences. + """ + return (cls.normalize_url(url1) == cls.normalize_url(url2)) + + def fetch_new(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + """ + Fetch a revision from a repository, in the case that this is the + first fetch from the repository. + + Args: + dest: the directory to fetch the repository to. + rev_options: a RevOptions object. + """ + raise NotImplementedError + + def switch(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + """ + Switch the repo at ``dest`` to point to ``URL``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + def update(self, dest, url, rev_options): + # type: (str, HiddenText, RevOptions) -> None + """ + Update an already-existing repo to the given ``rev_options``. + + Args: + rev_options: a RevOptions object. + """ + raise NotImplementedError + + @classmethod + def is_commit_id_equal(cls, dest, name): + # type: (str, Optional[str]) -> bool + """ + Return whether the id of the current commit equals the given name. + + Args: + dest: the repository directory. + name: a string name. + """ + raise NotImplementedError + + def obtain(self, dest, url): + # type: (str, HiddenText) -> None + """ + Install or update in editable mode the package represented by this + VersionControl object. + + :param dest: the repository directory in which to install or update. + :param url: the repository URL starting with a vcs prefix. 
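+
+ Example (illustrative):
+
+ vcs.get_backend('git').obtain(
+ '/src/pip', url=hide_url('git+https://github.com/pypa/pip.git@master'))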
+ """ + url, rev_options = self.get_url_rev_options(url) + + if not os.path.exists(dest): + self.fetch_new(dest, url, rev_options) + return + + rev_display = rev_options.to_display() + if self.is_repository_directory(dest): + existing_url = self.get_remote_url(dest) + if self.compare_urls(existing_url, url.secret): + logger.debug( + '%s in %s exists, and has correct URL (%s)', + self.repo_name.title(), + display_path(dest), + url, + ) + if not self.is_commit_id_equal(dest, rev_options.rev): + logger.info( + 'Updating %s %s%s', + display_path(dest), + self.repo_name, + rev_display, + ) + self.update(dest, url, rev_options) + else: + logger.info('Skipping because already up-to-date.') + return + + logger.warning( + '%s %s in %s exists with URL %s', + self.name, + self.repo_name, + display_path(dest), + existing_url, + ) + prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', + ('s', 'i', 'w', 'b')) + else: + logger.warning( + 'Directory %s already exists, and is not a %s %s.', + dest, + self.name, + self.repo_name, + ) + # https://github.com/python/mypy/issues/1174 + prompt = ('(i)gnore, (w)ipe, (b)ackup ', # type: ignore + ('i', 'w', 'b')) + + logger.warning( + 'The plan is to install the %s repository %s', + self.name, + url, + ) + response = ask_path_exists('What to do? %s' % prompt[0], prompt[1]) + + if response == 'a': + sys.exit(-1) + + if response == 'w': + logger.warning('Deleting %s', display_path(dest)) + rmtree(dest) + self.fetch_new(dest, url, rev_options) + return + + if response == 'b': + dest_dir = backup_dir(dest) + logger.warning( + 'Backing up %s to %s', display_path(dest), dest_dir, + ) + shutil.move(dest, dest_dir) + self.fetch_new(dest, url, rev_options) + return + + # Do nothing if the response is "i". + if response == 's': + logger.info( + 'Switching %s %s to %s%s', + self.repo_name, + display_path(dest), + url, + rev_display, + ) + self.switch(dest, url, rev_options) + + def unpack(self, location, url): + # type: (str, HiddenText) -> None + """ + Clean up current location and download the url repository + (and vcs infos) into location + + :param url: the repository URL starting with a vcs prefix. + """ + if os.path.exists(location): + rmtree(location) + self.obtain(location, url=url) + + @classmethod + def get_remote_url(cls, location): + # type: (str) -> str + """ + Return the url used at location + + Raises RemoteNotFoundError if the repository does not have a remote + url configured. + """ + raise NotImplementedError + + @classmethod + def get_revision(cls, location): + # type: (str) -> str + """ + Return the current commit id of the files at the given location. + """ + raise NotImplementedError + + @classmethod + def run_command( + cls, + cmd, # type: Union[List[str], CommandArgs] + show_stdout=True, # type: bool + cwd=None, # type: Optional[str] + on_returncode='raise', # type: str + extra_ok_returncodes=None, # type: Optional[Iterable[int]] + command_desc=None, # type: Optional[str] + extra_environ=None, # type: Optional[Mapping[str, Any]] + spinner=None, # type: Optional[SpinnerInterface] + log_failed_cmd=True # type: bool + ): + # type: (...) 
-> Text + """ + Run a VCS subcommand + This is simply a wrapper around call_subprocess that adds the VCS + command name, and checks that the VCS is available + """ + cmd = make_command(cls.name, *cmd) + try: + return call_subprocess(cmd, show_stdout, cwd, + on_returncode=on_returncode, + extra_ok_returncodes=extra_ok_returncodes, + command_desc=command_desc, + extra_environ=extra_environ, + unset_environ=cls.unset_environ, + spinner=spinner, + log_failed_cmd=log_failed_cmd) + except OSError as e: + # errno.ENOENT = no such file or directory + # In other words, the VCS executable isn't available + if e.errno == errno.ENOENT: + raise BadCommand( + 'Cannot find command %r - do you have ' + '%r installed and in your ' + 'PATH?' % (cls.name, cls.name)) + else: + raise # re-raise exception if a different error occurred + + @classmethod + def is_repository_directory(cls, path): + # type: (str) -> bool + """ + Return whether a directory path is a repository directory. + """ + logger.debug('Checking in %s for %s (%s)...', + path, cls.dirname, cls.name) + return os.path.exists(os.path.join(path, cls.dirname)) + + @classmethod + def controls_location(cls, location): + # type: (str) -> bool + """ + Check if a location is controlled by the vcs. + It is meant to be overridden to implement smarter detection + mechanisms for specific vcs. + + This can do more than is_repository_directory() alone. For example, + the Git override checks that Git is actually available. + """ + return cls.is_repository_directory(location) diff --git a/backend/test/lib/python3.8/site-packages/pip/_internal/wheel_builder.py b/backend/test/lib/python3.8/site-packages/pip/_internal/wheel_builder.py new file mode 100644 index 0000000000000000000000000000000000000000..7c7820d4f26560dff25801dd5034b355d2823795 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_internal/wheel_builder.py @@ -0,0 +1,305 @@ +"""Orchestrator for building wheels from InstallRequirements. +""" + +# The following comment should be removed at some point in the future. +# mypy: strict-optional=False + +import logging +import os.path +import re +import shutil + +from pip._internal.models.link import Link +from pip._internal.operations.build.wheel import build_wheel_pep517 +from pip._internal.operations.build.wheel_legacy import build_wheel_legacy +from pip._internal.utils.logging import indent_log +from pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed +from pip._internal.utils.setuptools_build import make_setuptools_clean_args +from pip._internal.utils.subprocess import call_subprocess +from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING +from pip._internal.utils.urls import path_to_url +from pip._internal.vcs import vcs + +if MYPY_CHECK_RUNNING: + from typing import ( + Any, Callable, Iterable, List, Optional, Pattern, Tuple, + ) + + from pip._internal.cache import WheelCache + from pip._internal.req.req_install import InstallRequirement + + BinaryAllowedPredicate = Callable[[InstallRequirement], bool] + BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]] + +logger = logging.getLogger(__name__) + + +def _contains_egg_info( + s, _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)): + # type: (str, Pattern[str]) -> bool + """Determine whether the string looks like an egg_info. + + :param s: The string to parse. E.g. 
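The except OSError handling in run_command() above is a generally useful pattern: translate a missing executable into an actionable message instead of a bare traceback. In isolation (stdlib only; the argv contents below are examples):

    import errno
    import subprocess

    def run_or_explain(argv):
        try:
            return subprocess.check_output(argv)
        except OSError as e:
            if e.errno == errno.ENOENT:   # executable not found on PATH
                raise RuntimeError(
                    'Cannot find command %r - is it installed and on PATH?'
                    % argv[0])
            raise                          # a different OS error: re-raise

    # run_or_explain(['git', '--version'])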
foo-2.1 + """ + return bool(_egg_info_re.search(s)) + + +def _should_build( + req, # type: InstallRequirement + need_wheel, # type: bool + check_binary_allowed, # type: BinaryAllowedPredicate +): + # type: (...) -> bool + """Return whether an InstallRequirement should be built into a wheel.""" + if req.constraint: + # never build requirements that are merely constraints + return False + if req.is_wheel: + if need_wheel: + logger.info( + 'Skipping %s, due to already being wheel.', req.name, + ) + return False + + if need_wheel: + # i.e. pip wheel, not pip install + return True + + # From this point, this concerns the pip install command only + # (need_wheel=False). + + if not req.use_pep517 and not is_wheel_installed(): + # we don't build legacy requirements if wheel is not installed + return False + + if req.editable or not req.source_dir: + return False + + if not check_binary_allowed(req): + logger.info( + "Skipping wheel build for %s, due to binaries " + "being disabled for it.", req.name, + ) + return False + + return True + + +def should_build_for_wheel_command( + req, # type: InstallRequirement +): + # type: (...) -> bool + return _should_build( + req, need_wheel=True, check_binary_allowed=_always_true + ) + + +def should_build_for_install_command( + req, # type: InstallRequirement + check_binary_allowed, # type: BinaryAllowedPredicate +): + # type: (...) -> bool + return _should_build( + req, need_wheel=False, check_binary_allowed=check_binary_allowed + ) + + +def _should_cache( + req, # type: InstallRequirement +): + # type: (...) -> Optional[bool] + """ + Return whether a built InstallRequirement can be stored in the persistent + wheel cache, assuming the wheel cache is available, and _should_build() + has determined a wheel needs to be built. + """ + if not should_build_for_install_command( + req, check_binary_allowed=_always_true + ): + # never cache if pip install would not have built + # (editable mode, etc) + return False + + if req.link and req.link.is_vcs: + # VCS checkout. Do not cache + # unless it points to an immutable commit hash. + assert not req.editable + assert req.source_dir + vcs_backend = vcs.get_backend_for_scheme(req.link.scheme) + assert vcs_backend + if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir): + return True + return False + + base, ext = req.link.splitext() + if _contains_egg_info(base): + return True + + # Otherwise, do not cache. + return False + + +def _get_cache_dir( + req, # type: InstallRequirement + wheel_cache, # type: WheelCache +): + # type: (...) -> str + """Return the persistent or temporary cache directory where the built + wheel need to be stored. + """ + cache_available = bool(wheel_cache.cache_dir) + if cache_available and _should_cache(req): + cache_dir = wheel_cache.get_path_for_link(req.link) + else: + cache_dir = wheel_cache.get_ephem_path_for_link(req.link) + return cache_dir + + +def _always_true(_): + # type: (Any) -> bool + return True + + +def _build_one( + req, # type: InstallRequirement + output_dir, # type: str + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) -> Optional[str] + """Build one wheel. + + :return: The filename of the built wheel, or None if the build failed. 
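The cache-eligibility test above hinges on _contains_egg_info(): a link is only worth caching persistently when its name embeds a version, since an unversioned name (a branch, say) can point at different contents over time. The regex can be exercised directly (input values are examples):

    import re

    egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)

    print(bool(egg_info_re.search('pip-20.0.2')))   # True: name pins a version
    print(bool(egg_info_re.search('master')))       # False: branch name only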
+ """ + try: + ensure_dir(output_dir) + except OSError as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, e, + ) + return None + + # Install build deps into temporary directory (PEP 518) + with req.build_env: + return _build_one_inside_env( + req, output_dir, build_options, global_options + ) + + +def _build_one_inside_env( + req, # type: InstallRequirement + output_dir, # type: str + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) -> Optional[str] + with TempDirectory(kind="wheel") as temp_dir: + if req.use_pep517: + wheel_path = build_wheel_pep517( + name=req.name, + backend=req.pep517_backend, + metadata_directory=req.metadata_directory, + build_options=build_options, + tempd=temp_dir.path, + ) + else: + wheel_path = build_wheel_legacy( + name=req.name, + setup_py_path=req.setup_py_path, + source_dir=req.unpacked_source_directory, + global_options=global_options, + build_options=build_options, + tempd=temp_dir.path, + ) + + if wheel_path is not None: + wheel_name = os.path.basename(wheel_path) + dest_path = os.path.join(output_dir, wheel_name) + try: + wheel_hash, length = hash_file(wheel_path) + shutil.move(wheel_path, dest_path) + logger.info('Created wheel for %s: ' + 'filename=%s size=%d sha256=%s', + req.name, wheel_name, length, + wheel_hash.hexdigest()) + logger.info('Stored in directory: %s', output_dir) + return dest_path + except Exception as e: + logger.warning( + "Building wheel for %s failed: %s", + req.name, e, + ) + # Ignore return, we can't do anything else useful. + if not req.use_pep517: + _clean_one_legacy(req, global_options) + return None + + +def _clean_one_legacy(req, global_options): + # type: (InstallRequirement, List[str]) -> bool + clean_args = make_setuptools_clean_args( + req.setup_py_path, + global_options=global_options, + ) + + logger.info('Running setup.py clean for %s', req.name) + try: + call_subprocess(clean_args, cwd=req.source_dir) + return True + except Exception: + logger.error('Failed cleaning build dir for %s', req.name) + return False + + +def build( + requirements, # type: Iterable[InstallRequirement] + wheel_cache, # type: WheelCache + build_options, # type: List[str] + global_options, # type: List[str] +): + # type: (...) -> BuildResult + """Build wheels. + + :return: The list of InstallRequirement that succeeded to build and + the list of InstallRequirement that failed to build. + """ + if not requirements: + return [], [] + + # Build the wheels. + logger.info( + 'Building wheels for collected packages: %s', + ', '.join(req.name for req in requirements), + ) + + with indent_log(): + build_successes, build_failures = [], [] + for req in requirements: + cache_dir = _get_cache_dir(req, wheel_cache) + wheel_file = _build_one( + req, cache_dir, build_options, global_options + ) + if wheel_file: + # Update the link for this. 
+ req.link = Link(path_to_url(wheel_file)) + req.local_file_path = req.link.file_path + assert req.link.is_wheel + build_successes.append(req) + else: + build_failures.append(req) + + # notify success/failure + if build_successes: + logger.info( + 'Successfully built %s', + ' '.join([req.name for req in build_successes]), + ) + if build_failures: + logger.info( + 'Failed to build %s', + ' '.join([req.name for req in build_failures]), + ) + # Return a list of requirements that failed to build + return build_successes, build_failures diff --git a/backend/test/lib/python3.8/site-packages/pip/_vendor/__init__.py b/backend/test/lib/python3.8/site-packages/pip/_vendor/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e02eaef6d8ab18106bf6600e87c269b41d923fa2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pip/_vendor/__init__.py @@ -0,0 +1,119 @@ +""" +pip._vendor is for vendoring dependencies of pip to prevent needing pip to +depend on something external. + +Files inside of pip._vendor should be considered immutable and should only be +updated to versions from upstream. +""" +from __future__ import absolute_import + +import glob +import os.path +import sys + +# Downstream redistributors which have debundled our dependencies should also +# patch this value to be true. This will trigger the additional patching +# to cause things like "six" to be available as pip. +DEBUNDLED = True + +# By default, look in this directory for a bunch of .whl files which we will +# add to the beginning of sys.path before attempting to import anything. This +# is done to support downstream re-distributors like Debian and Fedora who +# wish to create their own Wheels for our dependencies to aid in debundling. +prefix = getattr(sys, "base_prefix", sys.prefix) +if prefix.startswith('/usr/lib/pypy'): + prefix = '/usr' +WHEEL_DIR = os.path.abspath(os.path.join(prefix, 'share', 'python-wheels')) + + +# Define a small helper function to alias our vendored modules to the real ones +# if the vendored ones do not exist. This idea of this was taken from +# https://github.com/kennethreitz/requests/pull/2567. +def vendored(modulename): + vendored_name = "{0}.{1}".format(__name__, modulename) + + try: + __import__(modulename, globals(), locals(), level=0) + except ImportError: + # We can just silently allow import failures to pass here. If we + # got to this point it means that ``import pip._vendor.whatever`` + # failed and so did ``import whatever``. Since we're importing this + # upfront in an attempt to alias imports, not erroring here will + # just mean we get a regular import error whenever pip *actually* + # tries to import one of these modules to use it, which actually + # gives us a better error message than we would have otherwise + # gotten. + pass + else: + sys.modules[vendored_name] = sys.modules[modulename] + base, head = vendored_name.rsplit(".", 1) + setattr(sys.modules[base], head, sys.modules[modulename]) + + +# If we're operating in a debundled setup, then we want to go ahead and trigger +# the aliasing of our vendored libraries as well as looking for wheels to add +# to our sys.path. This will cause all of this code to be a no-op typically +# however downstream redistributors can enable it in a consistent way across +# all platforms. +if DEBUNDLED: + # Actually look inside of WHEEL_DIR to find .whl files and add them to the + # front of our sys.path. 
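The vendored() helper defined above boils down to one sys.modules trick: make the vendored dotted name resolve to an already-imported top-level module. Reduced to its core (mypkg is a made-up namespace for illustration; the parent-attribute step mirrors the original's rsplit/setattr logic):

    import sys

    def alias_vendored(pkg, modulename):
        vendored_name = '{0}.{1}'.format(pkg, modulename)
        try:
            __import__(modulename)
        except ImportError:
            pass   # leave it: the real import error surfaces on first use
        else:
            sys.modules[vendored_name] = sys.modules[modulename]
            base, head = vendored_name.rsplit('.', 1)
            if base in sys.modules:
                setattr(sys.modules[base], head, sys.modules[modulename])

    # alias_vendored('mypkg._vendor', 'json')  # mypkg._vendor must be importable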
+ sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path + + # Actually alias all of our vendored dependencies. + vendored("appdirs") + vendored("cachecontrol") + vendored("colorama") + vendored("contextlib2") + vendored("distlib") + vendored("distro") + vendored("html5lib") + vendored("six") + vendored("six.moves") + vendored("six.moves.urllib") + vendored("six.moves.urllib.parse") + vendored("packaging") + vendored("packaging.version") + vendored("packaging.specifiers") + vendored("pep517") + vendored("pkg_resources") + vendored("progress") + vendored("retrying") + vendored("requests") + vendored("requests.exceptions") + vendored("requests.packages") + vendored("requests.packages.urllib3") + vendored("requests.packages.urllib3._collections") + vendored("requests.packages.urllib3.connection") + vendored("requests.packages.urllib3.connectionpool") + vendored("requests.packages.urllib3.contrib") + vendored("requests.packages.urllib3.contrib.ntlmpool") + vendored("requests.packages.urllib3.contrib.pyopenssl") + vendored("requests.packages.urllib3.exceptions") + vendored("requests.packages.urllib3.fields") + vendored("requests.packages.urllib3.filepost") + vendored("requests.packages.urllib3.packages") + try: + vendored("requests.packages.urllib3.packages.ordered_dict") + vendored("requests.packages.urllib3.packages.six") + except ImportError: + # Debian already unbundles these from requests. + pass + vendored("requests.packages.urllib3.packages.ssl_match_hostname") + vendored("requests.packages.urllib3.packages.ssl_match_hostname." + "_implementation") + vendored("requests.packages.urllib3.poolmanager") + vendored("requests.packages.urllib3.request") + vendored("requests.packages.urllib3.response") + vendored("requests.packages.urllib3.util") + vendored("requests.packages.urllib3.util.connection") + vendored("requests.packages.urllib3.util.request") + vendored("requests.packages.urllib3.util.response") + vendored("requests.packages.urllib3.util.retry") + vendored("requests.packages.urllib3.util.ssl_") + vendored("requests.packages.urllib3.util.timeout") + vendored("requests.packages.urllib3.util.url") + vendored("toml") + vendored("toml.encoder") + vendored("toml.decoder") + vendored("urllib3") diff --git a/backend/test/lib/python3.8/site-packages/pip/_vendor/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pip/_vendor/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7b571d8f306583ae5b570435d94d8527c7486b3e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pip/_vendor/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/AUTHORS.txt b/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/AUTHORS.txt new file mode 100644 index 0000000000000000000000000000000000000000..72c87d7d38ae7bf859717c333a5ee8230f6ce624 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/AUTHORS.txt @@ -0,0 +1,562 @@ +A_Rog <adam.thomas.rogerson@gmail.com> +Aakanksha Agrawal <11389424+rasponic@users.noreply.github.com> +Abhinav Sagar <40603139+abhinavsagar@users.noreply.github.com> +ABHYUDAY PRATAP SINGH <abhyudaypratap@outlook.com> +abs51295 <aagams68@gmail.com> +AceGentile <ventogrigio83@gmail.com> +Adam Chainz <adam@adamj.eu> +Adam Tse <adam.tse@me.com> +Adam Tse <atse@users.noreply.github.com> +Adam Wentz <awentz@theonion.com> +admin <admin@admins-MacBook-Pro.local> 
+Adrien Morison <adrien.morison@gmail.com> +ahayrapetyan <ahayrapetya2@bloomberg.net> +Ahilya <ahilya16009@iiitd.ac.in> +AinsworthK <yat626@yahoo.com.hk> +Akash Srivastava <akashsrivastava4927@gmail.com> +Alan Yee <alyee@ucsd.edu> +Albert Tugushev <albert@tugushev.ru> +Albert-Guan <albert.guan94@gmail.com> +albertg <albert.guan94@gmail.com> +Aleks Bunin <github@compuix.com> +Alethea Flowers <magicalgirl@google.com> +Alex Gaynor <alex.gaynor@gmail.com> +Alex Grönholm <alex.gronholm@nextday.fi> +Alex Loosley <a.loosley@reply.de> +Alex Morega <alex@grep.ro> +Alex Stachowiak <alexander@computer.org> +Alexander Shtyrov <rawzausho@gmail.com> +Alexandre Conrad <alexandre.conrad@gmail.com> +Alexey Popravka <a.popravka@smartweb.com.ua> +Alexey Popravka <alexey.popravka@horsedevel.com> +Alli <alzeih@users.noreply.github.com> +Ami Fischman <ami@fischman.org> +Ananya Maiti <ananyoevo@gmail.com> +Anatoly Techtonik <techtonik@gmail.com> +Anders Kaseorg <andersk@mit.edu> +Andreas Lutro <anlutro@gmail.com> +Andrei Geacar <andrei.geacar@gmail.com> +Andrew Gaul <andrew@gaul.org> +Andrey Bulgakov <mail@andreiko.ru> +Andrés Delfino <34587441+andresdelfino@users.noreply.github.com> +Andrés Delfino <adelfino@gmail.com> +Andy Freeland <andy.freeland@redjack.com> +Andy Freeland <andy@andyfreeland.net> +Andy Kluger <AndydeCleyre@users.noreply.github.com> +Ani Hayrapetyan <ahayrapetya2@bloomberg.net> +Aniruddha Basak <codewithaniruddha@gmail.com> +Anish Tambe <anish.tambe@yahoo.in> +Anrs Hu <anrs@douban.com> +Anthony Sottile <asottile@umich.edu> +Antoine Musso <hashar@free.fr> +Anton Ovchinnikov <revolver112@gmail.com> +Anton Patrushev <apatrushev@gmail.com> +Antonio Alvarado Hernandez <tnotstar@gmail.com> +Antony Lee <anntzer.lee@gmail.com> +Antti Kaihola <akaihol+github@ambitone.com> +Anubhav Patel <anubhavp28@gmail.com> +Anuj Godase <godaseanuj@gmail.com> +AQNOUCH Mohammed <aqnouch.mohammed@gmail.com> +AraHaan <seandhunt_7@yahoo.com> +Arindam Choudhury <arindam@live.com> +Armin Ronacher <armin.ronacher@active-4.com> +Artem <duketemon@users.noreply.github.com> +Ashley Manton <ajd.manton@googlemail.com> +Ashwin Ramaswami <aramaswamis@gmail.com> +atse <atse@users.noreply.github.com> +Atsushi Odagiri <aodagx@gmail.com> +Avner Cohen <israbirding@gmail.com> +Baptiste Mispelon <bmispelon@gmail.com> +Barney Gale <barney.gale@gmail.com> +barneygale <barney.gale@gmail.com> +Bartek Ogryczak <b.ogryczak@gmail.com> +Bastian Venthur <mail@venthur.de> +Ben Darnell <ben@bendarnell.com> +Ben Hoyt <benhoyt@gmail.com> +Ben Rosser <rosser.bjr@gmail.com> +Bence Nagy <bence@underyx.me> +Benjamin Peterson <benjamin@python.org> +Benjamin VanEvery <ben@simondata.com> +Benoit Pierre <benoit.pierre@gmail.com> +Berker Peksag <berker.peksag@gmail.com> +Bernardo B. Marques <bernardo.fire@gmail.com> +Bernhard M. Wiedemann <bwiedemann@suse.de> +Bertil Hatt <bertil.hatt@farfetch.com> +Bogdan Opanchuk <bogdan@opanchuk.net> +BorisZZZ <BorisZZZ@users.noreply.github.com> +Brad Erickson <eosrei@gmail.com> +Bradley Ayers <bradley.ayers@gmail.com> +Brandon L. 
Reiss <brandon@damyata.co> +Brandt Bucher <brandtbucher@gmail.com> +Brett Randall <javabrett@gmail.com> +Brian Cristante <33549821+brcrista@users.noreply.github.com> +Brian Cristante <brcrista@microsoft.com> +Brian Rosner <brosner@gmail.com> +BrownTruck <BrownTruck@users.noreply.github.com> +Bruno Oliveira <nicoddemus@gmail.com> +Bruno Renié <brutasse@gmail.com> +Bstrdsmkr <bstrdsmkr@gmail.com> +Buck Golemon <buck@yelp.com> +burrows <burrows@preveil.com> +Bussonnier Matthias <bussonniermatthias@gmail.com> +c22 <c22@users.noreply.github.com> +Caleb Martinez <accounts@calebmartinez.com> +Calvin Smith <eukaryote@users.noreply.github.com> +Carl Meyer <carl@oddbird.net> +Carlos Liam <carlos@aarzee.me> +Carol Willing <carolcode@willingconsulting.com> +Carter Thayer <carterwthayer@gmail.com> +Cass <cass.petrus@gmail.com> +Chandrasekhar Atina <chandu.atina@gmail.com> +Chih-Hsuan Yen <yan12125@gmail.com> +Chih-Hsuan Yen <yen@chyen.cc> +Chris Brinker <chris.brinker@gmail.com> +Chris Hunt <chrahunt@gmail.com> +Chris Jerdonek <chris.jerdonek@gmail.com> +Chris McDonough <chrism@plope.com> +Chris Wolfe <chriswwolfe@gmail.com> +Christian Heimes <christian@python.org> +Christian Oudard <christian.oudard@gmail.com> +Christopher Hunt <chrahunt@gmail.com> +Christopher Snyder <cnsnyder@users.noreply.github.com> +Clark Boylan <clark.boylan@gmail.com> +Clay McClure <clay@daemons.net> +Cody <Purring@users.noreply.github.com> +Cody Soyland <codysoyland@gmail.com> +Colin Watson <cjwatson@debian.org> +Connor Osborn <cdosborn@email.arizona.edu> +Cooper Lees <me@cooperlees.com> +Cooper Ry Lees <me@cooperlees.com> +Cory Benfield <lukasaoz@gmail.com> +Cory Wright <corywright@gmail.com> +Craig Kerstiens <craig.kerstiens@gmail.com> +Cristian Sorinel <cristian.sorinel@gmail.com> +Curtis Doty <Curtis@GreenKey.net> +cytolentino <ctolentino8@bloomberg.net> +Damian Quiroga <qdamian@gmail.com> +Dan Black <dyspop@gmail.com> +Dan Savilonis <djs@n-cube.org> +Dan Sully <daniel-github@electricrain.com> +daniel <mcdonaldd@unimelb.edu.au> +Daniel Collins <accounts@dac.io> +Daniel Hahler <git@thequod.de> +Daniel Holth <dholth@fastmail.fm> +Daniel Jost <torpedojost@gmail.com> +Daniel Shaulov <daniel.shaulov@gmail.com> +Daniele Esposti <expobrain@users.noreply.github.com> +Daniele Procida <daniele@vurt.org> +Danny Hermes <daniel.j.hermes@gmail.com> +Dav Clark <davclark@gmail.com> +Dave Abrahams <dave@boostpro.com> +Dave Jones <dave@waveform.org.uk> +David Aguilar <davvid@gmail.com> +David Black <db@d1b.org> +David Bordeynik <david.bordeynik@gmail.com> +David Bordeynik <david@zebra-med.com> +David Caro <david@dcaro.es> +David Evans <d@drhevans.com> +David Linke <dr.david.linke@gmail.com> +David Pursehouse <david.pursehouse@gmail.com> +David Tucker <david@tucker.name> +David Wales <daviewales@gmail.com> +Davidovich <david.genest@gmail.com> +derwolfe <chriswwolfe@gmail.com> +Desetude <harry@desetude.com> +Diego Caraballo <diegocaraballo84@gmail.com> +DiegoCaraballo <diegocaraballo84@gmail.com> +Dmitry Gladkov <dmitry.gladkov@gmail.com> +Domen Kožar <domen@dev.si> +Donald Stufft <donald@stufft.io> +Dongweiming <dongweiming@admaster.com.cn> +Douglas Thor <dougthor42@users.noreply.github.com> +DrFeathers <WilliamGeorgeBurgess@gmail.com> +Dustin Ingram <di@di.codes> +Dwayne Bailey <dwayne@translate.org.za> +Ed Morley <501702+edmorley@users.noreply.github.com> +Ed Morley <emorley@mozilla.com> +Eitan Adler <lists@eitanadler.com> +ekristina <panacejja@gmail.com> +elainechan <elaine.chan@outlook.com> +Eli Schwartz <eschwartz93@gmail.com> +Eli 
Schwartz <eschwartz@archlinux.org> +Emil Burzo <contact@emilburzo.com> +Emil Styrke <emil.styrke@gmail.com> +Endoh Takanao <djmchl@gmail.com> +enoch <lanxenet@gmail.com> +Erdinc Mutlu <erdinc_mutlu@yahoo.com> +Eric Gillingham <Gillingham@bikezen.net> +Eric Hanchrow <eric.hanchrow@gmail.com> +Eric Hopper <hopper@omnifarious.org> +Erik M. Bray <embray@stsci.edu> +Erik Rose <erik@mozilla.com> +Ernest W Durbin III <ewdurbin@gmail.com> +Ernest W. Durbin III <ewdurbin@gmail.com> +Erwin Janssen <erwinjanssen@outlook.com> +Eugene Vereshchagin <evvers@gmail.com> +everdimension <everdimension@gmail.com> +Felix Yan <felixonmars@archlinux.org> +fiber-space <fiber-space@users.noreply.github.com> +Filip Kokosiński <filip.kokosinski@gmail.com> +Florian Briand <ownerfrance+github@hotmail.com> +Florian Rathgeber <florian.rathgeber@gmail.com> +Francesco <f.guerrieri@gmail.com> +Francesco Montesano <franz.bergesund@gmail.com> +Frost Ming <mianghong@gmail.com> +Gabriel Curio <g.curio@gmail.com> +Gabriel de Perthuis <g2p.code@gmail.com> +Garry Polley <garrympolley@gmail.com> +gdanielson <graeme.danielson@gmail.com> +Geoffrey Lehée <geoffrey@lehee.name> +Geoffrey Sneddon <me@gsnedders.com> +George Song <george@55minutes.com> +Georgi Valkov <georgi.t.valkov@gmail.com> +Giftlin Rajaiah <giftlin.rgn@gmail.com> +gizmoguy1 <gizmoguy1@gmail.com> +gkdoc <40815324+gkdoc@users.noreply.github.com> +Gopinath M <31352222+mgopi1990@users.noreply.github.com> +GOTO Hayato <3532528+gh640@users.noreply.github.com> +gpiks <gaurav.pikale@gmail.com> +Guilherme Espada <porcariadagata@gmail.com> +Guy Rozendorn <guy@rzn.co.il> +gzpan123 <gzpan123@gmail.com> +Hanjun Kim <hallazzang@gmail.com> +Hari Charan <hcharan997@gmail.com> +Harsh Vardhan <harsh59v@gmail.com> +Herbert Pfennig <herbert@albinen.com> +Hsiaoming Yang <lepture@me.com> +Hugo <hugovk@users.noreply.github.com> +Hugo Lopes Tavares <hltbra@gmail.com> +Hugo van Kemenade <hugovk@users.noreply.github.com> +hugovk <hugovk@users.noreply.github.com> +Hynek Schlawack <hs@ox.cx> +Ian Bicking <ianb@colorstudy.com> +Ian Cordasco <graffatcolmingov@gmail.com> +Ian Lee <IanLee1521@gmail.com> +Ian Stapleton Cordasco <graffatcolmingov@gmail.com> +Ian Wienand <ian@wienand.org> +Ian Wienand <iwienand@redhat.com> +Igor Kuzmitshov <kuzmiigo@gmail.com> +Igor Sobreira <igor@igorsobreira.com> +Ilya Baryshev <baryshev@gmail.com> +INADA Naoki <songofacandy@gmail.com> +Ionel Cristian Mărieș <contact@ionelmc.ro> +Ionel Maries Cristian <ionel.mc@gmail.com> +Ivan Pozdeev <vano@mail.mipt.ru> +Jacob Kim <me@thejacobkim.com> +jakirkham <jakirkham@gmail.com> +Jakub Stasiak <kuba.stasiak@gmail.com> +Jakub Vysoky <jakub@borka.cz> +Jakub Wilk <jwilk@jwilk.net> +James Cleveland <jamescleveland@gmail.com> +James Cleveland <radiosilence@users.noreply.github.com> +James Firth <hello@james-firth.com> +James Polley <jp@jamezpolley.com> +Jan Pokorný <jpokorny@redhat.com> +Jannis Leidel <jannis@leidel.info> +jarondl <me@jarondl.net> +Jason R. 
Coombs <jaraco@jaraco.com> +Jay Graves <jay@skabber.com> +Jean-Christophe Fillion-Robin <jchris.fillionr@kitware.com> +Jeff Barber <jbarber@computer.org> +Jeff Dairiki <dairiki@dairiki.org> +Jelmer Vernooij <jelmer@jelmer.uk> +jenix21 <devfrog@gmail.com> +Jeremy Stanley <fungi@yuggoth.org> +Jeremy Zafran <jzafran@users.noreply.github.com> +Jiashuo Li <jiasli@microsoft.com> +Jim Garrison <jim@garrison.cc> +Jivan Amara <Development@JivanAmara.net> +John Paton <j.paton@catawiki.nl> +John-Scott Atlakson <john.scott.atlakson@gmail.com> +johnthagen <johnthagen@gmail.com> +johnthagen <johnthagen@users.noreply.github.com> +Jon Banafato <jon@jonafato.com> +Jon Dufresne <jon.dufresne@gmail.com> +Jon Parise <jon@indelible.org> +Jonas Nockert <jonasnockert@gmail.com> +Jonathan Herbert <foohyfooh@gmail.com> +Joost Molenaar <j.j.molenaar@gmail.com> +Jorge Niedbalski <niedbalski@gmail.com> +Joseph Long <jdl@fastmail.fm> +Josh Bronson <jabronson@gmail.com> +Josh Hansen <josh@skwash.net> +Josh Schneier <josh.schneier@gmail.com> +Juanjo Bazán <jjbazan@gmail.com> +Julian Berman <Julian@GrayVines.com> +Julian Gethmann <julian.gethmann@kit.edu> +Julien Demoor <julien@jdemoor.com> +jwg4 <jack.grahl@yahoo.co.uk> +Jyrki Pulliainen <jyrki@spotify.com> +Kai Chen <kaichen120@gmail.com> +Kamal Bin Mustafa <kamal@smach.net> +kaustav haldar <hi@kaustav.me> +keanemind <keanemind@gmail.com> +Keith Maxwell <keith.maxwell@gmail.com> +Kelsey Hightower <kelsey.hightower@gmail.com> +Kenneth Belitzky <kenny@belitzky.com> +Kenneth Reitz <me@kennethreitz.com> +Kenneth Reitz <me@kennethreitz.org> +Kevin Burke <kev@inburke.com> +Kevin Carter <kevin.carter@rackspace.com> +Kevin Frommelt <kevin.frommelt@webfilings.com> +Kevin R Patterson <kevin.r.patterson@intel.com> +Kexuan Sun <me@kianasun.com> +Kit Randel <kit@nocturne.net.nz> +kpinc <kop@meme.com> +Krishna Oza <krishoza15sep@gmail.com> +Kumar McMillan <kumar.mcmillan@gmail.com> +Kyle Persohn <kyle.persohn@gmail.com> +lakshmanaram <lakshmanaram.n@gmail.com> +Laszlo Kiss-Kollar <kiss.kollar.laszlo@gmail.com> +Laurent Bristiel <laurent@bristiel.com> +Laurie Opperman <laurie@sitesee.com.au> +Leon Sasson <leonsassonha@gmail.com> +Lev Givon <lev@columbia.edu> +Lincoln de Sousa <lincoln@comum.org> +Lipis <lipiridis@gmail.com> +Loren Carvalho <lcarvalho@linkedin.com> +Lucas Cimon <lucas.cimon@gmail.com> +Ludovic Gasc <gmludo@gmail.com> +Luke Macken <lmacken@redhat.com> +Luo Jiebin <luo.jiebin@qq.com> +luojiebin <luojiebin@users.noreply.github.com> +luz.paz <luzpaz@users.noreply.github.com> +László Kiss Kollár <lkisskollar@bloomberg.net> +László Kiss Kollár <lkollar@users.noreply.github.com> +Marc Abramowitz <marc@marc-abramowitz.com> +Marc Tamlyn <marc.tamlyn@gmail.com> +Marcus Smith <qwcode@gmail.com> +Mariatta <Mariatta@users.noreply.github.com> +Mark Kohler <mark.kohler@proteinsimple.com> +Mark Williams <markrwilliams@gmail.com> +Mark Williams <mrw@enotuniq.org> +Markus Hametner <fin+github@xbhd.org> +Masaki <mk5986@nyu.edu> +Masklinn <bitbucket.org@masklinn.net> +Matej Stuchlik <mstuchli@redhat.com> +Mathew Jennings <mjennings@foursquare.com> +Mathieu Bridon <bochecha@daitauha.fr> +Matt Good <matt@matt-good.net> +Matt Maker <trip@monstro.us> +Matt Robenolt <matt@ydekproductions.com> +matthew <matthew@trumbell.net> +Matthew Einhorn <moiein2000@gmail.com> +Matthew Gilliard <matthew.gilliard@gmail.com> +Matthew Iversen <teh.ivo@gmail.com> +Matthew Trumbell <matthew@thirdstonepartners.com> +Matthew Willson <matthew@swiftkey.com> +Matthias Bussonnier <bussonniermatthias@gmail.com> 
+mattip <matti.picus@gmail.com> +Maxim Kurnikov <maxim.kurnikov@gmail.com> +Maxime Rouyrre <rouyrre+git@gmail.com> +mayeut <mayeut@users.noreply.github.com> +mbaluna <44498973+mbaluna@users.noreply.github.com> +mdebi <17590103+mdebi@users.noreply.github.com> +memoselyk <memoselyk@gmail.com> +Michael <michael-k@users.noreply.github.com> +Michael Aquilina <michaelaquilina@gmail.com> +Michael E. Karpeles <michael.karpeles@gmail.com> +Michael Klich <michal@michalklich.com> +Michael Williamson <mike@zwobble.org> +michaelpacer <michaelpacer@gmail.com> +Mickaël Schoentgen <mschoentgen@nuxeo.com> +Miguel Araujo Perez <miguel.araujo.perez@gmail.com> +Mihir Singh <git.service@mihirsingh.com> +Mike <mikeh@blur.com> +Mike Hendricks <mikeh@blur.com> +Min RK <benjaminrk@gmail.com> +MinRK <benjaminrk@gmail.com> +Miro Hrončok <miro@hroncok.cz> +Monica Baluna <mbaluna@bloomberg.net> +montefra <franz.bergesund@gmail.com> +Monty Taylor <mordred@inaugust.com> +Nate Coraor <nate@bx.psu.edu> +Nathaniel J. Smith <njs@pobox.com> +Nehal J Wani <nehaljw.kkd1@gmail.com> +Neil Botelho <neil.botelho321@gmail.com> +Nick Coghlan <ncoghlan@gmail.com> +Nick Stenning <nick@whiteink.com> +Nick Timkovich <prometheus235@gmail.com> +Nicolas Bock <nicolasbock@gmail.com> +Nikhil Benesch <nikhil.benesch@gmail.com> +Nitesh Sharma <nbsharma@outlook.com> +Nowell Strite <nowell@strite.org> +NtaleGrey <Shadikntale@gmail.com> +nvdv <modestdev@gmail.com> +Ofekmeister <ofekmeister@gmail.com> +ofrinevo <ofrine@gmail.com> +Oliver Jeeves <oliver.jeeves@ocado.com> +Oliver Tonnhofer <olt@bogosoft.com> +Olivier Girardot <ssaboum@gmail.com> +Olivier Grisel <olivier.grisel@ensta.org> +Ollie Rutherfurd <orutherfurd@gmail.com> +OMOTO Kenji <k-omoto@m3.com> +Omry Yadan <omry@fb.com> +Oren Held <orenhe@il.ibm.com> +Oscar Benjamin <oscar.j.benjamin@gmail.com> +Oz N Tiram <oz.tiram@gmail.com> +Pachwenko <32424503+Pachwenko@users.noreply.github.com> +Patrick Dubroy <pdubroy@gmail.com> +Patrick Jenkins <patrick@socialgrowthtechnologies.com> +Patrick Lawson <pl@foursquare.com> +patricktokeeffe <patricktokeeffe@users.noreply.github.com> +Patrik Kopkan <pkopkan@redhat.com> +Paul Kehrer <paul.l.kehrer@gmail.com> +Paul Moore <p.f.moore@gmail.com> +Paul Nasrat <pnasrat@gmail.com> +Paul Oswald <pauloswald@gmail.com> +Paul van der Linden <mail@paultjuh.org> +Paulus Schoutsen <paulus@paulusschoutsen.nl> +Pavithra Eswaramoorthy <33131404+QueenCoffee@users.noreply.github.com> +Pawel Jasinski <pawel.jasinski@gmail.com> +Pekka Klärck <peke@iki.fi> +Peter Lisák <peter.lisak@showmax.com> +Peter Waller <peter.waller@gmail.com> +petr-tik <petr-tik@users.noreply.github.com> +Phaneendra Chiruvella <hi@pcx.io> +Phil Freo <phil@philfreo.com> +Phil Pennock <phil@pennock-tech.com> +Phil Whelan <phil123@gmail.com> +Philip Jägenstedt <philip@foolip.org> +Philip Molloy <pamolloy@users.noreply.github.com> +Philippe Ombredanne <pombredanne@gmail.com> +Pi Delport <pjdelport@gmail.com> +Pierre-Yves Rofes <github@rofes.fr> +pip <pypa-dev@googlegroups.com> +Prabakaran Kumaresshan <k_prabakaran+github@hotmail.com> +Prabhjyotsing Surjit Singh Sodhi <psinghsodhi@bloomberg.net> +Prabhu Marappan <prabhum.794@gmail.com> +Pradyun Gedam <pradyunsg@gmail.com> +Pratik Mallya <mallya@us.ibm.com> +Preet Thakkar <preet.thakkar@students.iiit.ac.in> +Preston Holmes <preston@ptone.com> +Przemek Wrzos <hetmankp@none> +Pulkit Goyal <7895pulkit@gmail.com> +Qiangning Hong <hongqn@gmail.com> +Quentin Pradet <quentin.pradet@gmail.com> +R. 
David Murray <rdmurray@bitdance.com> +Rafael Caricio <rafael.jacinto@gmail.com> +Ralf Schmitt <ralf@systemexit.de> +Razzi Abuissa <razzi53@gmail.com> +rdb <rdb@users.noreply.github.com> +Remi Rampin <r@remirampin.com> +Remi Rampin <remirampin@gmail.com> +Rene Dudfield <renesd@gmail.com> +Riccardo Magliocchetti <riccardo.magliocchetti@gmail.com> +Richard Jones <r1chardj0n3s@gmail.com> +RobberPhex <robberphex@gmail.com> +Robert Collins <rbtcollins@hp.com> +Robert McGibbon <rmcgibbo@gmail.com> +Robert T. McGibbon <rmcgibbo@gmail.com> +robin elisha robinson <elisha.rob@gmail.com> +Roey Berman <roey.berman@gmail.com> +Rohan Jain <crodjer@gmail.com> +Rohan Jain <crodjer@users.noreply.github.com> +Rohan Jain <mail@rohanjain.in> +Roman Bogorodskiy <roman.bogorodskiy@ericsson.com> +Romuald Brunet <romuald@chivil.com> +Ronny Pfannschmidt <Ronny.Pfannschmidt@gmx.de> +Rory McCann <rory@technomancy.org> +Ross Brattain <ross.b.brattain@intel.com> +Roy Wellington Ⅳ <cactus_hugged@yahoo.com> +Roy Wellington Ⅳ <roy@mybasis.com> +Ryan Wooden <rygwdn@gmail.com> +ryneeverett <ryneeverett@gmail.com> +Sachi King <nakato@nakato.io> +Salvatore Rinchiera <salvatore@rinchiera.com> +Savio Jomton <sajo240519@gmail.com> +schlamar <marc.schlaich@gmail.com> +Scott Kitterman <sklist@kitterman.com> +Sean <me@sean.taipei> +seanj <seanj@xyke.com> +Sebastian Jordan <sebastian.jordan.mail@googlemail.com> +Sebastian Schaetz <sschaetz@butterflynetinc.com> +Segev Finer <segev208@gmail.com> +SeongSoo Cho <ppiyakk2@printf.kr> +Sergey Vasilyev <nolar@nolar.info> +Seth Woodworth <seth@sethish.com> +Shlomi Fish <shlomif@shlomifish.org> +Shovan Maity <shovan.maity@mayadata.io> +Simeon Visser <svisser@users.noreply.github.com> +Simon Cross <hodgestar@gmail.com> +Simon Pichugin <simon.pichugin@gmail.com> +sinoroc <sinoroc.code+git@gmail.com> +Sorin Sbarnea <sorin.sbarnea@gmail.com> +Stavros Korokithakis <stavros@korokithakis.net> +Stefan Scherfke <stefan@sofa-rockers.org> +Stephan Erb <github@stephanerb.eu> +stepshal <nessento@openmailbox.org> +Steve (Gadget) Barnes <gadgetsteve@hotmail.com> +Steve Barnes <gadgetsteve@hotmail.com> +Steve Dower <steve.dower@microsoft.com> +Steve Kowalik <steven@wedontsleep.org> +Steven Myint <git@stevenmyint.com> +stonebig <stonebig34@gmail.com> +Stéphane Bidoul (ACSONE) <stephane.bidoul@acsone.eu> +Stéphane Bidoul <stephane.bidoul@acsone.eu> +Stéphane Klein <contact@stephane-klein.info> +Sumana Harihareswara <sh@changeset.nyc> +Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua> +Sviatoslav Sydorenko <wk@sydorenko.org.ua> +Swat009 <swatantra.kumar8@gmail.com> +Takayuki SHIMIZUKAWA <shimizukawa@gmail.com> +tbeswick <tbeswick@enphaseenergy.com> +Thijs Triemstra <info@collab.nl> +Thomas Fenzl <thomas.fenzl@gmail.com> +Thomas Grainger <tagrain@gmail.com> +Thomas Guettler <tguettler@tbz-pariv.de> +Thomas Johansson <devnull@localhost> +Thomas Kluyver <thomas@kluyver.me.uk> +Thomas Smith <smithtg@ncbi.nlm.nih.gov> +Tim D. 
Smith <github@tim-smith.us> +Tim Gates <tim.gates@iress.com> +Tim Harder <radhermit@gmail.com> +Tim Heap <tim@timheap.me> +tim smith <github@tim-smith.us> +tinruufu <tinruufu@gmail.com> +Tom Forbes <tom@tomforb.es> +Tom Freudenheim <tom.freudenheim@onepeloton.com> +Tom V <tom@viner.tv> +Tomas Orsava <torsava@redhat.com> +Tomer Chachamu <tomer.chachamu@gmail.com> +Tony Beswick <tonybeswick@orcon.net.nz> +Tony Zhaocheng Tan <tony@tonytan.io> +TonyBeswick <TonyBeswick@users.noreply.github.com> +toonarmycaptain <toonarmycaptain@hotmail.com> +Toshio Kuratomi <toshio@fedoraproject.org> +Travis Swicegood <development@domain51.com> +Tzu-ping Chung <uranusjr@gmail.com> +Valentin Haenel <valentin.haenel@gmx.de> +Victor Stinner <victor.stinner@gmail.com> +victorvpaulo <victorvpaulo@gmail.com> +Viktor Szépe <viktor@szepe.net> +Ville Skyttä <ville.skytta@iki.fi> +Vinay Sajip <vinay_sajip@yahoo.co.uk> +Vincent Philippon <sindaewoh@gmail.com> +Vinicyus Macedo <7549205+vinicyusmacedo@users.noreply.github.com> +Vitaly Babiy <vbabiy86@gmail.com> +Vladimir Rutsky <rutsky@users.noreply.github.com> +W. Trevor King <wking@drexel.edu> +Wil Tan <wil@dready.org> +Wilfred Hughes <me@wilfred.me.uk> +William ML Leslie <william.leslie.ttg@gmail.com> +William T Olson <trevor@heytrevor.com> +Wilson Mo <wilsonfv@126.com> +wim glenn <wim.glenn@gmail.com> +Wolfgang Maier <wolfgang.maier@biologie.uni-freiburg.de> +Xavier Fernandez <xav.fernandez@gmail.com> +Xavier Fernandez <xavier.fernandez@polyconseil.fr> +xoviat <xoviat@users.noreply.github.com> +xtreak <tir.karthi@gmail.com> +YAMAMOTO Takashi <yamamoto@midokura.com> +Yen Chi Hsuan <yan12125@gmail.com> +Yeray Diaz Diaz <yeraydiazdiaz@gmail.com> +Yoval P <yoval@gmx.com> +Yu Jian <askingyj@gmail.com> +Yuan Jing Vincent Yan <yyan82@bloomberg.net> +Zearin <zearin@gonk.net> +Zearin <Zearin@users.noreply.github.com> +Zhiping Deng <kofreestyler@gmail.com> +Zvezdan Petkovic <zpetkovic@acm.org> +Łukasz Langa <lukasz@langa.pl> +Семён Марьясин <simeon@maryasin.name> diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/INSTALLER b/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/LICENSE.txt b/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..737fec5c5352af3d9a6a47a0670da4bdb52c5725 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/LICENSE.txt @@ -0,0 +1,20 @@ +Copyright (c) 2008-2019 The pip developers (see AUTHORS.txt file) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/METADATA b/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..cf6c9302c5b0495077d258b68c77e2fe11f90f8f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/METADATA @@ -0,0 +1,13 @@ +Metadata-Version: 2.1 +Name: pkg_resources +Version: 0.0.0 +Summary: UNKNOWN +Home-page: UNKNOWN +Author: UNKNOWN +Author-email: UNKNOWN +License: UNKNOWN +Platform: UNKNOWN + +UNKNOWN + + diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/RECORD b/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..2bbae1dbdbd1db605ceceba7988d706cca98a055 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/RECORD @@ -0,0 +1,38 @@ +pkg_resources-0.0.0.dist-info/AUTHORS.txt,sha256=RtqU9KfonVGhI48DAA4-yTOBUhBtQTjFhaDzHoyh7uU,21518 +pkg_resources-0.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pkg_resources-0.0.0.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090 +pkg_resources-0.0.0.dist-info/METADATA,sha256=V9_WPOtD1FnuKrTGv6Ique7kAOn2lasvT8W0_iMCCCk,177 +pkg_resources-0.0.0.dist-info/RECORD,, +pkg_resources-0.0.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +pkg_resources/__init__.py,sha256=0IssxXPnaDKpYZRra8Ime0JG4hwosQljItGD0bnIkGk,108349 +pkg_resources/__pycache__/__init__.cpython-38.pyc,, +pkg_resources/__pycache__/py31compat.cpython-38.pyc,, +pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pkg_resources/_vendor/__pycache__/__init__.cpython-38.pyc,, +pkg_resources/_vendor/__pycache__/appdirs.cpython-38.pyc,, +pkg_resources/_vendor/__pycache__/pyparsing.cpython-38.pyc,, +pkg_resources/_vendor/__pycache__/six.cpython-38.pyc,, +pkg_resources/_vendor/appdirs.py,sha256=MievUEuv3l_mQISH5SF0shDk_BNhHHzYiAPrT3ITN4I,24701 +pkg_resources/_vendor/packaging/__about__.py,sha256=zkcCPTN_6TcLW0Nrlg0176-R1QQ_WVPTm8sz1R4-HjM,720 +pkg_resources/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513 +pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-38.pyc,, +pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-38.pyc,, +pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-38.pyc,, +pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-38.pyc,, +pkg_resources/_vendor/packaging/__pycache__/markers.cpython-38.pyc,, +pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-38.pyc,, +pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc,, +pkg_resources/_vendor/packaging/__pycache__/utils.cpython-38.pyc,, +pkg_resources/_vendor/packaging/__pycache__/version.cpython-38.pyc,, +pkg_resources/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860 
+pkg_resources/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416 +pkg_resources/_vendor/packaging/markers.py,sha256=uEcBBtGvzqltgnArqb9c4RrcInXezDLos14zbBHhWJo,8248 +pkg_resources/_vendor/packaging/requirements.py,sha256=SikL2UynbsT0qtY9ltqngndha_sfo0w6XGFhAhoSoaQ,4355 +pkg_resources/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025 +pkg_resources/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421 +pkg_resources/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556 +pkg_resources/_vendor/pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055 +pkg_resources/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 +pkg_resources/extern/__init__.py,sha256=cHiEfHuLmm6rs5Ve_ztBfMI7Lr31vss-D4wkqF5xzlI,2498 +pkg_resources/extern/__pycache__/__init__.cpython-38.pyc,, +pkg_resources/py31compat.py,sha256=-WQ0e4c3RG_acdhwC3gLiXhP_lg4G5q7XYkZkQg0gxU,558 diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/WHEEL b/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..ef99c6cf3283b50a273ac4c6d009a0aa85597070 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources-0.0.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/__init__.py b/backend/test/lib/python3.8/site-packages/pkg_resources/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2f5aa64a6e10832f407601d668e4ef0d9d5d0aeb --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources/__init__.py @@ -0,0 +1,3296 @@ +# coding: utf-8 +""" +Package resource API +-------------------- + +A resource is a logical file contained within a package, or a logical +subdirectory thereof. The package resource API expects resource names +to have their path parts separated with ``/``, *not* whatever the local +path separator is. Do not use os.path operations to manipulate resource +names being passed into the API. + +The package resource API is designed to work with normal filesystem packages, +.egg files, and unpacked .egg files. It can also work in a limited way with +.zip files and with custom PEP 302 loaders that support the ``get_data()`` +method. 
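For reference, the sha256= fields in the RECORD file above follow the wheel/PEP 376 convention: the raw SHA-256 digest, URL-safe base64-encoded, with trailing = padding stripped. Checking it against the 4-byte INSTALLER file (content pip plus a newline) should reproduce the hash recorded above:

    import base64
    import hashlib

    def record_hash(data):
        digest = hashlib.sha256(data).digest()
        b64 = base64.urlsafe_b64encode(digest).rstrip(b'=')
        return 'sha256=' + b64.decode('ascii')

    print(record_hash(b'pip\n'))  # expected: the INSTALLER entry's hash above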
+""" + +from __future__ import absolute_import + +import sys +import os +import io +import time +import re +import types +import zipfile +import zipimport +import warnings +import stat +import functools +import pkgutil +import operator +import platform +import collections +import plistlib +import email.parser +import errno +import tempfile +import textwrap +import itertools +import inspect +import ntpath +import posixpath +from pkgutil import get_importer + +try: + import _imp +except ImportError: + # Python 3.2 compatibility + import imp as _imp + +try: + FileExistsError +except NameError: + FileExistsError = OSError + +from pkg_resources.extern import six +from pkg_resources.extern.six.moves import urllib, map, filter + +# capture these to bypass sandboxing +from os import utime +try: + from os import mkdir, rename, unlink + WRITE_SUPPORT = True +except ImportError: + # no write support, probably under GAE + WRITE_SUPPORT = False + +from os import open as os_open +from os.path import isdir, split + +try: + import importlib.machinery as importlib_machinery + # access attribute to force import under delayed import mechanisms. + importlib_machinery.__name__ +except ImportError: + importlib_machinery = None + +from . import py31compat +from pkg_resources.extern import appdirs +from pkg_resources.extern import packaging +__import__('pkg_resources.extern.packaging.version') +__import__('pkg_resources.extern.packaging.specifiers') +__import__('pkg_resources.extern.packaging.requirements') +__import__('pkg_resources.extern.packaging.markers') + + +__metaclass__ = type + + +if (3, 0) < sys.version_info < (3, 5): + raise RuntimeError("Python 3.5 or later is required") + +if six.PY2: + # Those builtin exceptions are only defined in Python 3 + PermissionError = None + NotADirectoryError = None + +# declare some globals that will be defined later to +# satisfy the linters. +require = None +working_set = None +add_activation_listener = None +resources_stream = None +cleanup_resources = None +resource_dir = None +resource_stream = None +set_extraction_path = None +resource_isdir = None +resource_string = None +iter_entry_points = None +resource_listdir = None +resource_filename = None +resource_exists = None +_distribution_finders = None +_namespace_handlers = None +_namespace_packages = None + + +class PEP440Warning(RuntimeWarning): + """ + Used when there is an issue with a version or specifier not complying with + PEP 440. + """ + + +def parse_version(v): + try: + return packaging.version.Version(v) + except packaging.version.InvalidVersion: + return packaging.version.LegacyVersion(v) + + +_state_vars = {} + + +def _declare_state(vartype, **kw): + globals().update(kw) + _state_vars.update(dict.fromkeys(kw, vartype)) + + +def __getstate__(): + state = {} + g = globals() + for k, v in _state_vars.items(): + state[k] = g['_sget_' + v](g[k]) + return state + + +def __setstate__(state): + g = globals() + for k, v in state.items(): + g['_sset_' + _state_vars[k]](k, g[k], v) + return state + + +def _sget_dict(val): + return val.copy() + + +def _sset_dict(key, ob, state): + ob.clear() + ob.update(state) + + +def _sget_object(val): + return val.__getstate__() + + +def _sset_object(key, ob, state): + ob.__setstate__(state) + + +_sget_none = _sset_none = lambda *args: None + + +def get_supported_platform(): + """Return this platform's maximum compatible version. 
+ + distutils.util.get_platform() normally reports the minimum version + of Mac OS X that would be required to *use* extensions produced by + distutils. But what we want when checking compatibility is to know the + version of Mac OS X that we are *running*. To allow usage of packages that + explicitly require a newer version of Mac OS X, we must also know the + current version of the OS. + + If this condition occurs for any other platform with a version in its + platform strings, this function should be extended accordingly. + """ + plat = get_build_platform() + m = macosVersionString.match(plat) + if m is not None and sys.platform == "darwin": + try: + plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) + except ValueError: + # not Mac OS X + pass + return plat + + +__all__ = [ + # Basic resource access and distribution/entry point discovery + 'require', 'run_script', 'get_provider', 'get_distribution', + 'load_entry_point', 'get_entry_map', 'get_entry_info', + 'iter_entry_points', + 'resource_string', 'resource_stream', 'resource_filename', + 'resource_listdir', 'resource_exists', 'resource_isdir', + + # Environmental control + 'declare_namespace', 'working_set', 'add_activation_listener', + 'find_distributions', 'set_extraction_path', 'cleanup_resources', + 'get_default_cache', + + # Primary implementation classes + 'Environment', 'WorkingSet', 'ResourceManager', + 'Distribution', 'Requirement', 'EntryPoint', + + # Exceptions + 'ResolutionError', 'VersionConflict', 'DistributionNotFound', + 'UnknownExtra', 'ExtractionError', + + # Warnings + 'PEP440Warning', + + # Parsing functions and string utilities + 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', + 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', + 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', + + # filesystem utilities + 'ensure_directory', 'normalize_path', + + # Distribution "precedence" constants + 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', + + # "Provider" interfaces, implementations, and registration/lookup APIs + 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', + 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', + 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', + 'register_finder', 'register_namespace_handler', 'register_loader_type', + 'fixup_namespace_packages', 'get_importer', + + # Warnings + 'PkgResourcesDeprecationWarning', + + # Deprecated/backward compatibility only + 'run_main', 'AvailableDistributions', +] + + +class ResolutionError(Exception): + """Abstract base for dependency resolution errors""" + + def __repr__(self): + return self.__class__.__name__ + repr(self.args) + + +class VersionConflict(ResolutionError): + """ + An already-installed version conflicts with the requested version. + + Should be initialized with the installed Distribution and the requested + Requirement. + """ + + _template = "{self.dist} is installed but {self.req} is required" + + @property + def dist(self): + return self.args[0] + + @property + def req(self): + return self.args[1] + + def report(self): + return self._template.format(**locals()) + + def with_context(self, required_by): + """ + If required_by is non-empty, return a version of self that is a + ContextualVersionConflict. 
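The template mechanics of VersionConflict above are compact enough to demo standalone. The class and argument values below are made up; .format(self=self) is equivalent to the .format(**locals()) used in report() above, since self is the only local:

    class Conflict(Exception):
        _template = '{self.dist} is installed but {self.req} is required'

        @property
        def dist(self):
            return self.args[0]

        @property
        def req(self):
            return self.args[1]

        def report(self):
            return self._template.format(self=self)

    print(Conflict('requests 2.22.0', 'requests>=2.24').report())
    # requests 2.22.0 is installed but requests>=2.24 is required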
+ """ + if not required_by: + return self + args = self.args + (required_by,) + return ContextualVersionConflict(*args) + + +class ContextualVersionConflict(VersionConflict): + """ + A VersionConflict that accepts a third parameter, the set of the + requirements that required the installed Distribution. + """ + + _template = VersionConflict._template + ' by {self.required_by}' + + @property + def required_by(self): + return self.args[2] + + +class DistributionNotFound(ResolutionError): + """A requested distribution was not found""" + + _template = ("The '{self.req}' distribution was not found " + "and is required by {self.requirers_str}") + + @property + def req(self): + return self.args[0] + + @property + def requirers(self): + return self.args[1] + + @property + def requirers_str(self): + if not self.requirers: + return 'the application' + return ', '.join(self.requirers) + + def report(self): + return self._template.format(**locals()) + + def __str__(self): + return self.report() + + +class UnknownExtra(ResolutionError): + """Distribution doesn't have an "extra feature" of the given name""" + + +_provider_factories = {} + +PY_MAJOR = '{}.{}'.format(*sys.version_info) +EGG_DIST = 3 +BINARY_DIST = 2 +SOURCE_DIST = 1 +CHECKOUT_DIST = 0 +DEVELOP_DIST = -1 + + +def register_loader_type(loader_type, provider_factory): + """Register `provider_factory` to make providers for `loader_type` + + `loader_type` is the type or class of a PEP 302 ``module.__loader__``, + and `provider_factory` is a function that, passed a *module* object, + returns an ``IResourceProvider`` for that module. + """ + _provider_factories[loader_type] = provider_factory + + +def get_provider(moduleOrReq): + """Return an IResourceProvider for the named module or requirement""" + if isinstance(moduleOrReq, Requirement): + return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] + try: + module = sys.modules[moduleOrReq] + except KeyError: + __import__(moduleOrReq) + module = sys.modules[moduleOrReq] + loader = getattr(module, '__loader__', None) + return _find_adapter(_provider_factories, loader)(module) + + +def _macosx_vers(_cache=[]): + if not _cache: + version = platform.mac_ver()[0] + # fallback for MacPorts + if version == '': + plist = '/System/Library/CoreServices/SystemVersion.plist' + if os.path.exists(plist): + if hasattr(plistlib, 'readPlist'): + plist_content = plistlib.readPlist(plist) + if 'ProductVersion' in plist_content: + version = plist_content['ProductVersion'] + + _cache.append(version.split('.')) + return _cache[0] + + +def _macosx_arch(machine): + return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) + + +def get_build_platform(): + """Return this platform's string for platform-specific distributions + + XXX Currently this is the same as ``distutils.util.get_platform()``, but it + needs some hacks for Linux and Mac OS X. 
+ """ + from sysconfig import get_platform + + plat = get_platform() + if sys.platform == "darwin" and not plat.startswith('macosx-'): + try: + version = _macosx_vers() + machine = os.uname()[4].replace(" ", "_") + return "macosx-%d.%d-%s" % ( + int(version[0]), int(version[1]), + _macosx_arch(machine), + ) + except ValueError: + # if someone is running a non-Mac darwin system, this will fall + # through to the default implementation + pass + return plat + + +macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") +darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") +# XXX backward compat +get_platform = get_build_platform + + +def compatible_platforms(provided, required): + """Can code for the `provided` platform run on the `required` platform? + + Returns true if either platform is ``None``, or the platforms are equal. + + XXX Needs compatibility checks for Linux and other unixy OSes. + """ + if provided is None or required is None or provided == required: + # easy case + return True + + # Mac OS X special cases + reqMac = macosVersionString.match(required) + if reqMac: + provMac = macosVersionString.match(provided) + + # is this a Mac package? + if not provMac: + # this is backwards compatibility for packages built before + # setuptools 0.6. All packages built after this point will + # use the new macosx designation. + provDarwin = darwinVersionString.match(provided) + if provDarwin: + dversion = int(provDarwin.group(1)) + macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) + if dversion == 7 and macosversion >= "10.3" or \ + dversion == 8 and macosversion >= "10.4": + return True + # egg isn't macosx or legacy darwin + return False + + # are they the same major version and machine type? + if provMac.group(1) != reqMac.group(1) or \ + provMac.group(3) != reqMac.group(3): + return False + + # is the required OS major update >= the provided one? 
+ if int(provMac.group(2)) > int(reqMac.group(2)): + return False + + return True + + # XXX Linux and other platforms' special cases should go here + return False + + +def run_script(dist_spec, script_name): + """Locate distribution `dist_spec` and run its `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + require(dist_spec)[0].run_script(script_name, ns) + + +# backward compatibility +run_main = run_script + + +def get_distribution(dist): + """Return a current distribution object for a Requirement or string""" + if isinstance(dist, six.string_types): + dist = Requirement.parse(dist) + if isinstance(dist, Requirement): + dist = get_provider(dist) + if not isinstance(dist, Distribution): + raise TypeError("Expected string, Requirement, or Distribution", dist) + return dist + + +def load_entry_point(dist, group, name): + """Return `name` entry point of `group` for `dist` or raise ImportError""" + return get_distribution(dist).load_entry_point(group, name) + + +def get_entry_map(dist, group=None): + """Return the entry point map for `group`, or the full entry map""" + return get_distribution(dist).get_entry_map(group) + + +def get_entry_info(dist, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return get_distribution(dist).get_entry_info(group, name) + + +class IMetadataProvider: + def has_metadata(name): + """Does the package's distribution contain the named metadata?""" + + def get_metadata(name): + """The named metadata resource as a string""" + + def get_metadata_lines(name): + """Yield named metadata resource as list of non-blank non-comment lines + + Leading and trailing whitespace is stripped from each line, and lines + with ``#`` as the first non-blank character are omitted.""" + + def metadata_isdir(name): + """Is the named metadata a directory? (like ``os.path.isdir()``)""" + + def metadata_listdir(name): + """List of metadata names in the directory (like ``os.listdir()``)""" + + def run_script(script_name, namespace): + """Execute the named script in the supplied namespace dictionary""" + + +class IResourceProvider(IMetadataProvider): + """An object that provides access to package resources""" + + def get_resource_filename(manager, resource_name): + """Return a true filesystem path for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_stream(manager, resource_name): + """Return a readable file-like object for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_string(manager, resource_name): + """Return a string containing the contents of `resource_name` + + `manager` must be an ``IResourceManager``""" + + def has_resource(resource_name): + """Does the package contain the named resource?""" + + def resource_isdir(resource_name): + """Is the named resource a directory? (like ``os.path.isdir()``)""" + + def resource_listdir(resource_name): + """List of resource names in the directory (like ``os.listdir()``)""" + + +class WorkingSet: + """A collection of active distributions on sys.path (or a similar list)""" + + def __init__(self, entries=None): + """Create working set from list of path entries (default=sys.path)""" + self.entries = [] + self.entry_keys = {} + self.by_key = {} + self.callbacks = [] + + if entries is None: + entries = sys.path + + for entry in entries: + self.add_entry(entry) + + @classmethod + def _build_master(cls): + """ + Prepare the master working set. 
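+
+ This honors ``__main__.__requires__`` when present, falling back to a
+ working set rebuilt from those requirements if they conflict with what
+ is already on ``sys.path``.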
+ """ + ws = cls() + try: + from __main__ import __requires__ + except ImportError: + # The main program does not list any requirements + return ws + + # ensure the requirements are met + try: + ws.require(__requires__) + except VersionConflict: + return cls._build_from_requirements(__requires__) + + return ws + + @classmethod + def _build_from_requirements(cls, req_spec): + """ + Build a working set from a requirement spec. Rewrites sys.path. + """ + # try it without defaults already on sys.path + # by starting with an empty path + ws = cls([]) + reqs = parse_requirements(req_spec) + dists = ws.resolve(reqs, Environment()) + for dist in dists: + ws.add(dist) + + # add any missing entries from sys.path + for entry in sys.path: + if entry not in ws.entries: + ws.add_entry(entry) + + # then copy back to sys.path + sys.path[:] = ws.entries + return ws + + def add_entry(self, entry): + """Add a path item to ``.entries``, finding any distributions on it + + ``find_distributions(entry, True)`` is used to find distributions + corresponding to the path entry, and they are added. `entry` is + always appended to ``.entries``, even if it is already present. + (This is because ``sys.path`` can contain the same value more than + once, and the ``.entries`` of the ``sys.path`` WorkingSet should always + equal ``sys.path``.) + """ + self.entry_keys.setdefault(entry, []) + self.entries.append(entry) + for dist in find_distributions(entry, True): + self.add(dist, entry, False) + + def __contains__(self, dist): + """True if `dist` is the active distribution for its project""" + return self.by_key.get(dist.key) == dist + + def find(self, req): + """Find a distribution matching requirement `req` + + If there is an active distribution for the requested project, this + returns it as long as it meets the version requirement specified by + `req`. But, if there is an active distribution for the project and it + does *not* meet the `req` requirement, ``VersionConflict`` is raised. + If there is no active distribution for the requested project, ``None`` + is returned. + """ + dist = self.by_key.get(req.key) + if dist is not None and dist not in req: + # XXX add more info + raise VersionConflict(dist, req) + return dist + + def iter_entry_points(self, group, name=None): + """Yield entry point objects from `group` matching `name` + + If `name` is None, yields all entry points in `group` from all + distributions in the working set, otherwise only ones matching + both `group` and `name` are yielded (in distribution order). + """ + return ( + entry + for dist in self + for entry in dist.get_entry_map(group).values() + if name is None or name == entry.name + ) + + def run_script(self, requires, script_name): + """Locate distribution for `requires` and run `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + self.require(requires)[0].run_script(script_name, ns) + + def __iter__(self): + """Yield distributions for non-duplicate projects in the working set + + The yield order is the order in which the items' path entries were + added to the working set. 
+ """ + seen = {} + for item in self.entries: + if item not in self.entry_keys: + # workaround a cache issue + continue + + for key in self.entry_keys[item]: + if key not in seen: + seen[key] = 1 + yield self.by_key[key] + + def add(self, dist, entry=None, insert=True, replace=False): + """Add `dist` to working set, associated with `entry` + + If `entry` is unspecified, it defaults to the ``.location`` of `dist`. + On exit from this routine, `entry` is added to the end of the working + set's ``.entries`` (if it wasn't already present). + + `dist` is only added to the working set if it's for a project that + doesn't already have a distribution in the set, unless `replace=True`. + If it's added, any callbacks registered with the ``subscribe()`` method + will be called. + """ + if insert: + dist.insert_on(self.entries, entry, replace=replace) + + if entry is None: + entry = dist.location + keys = self.entry_keys.setdefault(entry, []) + keys2 = self.entry_keys.setdefault(dist.location, []) + if not replace and dist.key in self.by_key: + # ignore hidden distros + return + + self.by_key[dist.key] = dist + if dist.key not in keys: + keys.append(dist.key) + if dist.key not in keys2: + keys2.append(dist.key) + self._added_new(dist) + + def resolve(self, requirements, env=None, installer=None, + replace_conflicting=False, extras=None): + """List all distributions needed to (recursively) meet `requirements` + + `requirements` must be a sequence of ``Requirement`` objects. `env`, + if supplied, should be an ``Environment`` instance. If + not supplied, it defaults to all distributions available within any + entry or distribution in the working set. `installer`, if supplied, + will be invoked with each requirement that cannot be met by an + already-installed distribution; it should return a ``Distribution`` or + ``None``. + + Unless `replace_conflicting=True`, raises a VersionConflict exception + if + any requirements are found on the path that have the correct name but + the wrong version. Otherwise, if an `installer` is supplied it will be + invoked to obtain the correct version of the requirement and activate + it. + + `extras` is a list of the extras to be used with these requirements. + This is important because extra requirements may look like `my_req; + extra = "my_extra"`, which would otherwise be interpreted as a purely + optional requirement. Instead, we want to be able to assert that these + requirements are truly required. + """ + + # set up the stack + requirements = list(requirements)[::-1] + # set of processed requirements + processed = {} + # key -> dist + best = {} + to_activate = [] + + req_extras = _ReqExtras() + + # Mapping of requirement to set of distributions that required it; + # useful for reporting info about conflicts. 
+ required_by = collections.defaultdict(set) + + while requirements: + # process dependencies breadth-first + req = requirements.pop(0) + if req in processed: + # Ignore cyclic or redundant dependencies + continue + + if not req_extras.markers_pass(req, extras): + continue + + dist = best.get(req.key) + if dist is None: + # Find the best distribution and add it to the map + dist = self.by_key.get(req.key) + if dist is None or (dist not in req and replace_conflicting): + ws = self + if env is None: + if dist is None: + env = Environment(self.entries) + else: + # Use an empty environment and workingset to avoid + # any further conflicts with the conflicting + # distribution + env = Environment([]) + ws = WorkingSet([]) + dist = best[req.key] = env.best_match( + req, ws, installer, + replace_conflicting=replace_conflicting + ) + if dist is None: + requirers = required_by.get(req, None) + raise DistributionNotFound(req, requirers) + to_activate.append(dist) + if dist not in req: + # Oops, the "best" so far conflicts with a dependency + dependent_req = required_by[req] + raise VersionConflict(dist, req).with_context(dependent_req) + + # push the new requirements onto the stack + new_requirements = dist.requires(req.extras)[::-1] + requirements.extend(new_requirements) + + # Register the new requirements needed by req + for new_requirement in new_requirements: + required_by[new_requirement].add(req.project_name) + req_extras[new_requirement] = req.extras + + processed[req] = True + + # return list of distros to activate + return to_activate + + def find_plugins( + self, plugin_env, full_env=None, installer=None, fallback=True): + """Find all activatable distributions in `plugin_env` + + Example usage:: + + distributions, errors = working_set.find_plugins( + Environment(plugin_dirlist) + ) + # add plugins+libs to sys.path + map(working_set.add, distributions) + # display errors + print('Could not load', errors) + + The `plugin_env` should be an ``Environment`` instance that contains + only distributions that are in the project's "plugin directory" or + directories. The `full_env`, if supplied, should be an ``Environment`` + contains all currently-available distributions. If `full_env` is not + supplied, one is created automatically from the ``WorkingSet`` this + method is called on, which will typically mean that every directory on + ``sys.path`` will be scanned for distributions. + + `installer` is a standard installer callback as used by the + ``resolve()`` method. The `fallback` flag indicates whether we should + attempt to resolve older versions of a plugin if the newest version + cannot be resolved. + + This method returns a 2-tuple: (`distributions`, `error_info`), where + `distributions` is a list of the distributions found in `plugin_env` + that were loadable, along with any other distributions that are needed + to resolve their dependencies. `error_info` is a dictionary mapping + unloadable plugin distributions to an exception instance describing the + error that occurred. Usually this will be a ``DistributionNotFound`` or + ``VersionConflict`` instance. 
+ """ + + plugin_projects = list(plugin_env) + # scan project names in alphabetic order + plugin_projects.sort() + + error_info = {} + distributions = {} + + if full_env is None: + env = Environment(self.entries) + env += plugin_env + else: + env = full_env + plugin_env + + shadow_set = self.__class__([]) + # put all our entries in shadow_set + list(map(shadow_set.add, self)) + + for project_name in plugin_projects: + + for dist in plugin_env[project_name]: + + req = [dist.as_requirement()] + + try: + resolvees = shadow_set.resolve(req, env, installer) + + except ResolutionError as v: + # save error info + error_info[dist] = v + if fallback: + # try the next older version of project + continue + else: + # give up on this project, keep going + break + + else: + list(map(shadow_set.add, resolvees)) + distributions.update(dict.fromkeys(resolvees)) + + # success, no need to try any more versions of this project + break + + distributions = list(distributions) + distributions.sort() + + return distributions, error_info + + def require(self, *requirements): + """Ensure that distributions matching `requirements` are activated + + `requirements` must be a string or a (possibly-nested) sequence + thereof, specifying the distributions and versions required. The + return value is a sequence of the distributions that needed to be + activated to fulfill the requirements; all relevant distributions are + included, even if they were already activated in this working set. + """ + needed = self.resolve(parse_requirements(requirements)) + + for dist in needed: + self.add(dist) + + return needed + + def subscribe(self, callback, existing=True): + """Invoke `callback` for all distributions + + If `existing=True` (default), + call on all existing ones, as well. + """ + if callback in self.callbacks: + return + self.callbacks.append(callback) + if not existing: + return + for dist in self: + callback(dist) + + def _added_new(self, dist): + for callback in self.callbacks: + callback(dist) + + def __getstate__(self): + return ( + self.entries[:], self.entry_keys.copy(), self.by_key.copy(), + self.callbacks[:] + ) + + def __setstate__(self, e_k_b_c): + entries, keys, by_key, callbacks = e_k_b_c + self.entries = entries[:] + self.entry_keys = keys.copy() + self.by_key = by_key.copy() + self.callbacks = callbacks[:] + + +class _ReqExtras(dict): + """ + Map each requirement to the extras that demanded it. + """ + + def markers_pass(self, req, extras=None): + """ + Evaluate markers for req against each extra that + demanded it. + + Return False if the req has a marker and fails + evaluation. Otherwise, return True. + """ + extra_evals = ( + req.marker.evaluate({'extra': extra}) + for extra in self.get(req, ()) + (extras or (None,)) + ) + return not req.marker or any(extra_evals) + + +class Environment: + """Searchable snapshot of distributions on a search path""" + + def __init__( + self, search_path=None, platform=get_supported_platform(), + python=PY_MAJOR): + """Snapshot distributions available on a search path + + Any distributions found on `search_path` are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. + + `platform` is an optional string specifying the name of the platform + that platform-specific distributions must be compatible with. If + unspecified, it defaults to the current platform. `python` is an + optional string naming the desired version of Python (e.g. ``'3.6'``); + it defaults to the current version. 
+ + You may explicitly set `platform` (and/or `python`) to ``None`` if you + wish to map *all* distributions, not just those compatible with the + running platform or Python version. + """ + self._distmap = {} + self.platform = platform + self.python = python + self.scan(search_path) + + def can_add(self, dist): + """Is distribution `dist` acceptable for this environment? + + The distribution must match the platform and python version + requirements specified when this environment was created, or False + is returned. + """ + py_compat = ( + self.python is None + or dist.py_version is None + or dist.py_version == self.python + ) + return py_compat and compatible_platforms(dist.platform, self.platform) + + def remove(self, dist): + """Remove `dist` from the environment""" + self._distmap[dist.key].remove(dist) + + def scan(self, search_path=None): + """Scan `search_path` for distributions usable in this environment + + Any distributions found are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. Only distributions conforming to + the platform/python version defined at initialization are added. + """ + if search_path is None: + search_path = sys.path + + for item in search_path: + for dist in find_distributions(item): + self.add(dist) + + def __getitem__(self, project_name): + """Return a newest-to-oldest list of distributions for `project_name` + + Uses case-insensitive `project_name` comparison, assuming all the + project's distributions use their project's name converted to all + lowercase as their key. + + """ + distribution_key = project_name.lower() + return self._distmap.get(distribution_key, []) + + def add(self, dist): + """Add `dist` if we ``can_add()`` it and it has not already been added + """ + if self.can_add(dist) and dist.has_version(): + dists = self._distmap.setdefault(dist.key, []) + if dist not in dists: + dists.append(dist) + dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) + + def best_match( + self, req, working_set, installer=None, replace_conflicting=False): + """Find distribution best matching `req` and usable on `working_set` + + This calls the ``find(req)`` method of the `working_set` to see if a + suitable distribution is already active. (This may raise + ``VersionConflict`` if an unsuitable version of the project is already + active in the specified `working_set`.) If a suitable distribution + isn't active, this method returns the newest distribution in the + environment that meets the ``Requirement`` in `req`. If no suitable + distribution is found, and `installer` is supplied, then the result of + calling the environment's ``obtain(req, installer)`` method will be + returned. + """ + try: + dist = working_set.find(req) + except VersionConflict: + if not replace_conflicting: + raise + dist = None + if dist is not None: + return dist + for dist in self[req.key]: + if dist in req: + return dist + # try to download/install + return self.obtain(req, installer) + + def obtain(self, requirement, installer=None): + """Obtain a distribution matching `requirement` (e.g. via download) + + Obtain a distro that matches requirement (e.g. via download). In the + base ``Environment`` class, this routine just returns + ``installer(requirement)``, unless `installer` is None, in which case + None is returned instead. 
This method is a hook that allows subclasses + to attempt other ways of obtaining a distribution before falling back + to the `installer` argument.""" + if installer is not None: + return installer(requirement) + + def __iter__(self): + """Yield the unique project names of the available distributions""" + for key in self._distmap.keys(): + if self[key]: + yield key + + def __iadd__(self, other): + """In-place addition of a distribution or environment""" + if isinstance(other, Distribution): + self.add(other) + elif isinstance(other, Environment): + for project in other: + for dist in other[project]: + self.add(dist) + else: + raise TypeError("Can't add %r to environment" % (other,)) + return self + + def __add__(self, other): + """Add an environment or distribution to an environment""" + new = self.__class__([], platform=None, python=None) + for env in self, other: + new += env + return new + + +# XXX backward compatibility +AvailableDistributions = Environment + + +class ExtractionError(RuntimeError): + """An error occurred extracting a resource + + The following attributes are available from instances of this exception: + + manager + The resource manager that raised this exception + + cache_path + The base directory for resource extraction + + original_error + The exception instance that caused extraction to fail + """ + + +class ResourceManager: + """Manage resource extraction and packages""" + extraction_path = None + + def __init__(self): + self.cached_files = {} + + def resource_exists(self, package_or_requirement, resource_name): + """Does the named resource exist?""" + return get_provider(package_or_requirement).has_resource(resource_name) + + def resource_isdir(self, package_or_requirement, resource_name): + """Is the named resource an existing directory?""" + return get_provider(package_or_requirement).resource_isdir( + resource_name + ) + + def resource_filename(self, package_or_requirement, resource_name): + """Return a true filesystem path for specified resource""" + return get_provider(package_or_requirement).get_resource_filename( + self, resource_name + ) + + def resource_stream(self, package_or_requirement, resource_name): + """Return a readable file-like object for specified resource""" + return get_provider(package_or_requirement).get_resource_stream( + self, resource_name + ) + + def resource_string(self, package_or_requirement, resource_name): + """Return specified resource as a string""" + return get_provider(package_or_requirement).get_resource_string( + self, resource_name + ) + + def resource_listdir(self, package_or_requirement, resource_name): + """List the contents of the named resource directory""" + return get_provider(package_or_requirement).resource_listdir( + resource_name + ) + + def extraction_error(self): + """Give an error message for problems extracting file(s)""" + + old_exc = sys.exc_info()[1] + cache_path = self.extraction_path or get_default_cache() + + tmpl = textwrap.dedent(""" + Can't extract file(s) to egg cache + + The following error occurred while trying to extract file(s) + to the Python egg cache: + + {old_exc} + + The Python egg cache directory is currently set to: + + {cache_path} + + Perhaps your account does not have write access to this directory? + You can change the cache directory by setting the PYTHON_EGG_CACHE + environment variable to point to an accessible directory. 
+ """).lstrip() + err = ExtractionError(tmpl.format(**locals())) + err.manager = self + err.cache_path = cache_path + err.original_error = old_exc + raise err + + def get_cache_path(self, archive_name, names=()): + """Return absolute location in cache for `archive_name` and `names` + + The parent directory of the resulting path will be created if it does + not already exist. `archive_name` should be the base filename of the + enclosing egg (which may not be the name of the enclosing zipfile!), + including its ".egg" extension. `names`, if provided, should be a + sequence of path name parts "under" the egg's extraction location. + + This method should only be called by resource providers that need to + obtain an extraction location, and only for names they intend to + extract, as it tracks the generated names for possible cleanup later. + """ + extract_path = self.extraction_path or get_default_cache() + target_path = os.path.join(extract_path, archive_name + '-tmp', *names) + try: + _bypass_ensure_directory(target_path) + except Exception: + self.extraction_error() + + self._warn_unsafe_extraction_path(extract_path) + + self.cached_files[target_path] = 1 + return target_path + + @staticmethod + def _warn_unsafe_extraction_path(path): + """ + If the default extraction path is overridden and set to an insecure + location, such as /tmp, it opens up an opportunity for an attacker to + replace an extracted file with an unauthorized payload. Warn the user + if a known insecure location is used. + + See Distribute #375 for more details. + """ + if os.name == 'nt' and not path.startswith(os.environ['windir']): + # On Windows, permissions are generally restrictive by default + # and temp directories are not writable by other users, so + # bypass the warning. + return + mode = os.stat(path).st_mode + if mode & stat.S_IWOTH or mode & stat.S_IWGRP: + msg = ( + "%s is writable by group/others and vulnerable to attack " + "when " + "used with get_resource_filename. Consider a more secure " + "location (set with .set_extraction_path or the " + "PYTHON_EGG_CACHE environment variable)." % path + ) + warnings.warn(msg, UserWarning) + + def postprocess(self, tempname, filename): + """Perform any platform-specific postprocessing of `tempname` + + This is where Mac header rewrites should be done; other platforms don't + have anything special they should do. + + Resource providers should call this method ONLY after successfully + extracting a compressed resource. They must NOT call it on resources + that are already in the filesystem. + + `tempname` is the current (temporary) name of the file, and `filename` + is the name it will be renamed to by the caller after this routine + returns. + """ + + if os.name == 'posix': + # Make the resource executable + mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 + os.chmod(tempname, mode) + + def set_extraction_path(self, path): + """Set the base path where resources will be extracted to, if needed. + + If you do not call this routine before any extractions take place, the + path defaults to the return value of ``get_default_cache()``. (Which + is based on the ``PYTHON_EGG_CACHE`` environment variable, with various + platform-specific fallbacks. See that routine's documentation for more + details.) + + Resources are extracted to subdirectories of this path based upon + information given by the ``IResourceProvider``. You may set this to a + temporary directory, but then you must call ``cleanup_resources()`` to + delete the extracted files when done. 
There is no guarantee that + ``cleanup_resources()`` will be able to remove all extracted files. + + (Note: you may not change the extraction path for a given resource + manager once resources have been extracted, unless you first call + ``cleanup_resources()``.) + """ + if self.cached_files: + raise ValueError( + "Can't change extraction path, files already extracted" + ) + + self.extraction_path = path + + def cleanup_resources(self, force=False): + """ + Delete all extracted resource files and directories, returning a list + of the file and directory names that could not be successfully removed. + This function does not have any concurrency protection, so it should + generally only be called when the extraction path is a temporary + directory exclusive to a single process. This method is not + automatically called; you must call it explicitly or register it as an + ``atexit`` function if you wish to ensure cleanup of a temporary + directory used for extractions. + """ + # XXX + + +def get_default_cache(): + """ + Return the ``PYTHON_EGG_CACHE`` environment variable + or a platform-relevant user cache dir for an app + named "Python-Eggs". + """ + return ( + os.environ.get('PYTHON_EGG_CACHE') + or appdirs.user_cache_dir(appname='Python-Eggs') + ) + + +def safe_name(name): + """Convert an arbitrary string to a standard distribution name + + Any runs of non-alphanumeric/. characters are replaced with a single '-'. + """ + return re.sub('[^A-Za-z0-9.]+', '-', name) + + +def safe_version(version): + """ + Convert an arbitrary string to a standard version string + """ + try: + # normalize the version + return str(packaging.version.Version(version)) + except packaging.version.InvalidVersion: + version = version.replace(' ', '.') + return re.sub('[^A-Za-z0-9.]+', '-', version) + + +def safe_extra(extra): + """Convert an arbitrary string to a standard 'extra' name + + Any runs of non-alphanumeric characters are replaced with a single '_', + and the result is always lowercased. + """ + return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower() + + +def to_filename(name): + """Convert a project or version name to its filename-escaped form + + Any '-' characters are currently replaced with '_'. + """ + return name.replace('-', '_') + + +def invalid_marker(text): + """ + Validate text as a PEP 508 environment marker; return an exception + if invalid or False otherwise. + """ + try: + evaluate_marker(text) + except SyntaxError as e: + e.filename = None + e.lineno = None + return e + return False + + +def evaluate_marker(text, extra=None): + """ + Evaluate a PEP 508 environment marker. + Return a boolean indicating the marker result in this environment. + Raise SyntaxError if marker is invalid. + + This implementation uses the 'pyparsing' module. 
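+
+ (In the body below, this actually delegates to ``packaging.markers``.)
+
+ For example (illustrative)::
+
+ evaluate_marker('python_version >= "2.7"') # -> True or False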
+ """ + try: + marker = packaging.markers.Marker(text) + return marker.evaluate() + except packaging.markers.InvalidMarker as e: + raise SyntaxError(e) + + +class NullProvider: + """Try to implement resources and metadata for arbitrary PEP 302 loaders""" + + egg_name = None + egg_info = None + loader = None + + def __init__(self, module): + self.loader = getattr(module, '__loader__', None) + self.module_path = os.path.dirname(getattr(module, '__file__', '')) + + def get_resource_filename(self, manager, resource_name): + return self._fn(self.module_path, resource_name) + + def get_resource_stream(self, manager, resource_name): + return io.BytesIO(self.get_resource_string(manager, resource_name)) + + def get_resource_string(self, manager, resource_name): + return self._get(self._fn(self.module_path, resource_name)) + + def has_resource(self, resource_name): + return self._has(self._fn(self.module_path, resource_name)) + + def _get_metadata_path(self, name): + return self._fn(self.egg_info, name) + + def has_metadata(self, name): + if not self.egg_info: + return self.egg_info + + path = self._get_metadata_path(name) + return self._has(path) + + def get_metadata(self, name): + if not self.egg_info: + return "" + path = self._get_metadata_path(name) + value = self._get(path) + if six.PY2: + return value + try: + return value.decode('utf-8') + except UnicodeDecodeError as exc: + # Include the path in the error message to simplify + # troubleshooting, and without changing the exception type. + exc.reason += ' in {} file at path: {}'.format(name, path) + raise + + def get_metadata_lines(self, name): + return yield_lines(self.get_metadata(name)) + + def resource_isdir(self, resource_name): + return self._isdir(self._fn(self.module_path, resource_name)) + + def metadata_isdir(self, name): + return self.egg_info and self._isdir(self._fn(self.egg_info, name)) + + def resource_listdir(self, resource_name): + return self._listdir(self._fn(self.module_path, resource_name)) + + def metadata_listdir(self, name): + if self.egg_info: + return self._listdir(self._fn(self.egg_info, name)) + return [] + + def run_script(self, script_name, namespace): + script = 'scripts/' + script_name + if not self.has_metadata(script): + raise ResolutionError( + "Script {script!r} not found in metadata at {self.egg_info!r}" + .format(**locals()), + ) + script_text = self.get_metadata(script).replace('\r\n', '\n') + script_text = script_text.replace('\r', '\n') + script_filename = self._fn(self.egg_info, script) + namespace['__file__'] = script_filename + if os.path.exists(script_filename): + source = open(script_filename).read() + code = compile(source, script_filename, 'exec') + exec(code, namespace, namespace) + else: + from linecache import cache + cache[script_filename] = ( + len(script_text), 0, script_text.split('\n'), script_filename + ) + script_code = compile(script_text, script_filename, 'exec') + exec(script_code, namespace, namespace) + + def _has(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _isdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _listdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _fn(self, base, resource_name): + self._validate_resource_path(resource_name) + if resource_name: + return os.path.join(base, *resource_name.split('/')) + return base + + @staticmethod + def 
_validate_resource_path(path): + """ + Validate the resource paths according to the docs. + https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access + + >>> warned = getfixture('recwarn') + >>> warnings.simplefilter('always') + >>> vrp = NullProvider._validate_resource_path + >>> vrp('foo/bar.txt') + >>> bool(warned) + False + >>> vrp('../foo/bar.txt') + >>> bool(warned) + True + >>> warned.clear() + >>> vrp('/foo/bar.txt') + >>> bool(warned) + True + >>> vrp('foo/../../bar.txt') + >>> bool(warned) + True + >>> warned.clear() + >>> vrp('foo/f../bar.txt') + >>> bool(warned) + False + + Windows path separators are straight-up disallowed. + >>> vrp(r'\\foo/bar.txt') + Traceback (most recent call last): + ... + ValueError: Use of .. or absolute path in a resource path \ +is not allowed. + + >>> vrp(r'C:\\foo/bar.txt') + Traceback (most recent call last): + ... + ValueError: Use of .. or absolute path in a resource path \ +is not allowed. + + Blank values are allowed + + >>> vrp('') + >>> bool(warned) + False + + Non-string values are not. + + >>> vrp(None) + Traceback (most recent call last): + ... + AttributeError: ... + """ + invalid = ( + os.path.pardir in path.split(posixpath.sep) or + posixpath.isabs(path) or + ntpath.isabs(path) + ) + if not invalid: + return + + msg = "Use of .. or absolute path in a resource path is not allowed." + + # Aggressively disallow Windows absolute paths + if ntpath.isabs(path) and not posixpath.isabs(path): + raise ValueError(msg) + + # for compatibility, warn; in future + # raise ValueError(msg) + warnings.warn( + msg[:-1] + " and will raise exceptions in a future release.", + DeprecationWarning, + stacklevel=4, + ) + + def _get(self, path): + if hasattr(self.loader, 'get_data'): + return self.loader.get_data(path) + raise NotImplementedError( + "Can't perform this operation for loaders without 'get_data()'" + ) + + +register_loader_type(object, NullProvider) + + +class EggProvider(NullProvider): + """Provider based on a virtual filesystem""" + + def __init__(self, module): + NullProvider.__init__(self, module) + self._setup_prefix() + + def _setup_prefix(self): + # we assume here that our metadata may be nested inside a "basket" + # of multiple eggs; that's why we use module_path instead of .archive + path = self.module_path + old = None + while path != old: + if _is_egg_path(path): + self.egg_name = os.path.basename(path) + self.egg_info = os.path.join(path, 'EGG-INFO') + self.egg_root = path + break + old = path + path, base = os.path.split(path) + + +class DefaultProvider(EggProvider): + """Provides access to package resources in the filesystem""" + + def _has(self, path): + return os.path.exists(path) + + def _isdir(self, path): + return os.path.isdir(path) + + def _listdir(self, path): + return os.listdir(path) + + def get_resource_stream(self, manager, resource_name): + return open(self._fn(self.module_path, resource_name), 'rb') + + def _get(self, path): + with open(path, 'rb') as stream: + return stream.read() + + @classmethod + def _register(cls): + loader_names = 'SourceFileLoader', 'SourcelessFileLoader', + for name in loader_names: + loader_cls = getattr(importlib_machinery, name, type(None)) + register_loader_type(loader_cls, cls) + + +DefaultProvider._register() + + +class EmptyProvider(NullProvider): + """Provider that returns nothing for all requests""" + + module_path = None + + _isdir = _has = lambda self, path: False + + def _get(self, path): + return '' + + def _listdir(self, path): + return [] + + def 
__init__(self): + pass + + +empty_provider = EmptyProvider() + + +class ZipManifests(dict): + """ + zip manifest builder + """ + + @classmethod + def build(cls, path): + """ + Build a dictionary similar to the zipimport directory + caches, except instead of tuples, store ZipInfo objects. + + Use a platform-specific path separator (os.sep) for the path keys + for compatibility with pypy on Windows. + """ + with zipfile.ZipFile(path) as zfile: + items = ( + ( + name.replace('/', os.sep), + zfile.getinfo(name), + ) + for name in zfile.namelist() + ) + return dict(items) + + load = build + + +class MemoizedZipManifests(ZipManifests): + """ + Memoized zipfile manifests. + """ + manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') + + def load(self, path): + """ + Load a manifest at path or return a suitable manifest already loaded. + """ + path = os.path.normpath(path) + mtime = os.stat(path).st_mtime + + if path not in self or self[path].mtime != mtime: + manifest = self.build(path) + self[path] = self.manifest_mod(manifest, mtime) + + return self[path].manifest + + +class ZipProvider(EggProvider): + """Resource support for zips and eggs""" + + eagers = None + _zip_manifests = MemoizedZipManifests() + + def __init__(self, module): + EggProvider.__init__(self, module) + self.zip_pre = self.loader.archive + os.sep + + def _zipinfo_name(self, fspath): + # Convert a virtual filename (full path to file) into a zipfile subpath + # usable with the zipimport directory cache for our target archive + fspath = fspath.rstrip(os.sep) + if fspath == self.loader.archive: + return '' + if fspath.startswith(self.zip_pre): + return fspath[len(self.zip_pre):] + raise AssertionError( + "%s is not a subpath of %s" % (fspath, self.zip_pre) + ) + + def _parts(self, zip_path): + # Convert a zipfile subpath into an egg-relative path part list. 
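+ # (illustrative) for an egg rooted at '/path/foo.egg',
+ # _parts('EGG-INFO/scripts/run') -> ['EGG-INFO', 'scripts', 'run']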
+ # pseudo-fs path + fspath = self.zip_pre + zip_path + if fspath.startswith(self.egg_root + os.sep): + return fspath[len(self.egg_root) + 1:].split(os.sep) + raise AssertionError( + "%s is not a subpath of %s" % (fspath, self.egg_root) + ) + + @property + def zipinfo(self): + return self._zip_manifests.load(self.loader.archive) + + def get_resource_filename(self, manager, resource_name): + if not self.egg_name: + raise NotImplementedError( + "resource_filename() only supported for .egg, not .zip" + ) + # no need to lock for extraction, since we use temp names + zip_path = self._resource_to_zip(resource_name) + eagers = self._get_eager_resources() + if '/'.join(self._parts(zip_path)) in eagers: + for name in eagers: + self._extract_resource(manager, self._eager_to_zip(name)) + return self._extract_resource(manager, zip_path) + + @staticmethod + def _get_date_and_size(zip_stat): + size = zip_stat.file_size + # ymdhms+wday, yday, dst + date_time = zip_stat.date_time + (0, 0, -1) + # 1980 offset already done + timestamp = time.mktime(date_time) + return timestamp, size + + def _extract_resource(self, manager, zip_path): + + if zip_path in self._index(): + for name in self._index()[zip_path]: + last = self._extract_resource( + manager, os.path.join(zip_path, name) + ) + # return the extracted directory name + return os.path.dirname(last) + + timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) + + if not WRITE_SUPPORT: + raise IOError('"os.rename" and "os.unlink" are not supported ' + 'on this platform') + try: + + real_path = manager.get_cache_path( + self.egg_name, self._parts(zip_path) + ) + + if self._is_current(real_path, zip_path): + return real_path + + outf, tmpnam = _mkstemp( + ".$extract", + dir=os.path.dirname(real_path), + ) + os.write(outf, self.loader.get_data(zip_path)) + os.close(outf) + utime(tmpnam, (timestamp, timestamp)) + manager.postprocess(tmpnam, real_path) + + try: + rename(tmpnam, real_path) + + except os.error: + if os.path.isfile(real_path): + if self._is_current(real_path, zip_path): + # the file became current since it was checked above, + # so proceed. 
+ return real_path + # Windows, del old file and retry + elif os.name == 'nt': + unlink(real_path) + rename(tmpnam, real_path) + return real_path + raise + + except os.error: + # report a user-friendly error + manager.extraction_error() + + return real_path + + def _is_current(self, file_path, zip_path): + """ + Return True if the file_path is current for this zip_path + """ + timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) + if not os.path.isfile(file_path): + return False + stat = os.stat(file_path) + if stat.st_size != size or stat.st_mtime != timestamp: + return False + # check that the contents match + zip_contents = self.loader.get_data(zip_path) + with open(file_path, 'rb') as f: + file_contents = f.read() + return zip_contents == file_contents + + def _get_eager_resources(self): + if self.eagers is None: + eagers = [] + for name in ('native_libs.txt', 'eager_resources.txt'): + if self.has_metadata(name): + eagers.extend(self.get_metadata_lines(name)) + self.eagers = eagers + return self.eagers + + def _index(self): + try: + return self._dirindex + except AttributeError: + ind = {} + for path in self.zipinfo: + parts = path.split(os.sep) + while parts: + parent = os.sep.join(parts[:-1]) + if parent in ind: + ind[parent].append(parts[-1]) + break + else: + ind[parent] = [parts.pop()] + self._dirindex = ind + return ind + + def _has(self, fspath): + zip_path = self._zipinfo_name(fspath) + return zip_path in self.zipinfo or zip_path in self._index() + + def _isdir(self, fspath): + return self._zipinfo_name(fspath) in self._index() + + def _listdir(self, fspath): + return list(self._index().get(self._zipinfo_name(fspath), ())) + + def _eager_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.egg_root, resource_name)) + + def _resource_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.module_path, resource_name)) + + +register_loader_type(zipimport.zipimporter, ZipProvider) + + +class FileMetadata(EmptyProvider): + """Metadata handler for standalone PKG-INFO files + + Usage:: + + metadata = FileMetadata("/path/to/PKG-INFO") + + This provider rejects all data and metadata requests except for PKG-INFO, + which is treated as existing, and will be the contents of the file at + the provided location. 
+ """ + + def __init__(self, path): + self.path = path + + def _get_metadata_path(self, name): + return self.path + + def has_metadata(self, name): + return name == 'PKG-INFO' and os.path.isfile(self.path) + + def get_metadata(self, name): + if name != 'PKG-INFO': + raise KeyError("No metadata except PKG-INFO is available") + + with io.open(self.path, encoding='utf-8', errors="replace") as f: + metadata = f.read() + self._warn_on_replacement(metadata) + return metadata + + def _warn_on_replacement(self, metadata): + # Python 2.7 compat for: replacement_char = '�' + replacement_char = b'\xef\xbf\xbd'.decode('utf-8') + if replacement_char in metadata: + tmpl = "{self.path} could not be properly decoded in UTF-8" + msg = tmpl.format(**locals()) + warnings.warn(msg) + + def get_metadata_lines(self, name): + return yield_lines(self.get_metadata(name)) + + +class PathMetadata(DefaultProvider): + """Metadata provider for egg directories + + Usage:: + + # Development eggs: + + egg_info = "/path/to/PackageName.egg-info" + base_dir = os.path.dirname(egg_info) + metadata = PathMetadata(base_dir, egg_info) + dist_name = os.path.splitext(os.path.basename(egg_info))[0] + dist = Distribution(basedir, project_name=dist_name, metadata=metadata) + + # Unpacked egg directories: + + egg_path = "/path/to/PackageName-ver-pyver-etc.egg" + metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) + dist = Distribution.from_filename(egg_path, metadata=metadata) + """ + + def __init__(self, path, egg_info): + self.module_path = path + self.egg_info = egg_info + + +class EggMetadata(ZipProvider): + """Metadata provider for .egg files""" + + def __init__(self, importer): + """Create a metadata provider from a zipimporter""" + + self.zip_pre = importer.archive + os.sep + self.loader = importer + if importer.prefix: + self.module_path = os.path.join(importer.archive, importer.prefix) + else: + self.module_path = importer.archive + self._setup_prefix() + + +_declare_state('dict', _distribution_finders={}) + + +def register_finder(importer_type, distribution_finder): + """Register `distribution_finder` to find distributions in sys.path items + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `distribution_finder` is a callable that, passed a path + item and the importer instance, yields ``Distribution`` instances found on + that path item. See ``pkg_resources.find_on_path`` for an example.""" + _distribution_finders[importer_type] = distribution_finder + + +def find_distributions(path_item, only=False): + """Yield distributions accessible via `path_item`""" + importer = get_importer(path_item) + finder = _find_adapter(_distribution_finders, importer) + return finder(importer, path_item, only) + + +def find_eggs_in_zip(importer, path_item, only=False): + """ + Find eggs in zip files; possibly multiple nested eggs. 
+ """ + if importer.archive.endswith('.whl'): + # wheels are not supported with this finder + # they don't have PKG-INFO metadata, and won't ever contain eggs + return + metadata = EggMetadata(importer) + if metadata.has_metadata('PKG-INFO'): + yield Distribution.from_filename(path_item, metadata=metadata) + if only: + # don't yield nested distros + return + for subitem in metadata.resource_listdir(''): + if _is_egg_path(subitem): + subpath = os.path.join(path_item, subitem) + dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath) + for dist in dists: + yield dist + elif subitem.lower().endswith('.dist-info'): + subpath = os.path.join(path_item, subitem) + submeta = EggMetadata(zipimport.zipimporter(subpath)) + submeta.egg_info = subpath + yield Distribution.from_location(path_item, subitem, submeta) + + +register_finder(zipimport.zipimporter, find_eggs_in_zip) + + +def find_nothing(importer, path_item, only=False): + return () + + +register_finder(object, find_nothing) + + +def _by_version_descending(names): + """ + Given a list of filenames, return them in descending order + by version number. + + >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg' + >>> _by_version_descending(names) + ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar'] + >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg' + >>> _by_version_descending(names) + ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg'] + >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg' + >>> _by_version_descending(names) + ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg'] + """ + def _by_version(name): + """ + Parse each component of the filename + """ + name, ext = os.path.splitext(name) + parts = itertools.chain(name.split('-'), [ext]) + return [packaging.version.parse(part) for part in parts] + + return sorted(names, key=_by_version, reverse=True) + + +def find_on_path(importer, path_item, only=False): + """Yield distributions accessible on a sys.path directory""" + path_item = _normalize_cached(path_item) + + if _is_unpacked_egg(path_item): + yield Distribution.from_filename( + path_item, metadata=PathMetadata( + path_item, os.path.join(path_item, 'EGG-INFO') + ) + ) + return + + entries = safe_listdir(path_item) + + # for performance, before sorting by version, + # screen entries for only those that will yield + # distributions + filtered = ( + entry + for entry in entries + if dist_factory(path_item, entry, only) + ) + + # scan for .egg and .egg-info in directory + path_item_entries = _by_version_descending(filtered) + for entry in path_item_entries: + fullpath = os.path.join(path_item, entry) + factory = dist_factory(path_item, entry, only) + for dist in factory(fullpath): + yield dist + + +def dist_factory(path_item, entry, only): + """ + Return a dist_factory for a path_item and entry + """ + lower = entry.lower() + is_meta = any(map(lower.endswith, ('.egg-info', '.dist-info'))) + return ( + distributions_from_metadata + if is_meta else + find_distributions + if not only and _is_egg_path(entry) else + resolve_egg_link + if not only and lower.endswith('.egg-link') else + NoDists() + ) + + +class NoDists: + """ + >>> bool(NoDists()) + False + + >>> list(NoDists()('anything')) + [] + """ + def __bool__(self): + return False + if six.PY2: + __nonzero__ = __bool__ + + def __call__(self, fullpath): + return iter(()) + + +def safe_listdir(path): + """ + Attempt to list contents of path, but suppress some exceptions. 
+ """ + try: + return os.listdir(path) + except (PermissionError, NotADirectoryError): + pass + except OSError as e: + # Ignore the directory if does not exist, not a directory or + # permission denied + ignorable = ( + e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT) + # Python 2 on Windows needs to be handled this way :( + or getattr(e, "winerror", None) == 267 + ) + if not ignorable: + raise + return () + + +def distributions_from_metadata(path): + root = os.path.dirname(path) + if os.path.isdir(path): + if len(os.listdir(path)) == 0: + # empty metadata dir; skip + return + metadata = PathMetadata(root, path) + else: + metadata = FileMetadata(path) + entry = os.path.basename(path) + yield Distribution.from_location( + root, entry, metadata, precedence=DEVELOP_DIST, + ) + + +def non_empty_lines(path): + """ + Yield non-empty lines from file at path + """ + with open(path) as f: + for line in f: + line = line.strip() + if line: + yield line + + +def resolve_egg_link(path): + """ + Given a path to an .egg-link, resolve distributions + present in the referenced path. + """ + referenced_paths = non_empty_lines(path) + resolved_paths = ( + os.path.join(os.path.dirname(path), ref) + for ref in referenced_paths + ) + dist_groups = map(find_distributions, resolved_paths) + return next(dist_groups, ()) + + +register_finder(pkgutil.ImpImporter, find_on_path) + +if hasattr(importlib_machinery, 'FileFinder'): + register_finder(importlib_machinery.FileFinder, find_on_path) + +_declare_state('dict', _namespace_handlers={}) +_declare_state('dict', _namespace_packages={}) + + +def register_namespace_handler(importer_type, namespace_handler): + """Register `namespace_handler` to declare namespace packages + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `namespace_handler` is a callable like this:: + + def namespace_handler(importer, path_entry, moduleName, module): + # return a path_entry to use for child packages + + Namespace handlers are only called if the importer object has already + agreed that it can handle the relevant path item, and they should only + return a subpath if the module __path__ does not already contain an + equivalent subpath. For an example namespace handler, see + ``pkg_resources.file_ns_handler``. 
+ """ + _namespace_handlers[importer_type] = namespace_handler + + +def _handle_ns(packageName, path_item): + """Ensure that named package includes a subpath of path_item (if needed)""" + + importer = get_importer(path_item) + if importer is None: + return None + + # capture warnings due to #1111 + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + loader = importer.find_module(packageName) + + if loader is None: + return None + module = sys.modules.get(packageName) + if module is None: + module = sys.modules[packageName] = types.ModuleType(packageName) + module.__path__ = [] + _set_parent_ns(packageName) + elif not hasattr(module, '__path__'): + raise TypeError("Not a package:", packageName) + handler = _find_adapter(_namespace_handlers, importer) + subpath = handler(importer, path_item, packageName, module) + if subpath is not None: + path = module.__path__ + path.append(subpath) + loader.load_module(packageName) + _rebuild_mod_path(path, packageName, module) + return subpath + + +def _rebuild_mod_path(orig_path, package_name, module): + """ + Rebuild module.__path__ ensuring that all entries are ordered + corresponding to their sys.path order + """ + sys_path = [_normalize_cached(p) for p in sys.path] + + def safe_sys_path_index(entry): + """ + Workaround for #520 and #513. + """ + try: + return sys_path.index(entry) + except ValueError: + return float('inf') + + def position_in_sys_path(path): + """ + Return the ordinal of the path based on its position in sys.path + """ + path_parts = path.split(os.sep) + module_parts = package_name.count('.') + 1 + parts = path_parts[:-module_parts] + return safe_sys_path_index(_normalize_cached(os.sep.join(parts))) + + new_path = sorted(orig_path, key=position_in_sys_path) + new_path = [_normalize_cached(p) for p in new_path] + + if isinstance(module.__path__, list): + module.__path__[:] = new_path + else: + module.__path__ = new_path + + +def declare_namespace(packageName): + """Declare that package 'packageName' is a namespace package""" + + _imp.acquire_lock() + try: + if packageName in _namespace_packages: + return + + path = sys.path + parent, _, _ = packageName.rpartition('.') + + if parent: + declare_namespace(parent) + if parent not in _namespace_packages: + __import__(parent) + try: + path = sys.modules[parent].__path__ + except AttributeError: + raise TypeError("Not a package:", parent) + + # Track what packages are namespaces, so when new path items are added, + # they can be updated + _namespace_packages.setdefault(parent or None, []).append(packageName) + _namespace_packages.setdefault(packageName, []) + + for path_item in path: + # Ensure all the parent's path items are reflected in the child, + # if they apply + _handle_ns(packageName, path_item) + + finally: + _imp.release_lock() + + +def fixup_namespace_packages(path_item, parent=None): + """Ensure that previously-declared namespace packages include path_item""" + _imp.acquire_lock() + try: + for package in _namespace_packages.get(parent, ()): + subpath = _handle_ns(package, path_item) + if subpath: + fixup_namespace_packages(subpath, package) + finally: + _imp.release_lock() + + +def file_ns_handler(importer, path_item, packageName, module): + """Compute an ns-package subpath for a filesystem or zipfile importer""" + + subpath = os.path.join(path_item, packageName.split('.')[-1]) + normalized = _normalize_cached(subpath) + for item in module.__path__: + if _normalize_cached(item) == normalized: + break + else: + # Only return the path if it's not already there + 
return subpath + + +register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) +register_namespace_handler(zipimport.zipimporter, file_ns_handler) + +if hasattr(importlib_machinery, 'FileFinder'): + register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) + + +def null_ns_handler(importer, path_item, packageName, module): + return None + + +register_namespace_handler(object, null_ns_handler) + + +def normalize_path(filename): + """Normalize a file/dir name for comparison purposes""" + return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename)))) + + +def _cygwin_patch(filename): # pragma: nocover + """ + Contrary to POSIX 2008, on Cygwin, getcwd (3) contains + symlink components. Using + os.path.abspath() works around this limitation. A fix in os.getcwd() + would probably better, in Cygwin even more so, except + that this seems to be by design... + """ + return os.path.abspath(filename) if sys.platform == 'cygwin' else filename + + +def _normalize_cached(filename, _cache={}): + try: + return _cache[filename] + except KeyError: + _cache[filename] = result = normalize_path(filename) + return result + + +def _is_egg_path(path): + """ + Determine if given path appears to be an egg. + """ + return path.lower().endswith('.egg') + + +def _is_unpacked_egg(path): + """ + Determine if given path appears to be an unpacked egg. + """ + return ( + _is_egg_path(path) and + os.path.isfile(os.path.join(path, 'EGG-INFO', 'PKG-INFO')) + ) + + +def _set_parent_ns(packageName): + parts = packageName.split('.') + name = parts.pop() + if parts: + parent = '.'.join(parts) + setattr(sys.modules[parent], name, sys.modules[packageName]) + + +def yield_lines(strs): + """Yield non-empty/non-comment lines of a string or sequence""" + if isinstance(strs, six.string_types): + for s in strs.splitlines(): + s = s.strip() + # skip blank lines/comments + if s and not s.startswith('#'): + yield s + else: + for ss in strs: + for s in yield_lines(ss): + yield s + + +MODULE = re.compile(r"\w+(\.\w+)*$").match +EGG_NAME = re.compile( + r""" + (?P<name>[^-]+) ( + -(?P<ver>[^-]+) ( + -py(?P<pyver>[^-]+) ( + -(?P<plat>.+) + )? + )? + )? + """, + re.VERBOSE | re.IGNORECASE, +).match + + +class EntryPoint: + """Object representing an advertised importable object""" + + def __init__(self, name, module_name, attrs=(), extras=(), dist=None): + if not MODULE(module_name): + raise ValueError("Invalid module name", module_name) + self.name = name + self.module_name = module_name + self.attrs = tuple(attrs) + self.extras = tuple(extras) + self.dist = dist + + def __str__(self): + s = "%s = %s" % (self.name, self.module_name) + if self.attrs: + s += ':' + '.'.join(self.attrs) + if self.extras: + s += ' [%s]' % ','.join(self.extras) + return s + + def __repr__(self): + return "EntryPoint.parse(%r)" % str(self) + + def load(self, require=True, *args, **kwargs): + """ + Require packages for this EntryPoint, then resolve it. + """ + if not require or args or kwargs: + warnings.warn( + "Parameters to load are deprecated. Call .resolve and " + ".require separately.", + PkgResourcesDeprecationWarning, + stacklevel=2, + ) + if require: + self.require(*args, **kwargs) + return self.resolve() + + def resolve(self): + """ + Resolve the entry point from its module and attrs. 
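+
+ For example (illustrative; ``mypkg.cli:main`` is hypothetical)::
+
+ ep = EntryPoint.parse('main = mypkg.cli:main')
+ main = ep.resolve() # imports mypkg.cli, returns its 'main'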
+ """ + module = __import__(self.module_name, fromlist=['__name__'], level=0) + try: + return functools.reduce(getattr, self.attrs, module) + except AttributeError as exc: + raise ImportError(str(exc)) + + def require(self, env=None, installer=None): + if self.extras and not self.dist: + raise UnknownExtra("Can't require() without a distribution", self) + + # Get the requirements for this entry point with all its extras and + # then resolve them. We have to pass `extras` along when resolving so + # that the working set knows what extras we want. Otherwise, for + # dist-info distributions, the working set will assume that the + # requirements for that extra are purely optional and skip over them. + reqs = self.dist.requires(self.extras) + items = working_set.resolve(reqs, env, installer, extras=self.extras) + list(map(working_set.add, items)) + + pattern = re.compile( + r'\s*' + r'(?P<name>.+?)\s*' + r'=\s*' + r'(?P<module>[\w.]+)\s*' + r'(:\s*(?P<attr>[\w.]+))?\s*' + r'(?P<extras>\[.*\])?\s*$' + ) + + @classmethod + def parse(cls, src, dist=None): + """Parse a single entry point from string `src` + + Entry point syntax follows the form:: + + name = some.module:some.attr [extra1, extra2] + + The entry name and module name are required, but the ``:attrs`` and + ``[extras]`` parts are optional + """ + m = cls.pattern.match(src) + if not m: + msg = "EntryPoint must be in 'name=module:attrs [extras]' format" + raise ValueError(msg, src) + res = m.groupdict() + extras = cls._parse_extras(res['extras']) + attrs = res['attr'].split('.') if res['attr'] else () + return cls(res['name'], res['module'], attrs, extras, dist) + + @classmethod + def _parse_extras(cls, extras_spec): + if not extras_spec: + return () + req = Requirement.parse('x' + extras_spec) + if req.specs: + raise ValueError() + return req.extras + + @classmethod + def parse_group(cls, group, lines, dist=None): + """Parse an entry point group""" + if not MODULE(group): + raise ValueError("Invalid group name", group) + this = {} + for line in yield_lines(lines): + ep = cls.parse(line, dist) + if ep.name in this: + raise ValueError("Duplicate entry point", group, ep.name) + this[ep.name] = ep + return this + + @classmethod + def parse_map(cls, data, dist=None): + """Parse a map of entry point groups""" + if isinstance(data, dict): + data = data.items() + else: + data = split_sections(data) + maps = {} + for group, lines in data: + if group is None: + if not lines: + continue + raise ValueError("Entry points must be listed in groups") + group = group.strip() + if group in maps: + raise ValueError("Duplicate group name", group) + maps[group] = cls.parse_group(group, lines, dist) + return maps + + +def _remove_md5_fragment(location): + if not location: + return '' + parsed = urllib.parse.urlparse(location) + if parsed[-1].startswith('md5='): + return urllib.parse.urlunparse(parsed[:-1] + ('',)) + return location + + +def _version_from_file(lines): + """ + Given an iterable of lines from a Metadata file, return + the value of the Version field, if present, or None otherwise. 
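+
+    For example (illustrative), an input line ``"Version: 1.2.3"`` yields
+    ``"1.2.3"``; the field name is matched case-insensitively and the
+    value is normalized through ``safe_version()``.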
+ """ + def is_version_line(line): + return line.lower().startswith('version:') + version_lines = filter(is_version_line, lines) + line = next(iter(version_lines), '') + _, _, value = line.partition(':') + return safe_version(value.strip()) or None + + +class Distribution: + """Wrap an actual or potential sys.path entry w/metadata""" + PKG_INFO = 'PKG-INFO' + + def __init__( + self, location=None, metadata=None, project_name=None, + version=None, py_version=PY_MAJOR, platform=None, + precedence=EGG_DIST): + self.project_name = safe_name(project_name or 'Unknown') + if version is not None: + self._version = safe_version(version) + self.py_version = py_version + self.platform = platform + self.location = location + self.precedence = precedence + self._provider = metadata or empty_provider + + @classmethod + def from_location(cls, location, basename, metadata=None, **kw): + project_name, version, py_version, platform = [None] * 4 + basename, ext = os.path.splitext(basename) + if ext.lower() in _distributionImpl: + cls = _distributionImpl[ext.lower()] + + match = EGG_NAME(basename) + if match: + project_name, version, py_version, platform = match.group( + 'name', 'ver', 'pyver', 'plat' + ) + return cls( + location, metadata, project_name=project_name, version=version, + py_version=py_version, platform=platform, **kw + )._reload_version() + + def _reload_version(self): + return self + + @property + def hashcmp(self): + return ( + self.parsed_version, + self.precedence, + self.key, + _remove_md5_fragment(self.location), + self.py_version or '', + self.platform or '', + ) + + def __hash__(self): + return hash(self.hashcmp) + + def __lt__(self, other): + return self.hashcmp < other.hashcmp + + def __le__(self, other): + return self.hashcmp <= other.hashcmp + + def __gt__(self, other): + return self.hashcmp > other.hashcmp + + def __ge__(self, other): + return self.hashcmp >= other.hashcmp + + def __eq__(self, other): + if not isinstance(other, self.__class__): + # It's not a Distribution, so they are not equal + return False + return self.hashcmp == other.hashcmp + + def __ne__(self, other): + return not self == other + + # These properties have to be lazy so that we don't have to load any + # metadata until/unless it's actually needed. (i.e., some distributions + # may not know their name or version without loading PKG-INFO) + + @property + def key(self): + try: + return self._key + except AttributeError: + self._key = key = self.project_name.lower() + return key + + @property + def parsed_version(self): + if not hasattr(self, "_parsed_version"): + self._parsed_version = parse_version(self.version) + + return self._parsed_version + + def _warn_legacy_version(self): + LV = packaging.version.LegacyVersion + is_legacy = isinstance(self._parsed_version, LV) + if not is_legacy: + return + + # While an empty version is technically a legacy version and + # is not a valid PEP 440 version, it's also unlikely to + # actually come from someone and instead it is more likely that + # it comes from setuptools attempting to parse a filename and + # including it in the list. So for that we'll gate this warning + # on if the version is anything at all or not. + if not self.version: + return + + tmpl = textwrap.dedent(""" + '{project_name} ({version})' is being parsed as a legacy, + non PEP 440, + version. You may find odd behavior and sort order. + In particular it will be sorted as less than 0.0. It + is recommended to migrate to PEP 440 compatible + versions. 
+ """).strip().replace('\n', ' ') + + warnings.warn(tmpl.format(**vars(self)), PEP440Warning) + + @property + def version(self): + try: + return self._version + except AttributeError: + version = self._get_version() + if version is None: + path = self._get_metadata_path_for_display(self.PKG_INFO) + msg = ( + "Missing 'Version:' header and/or {} file at path: {}" + ).format(self.PKG_INFO, path) + raise ValueError(msg, self) + + return version + + @property + def _dep_map(self): + """ + A map of extra to its list of (direct) requirements + for this distribution, including the null extra. + """ + try: + return self.__dep_map + except AttributeError: + self.__dep_map = self._filter_extras(self._build_dep_map()) + return self.__dep_map + + @staticmethod + def _filter_extras(dm): + """ + Given a mapping of extras to dependencies, strip off + environment markers and filter out any dependencies + not matching the markers. + """ + for extra in list(filter(None, dm)): + new_extra = extra + reqs = dm.pop(extra) + new_extra, _, marker = extra.partition(':') + fails_marker = marker and ( + invalid_marker(marker) + or not evaluate_marker(marker) + ) + if fails_marker: + reqs = [] + new_extra = safe_extra(new_extra) or None + + dm.setdefault(new_extra, []).extend(reqs) + return dm + + def _build_dep_map(self): + dm = {} + for name in 'requires.txt', 'depends.txt': + for extra, reqs in split_sections(self._get_metadata(name)): + dm.setdefault(extra, []).extend(parse_requirements(reqs)) + return dm + + def requires(self, extras=()): + """List of Requirements needed for this distro if `extras` are used""" + dm = self._dep_map + deps = [] + deps.extend(dm.get(None, ())) + for ext in extras: + try: + deps.extend(dm[safe_extra(ext)]) + except KeyError: + raise UnknownExtra( + "%s has no such extra feature %r" % (self, ext) + ) + return deps + + def _get_metadata_path_for_display(self, name): + """ + Return the path to the given metadata file, if available. + """ + try: + # We need to access _get_metadata_path() on the provider object + # directly rather than through this class's __getattr__() + # since _get_metadata_path() is marked private. + path = self._provider._get_metadata_path(name) + + # Handle exceptions e.g. in case the distribution's metadata + # provider doesn't support _get_metadata_path(). 
+ except Exception: + return '[could not detect]' + + return path + + def _get_metadata(self, name): + if self.has_metadata(name): + for line in self.get_metadata_lines(name): + yield line + + def _get_version(self): + lines = self._get_metadata(self.PKG_INFO) + version = _version_from_file(lines) + + return version + + def activate(self, path=None, replace=False): + """Ensure distribution is importable on `path` (default=sys.path)""" + if path is None: + path = sys.path + self.insert_on(path, replace=replace) + if path is sys.path: + fixup_namespace_packages(self.location) + for pkg in self._get_metadata('namespace_packages.txt'): + if pkg in sys.modules: + declare_namespace(pkg) + + def egg_name(self): + """Return what this distribution's standard .egg filename should be""" + filename = "%s-%s-py%s" % ( + to_filename(self.project_name), to_filename(self.version), + self.py_version or PY_MAJOR + ) + + if self.platform: + filename += '-' + self.platform + return filename + + def __repr__(self): + if self.location: + return "%s (%s)" % (self, self.location) + else: + return str(self) + + def __str__(self): + try: + version = getattr(self, 'version', None) + except ValueError: + version = None + version = version or "[unknown version]" + return "%s %s" % (self.project_name, version) + + def __getattr__(self, attr): + """Delegate all unrecognized public attributes to .metadata provider""" + if attr.startswith('_'): + raise AttributeError(attr) + return getattr(self._provider, attr) + + def __dir__(self): + return list( + set(super(Distribution, self).__dir__()) + | set( + attr for attr in self._provider.__dir__() + if not attr.startswith('_') + ) + ) + + if not hasattr(object, '__dir__'): + # python 2.7 not supported + del __dir__ + + @classmethod + def from_filename(cls, filename, metadata=None, **kw): + return cls.from_location( + _normalize_cached(filename), os.path.basename(filename), metadata, + **kw + ) + + def as_requirement(self): + """Return a ``Requirement`` that matches this distribution exactly""" + if isinstance(self.parsed_version, packaging.version.Version): + spec = "%s==%s" % (self.project_name, self.parsed_version) + else: + spec = "%s===%s" % (self.project_name, self.parsed_version) + + return Requirement.parse(spec) + + def load_entry_point(self, group, name): + """Return the `name` entry point of `group` or raise ImportError""" + ep = self.get_entry_info(group, name) + if ep is None: + raise ImportError("Entry point %r not found" % ((group, name),)) + return ep.load() + + def get_entry_map(self, group=None): + """Return the entry point map for `group`, or the full entry map""" + try: + ep_map = self._ep_map + except AttributeError: + ep_map = self._ep_map = EntryPoint.parse_map( + self._get_metadata('entry_points.txt'), self + ) + if group is not None: + return ep_map.get(group, {}) + return ep_map + + def get_entry_info(self, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return self.get_entry_map(group).get(name) + + def insert_on(self, path, loc=None, replace=False): + """Ensure self.location is on path + + If replace=False (default): + - If location is already in path anywhere, do nothing. + - Else: + - If it's an egg and its parent directory is on path, + insert just ahead of the parent. + - Else: add to the end of path. + If replace=True: + - If location is already on path anywhere (not eggs) + or higher priority than its parent (eggs) + do nothing. 
+ - Else: + - If it's an egg and its parent directory is on path, + insert just ahead of the parent, + removing any lower-priority entries. + - Else: add it to the front of path. + """ + + loc = loc or self.location + if not loc: + return + + nloc = _normalize_cached(loc) + bdir = os.path.dirname(nloc) + npath = [(p and _normalize_cached(p) or p) for p in path] + + for p, item in enumerate(npath): + if item == nloc: + if replace: + break + else: + # don't modify path (even removing duplicates) if + # found and not replace + return + elif item == bdir and self.precedence == EGG_DIST: + # if it's an .egg, give it precedence over its directory + # UNLESS it's already been added to sys.path and replace=False + if (not replace) and nloc in npath[p:]: + return + if path is sys.path: + self.check_version_conflict() + path.insert(p, loc) + npath.insert(p, nloc) + break + else: + if path is sys.path: + self.check_version_conflict() + if replace: + path.insert(0, loc) + else: + path.append(loc) + return + + # p is the spot where we found or inserted loc; now remove duplicates + while True: + try: + np = npath.index(nloc, p + 1) + except ValueError: + break + else: + del npath[np], path[np] + # ha! + p = np + + return + + def check_version_conflict(self): + if self.key == 'setuptools': + # ignore the inevitable setuptools self-conflicts :( + return + + nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) + loc = normalize_path(self.location) + for modname in self._get_metadata('top_level.txt'): + if (modname not in sys.modules or modname in nsp + or modname in _namespace_packages): + continue + if modname in ('pkg_resources', 'setuptools', 'site'): + continue + fn = getattr(sys.modules[modname], '__file__', None) + if fn and (normalize_path(fn).startswith(loc) or + fn.startswith(self.location)): + continue + issue_warning( + "Module %s was already imported from %s, but %s is being added" + " to sys.path" % (modname, fn, self.location), + ) + + def has_version(self): + try: + self.version + except ValueError: + issue_warning("Unbuilt egg for " + repr(self)) + return False + return True + + def clone(self, **kw): + """Copy this distribution, substituting in any changed keyword args""" + names = 'project_name version py_version platform location precedence' + for attr in names.split(): + kw.setdefault(attr, getattr(self, attr, None)) + kw.setdefault('metadata', self._provider) + return self.__class__(**kw) + + @property + def extras(self): + return [dep for dep in self._dep_map if dep] + + +class EggInfoDistribution(Distribution): + def _reload_version(self): + """ + Packages installed by distutils (e.g. numpy or scipy), + which uses an old safe_version, and so + their version numbers can get mangled when + converted to filenames (e.g., 1.11.0.dev0+2329eae to + 1.11.0.dev0_2329eae). These distributions will not be + parsed properly + downstream by Distribution and safe_version, so + take an extra step and try to get the version number from + the metadata file itself instead of the filename. + """ + md_version = self._get_version() + if md_version: + self._version = md_version + return self + + +class DistInfoDistribution(Distribution): + """ + Wrap an actual or potential sys.path entry + w/metadata, .dist-info style. 
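+
+    Unlike egg metadata, the version and dependency data here come from
+    the ``METADATA`` file (``Requires-Dist`` / ``Provides-Extra`` headers)
+    rather than from ``PKG-INFO`` and ``requires.txt``.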
+ """ + PKG_INFO = 'METADATA' + EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") + + @property + def _parsed_pkg_info(self): + """Parse and cache metadata""" + try: + return self._pkg_info + except AttributeError: + metadata = self.get_metadata(self.PKG_INFO) + self._pkg_info = email.parser.Parser().parsestr(metadata) + return self._pkg_info + + @property + def _dep_map(self): + try: + return self.__dep_map + except AttributeError: + self.__dep_map = self._compute_dependencies() + return self.__dep_map + + def _compute_dependencies(self): + """Recompute this distribution's dependencies.""" + dm = self.__dep_map = {None: []} + + reqs = [] + # Including any condition expressions + for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: + reqs.extend(parse_requirements(req)) + + def reqs_for_extra(extra): + for req in reqs: + if not req.marker or req.marker.evaluate({'extra': extra}): + yield req + + common = frozenset(reqs_for_extra(None)) + dm[None].extend(common) + + for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: + s_extra = safe_extra(extra.strip()) + dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common) + + return dm + + +_distributionImpl = { + '.egg': Distribution, + '.egg-info': EggInfoDistribution, + '.dist-info': DistInfoDistribution, +} + + +def issue_warning(*args, **kw): + level = 1 + g = globals() + try: + # find the first stack frame that is *not* code in + # the pkg_resources module, to use for the warning + while sys._getframe(level).f_globals is g: + level += 1 + except ValueError: + pass + warnings.warn(stacklevel=level + 1, *args, **kw) + + +class RequirementParseError(ValueError): + def __str__(self): + return ' '.join(self.args) + + +def parse_requirements(strs): + """Yield ``Requirement`` objects for each specification in `strs` + + `strs` must be a string, or a (possibly-nested) iterable thereof. + """ + # create a steppable iterator, so we can handle \-continuations + lines = iter(yield_lines(strs)) + + for line in lines: + # Drop comments -- a hash without a space may be in a URL. + if ' #' in line: + line = line[:line.find(' #')] + # If there is a line continuation, drop it, and append the next line. + if line.endswith('\\'): + line = line[:-2].strip() + try: + line += next(lines) + except StopIteration: + return + yield Requirement(line) + + +class Requirement(packaging.requirements.Requirement): + def __init__(self, requirement_string): + """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" + try: + super(Requirement, self).__init__(requirement_string) + except packaging.requirements.InvalidRequirement as e: + raise RequirementParseError(str(e)) + self.unsafe_name = self.name + project_name = safe_name(self.name) + self.project_name, self.key = project_name, project_name.lower() + self.specs = [ + (spec.operator, spec.version) for spec in self.specifier] + self.extras = tuple(map(safe_extra, self.extras)) + self.hashCmp = ( + self.key, + self.url, + self.specifier, + frozenset(self.extras), + str(self.marker) if self.marker else None, + ) + self.__hash = hash(self.hashCmp) + + def __eq__(self, other): + return ( + isinstance(other, Requirement) and + self.hashCmp == other.hashCmp + ) + + def __ne__(self, other): + return not self == other + + def __contains__(self, item): + if isinstance(item, Distribution): + if item.key != self.key: + return False + + item = item.version + + # Allow prereleases always in order to match the previous behavior of + # this method. 
In the future this should be smarter and follow PEP 440 + # more accurately. + return self.specifier.contains(item, prereleases=True) + + def __hash__(self): + return self.__hash + + def __repr__(self): + return "Requirement.parse(%r)" % str(self) + + @staticmethod + def parse(s): + req, = parse_requirements(s) + return req + + +def _always_object(classes): + """ + Ensure object appears in the mro even + for old-style classes. + """ + if object not in classes: + return classes + (object,) + return classes + + +def _find_adapter(registry, ob): + """Return an adapter factory for `ob` from `registry`""" + types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob)))) + for t in types: + if t in registry: + return registry[t] + + +def ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + py31compat.makedirs(dirname, exist_ok=True) + + +def _bypass_ensure_directory(path): + """Sandbox-bypassing version of ensure_directory()""" + if not WRITE_SUPPORT: + raise IOError('"os.mkdir" not supported on this platform.') + dirname, filename = split(path) + if dirname and filename and not isdir(dirname): + _bypass_ensure_directory(dirname) + try: + mkdir(dirname, 0o755) + except FileExistsError: + pass + + +def split_sections(s): + """Split a string or iterable thereof into (section, content) pairs + + Each ``section`` is a stripped version of the section header ("[section]") + and each ``content`` is a list of stripped lines excluding blank lines and + comment-only lines. If there are any such lines before the first section + header, they're returned in a first ``section`` of ``None``. + """ + section = None + content = [] + for line in yield_lines(s): + if line.startswith("["): + if line.endswith("]"): + if section or content: + yield section, content + section = line[1:-1].strip() + content = [] + else: + raise ValueError("Invalid section heading", line) + else: + content.append(line) + + # wrap up last segment + yield section, content + + +def _mkstemp(*args, **kw): + old_open = os.open + try: + # temporarily bypass sandboxing + os.open = os_open + return tempfile.mkstemp(*args, **kw) + finally: + # and then put it back + os.open = old_open + + +# Silence the PEP440Warning by default, so that end users don't get hit by it +# randomly just because they use pkg_resources. We want to append the rule +# because we want earlier uses of filterwarnings to take precedence over this +# one. +warnings.filterwarnings("ignore", category=PEP440Warning, append=True) + + +# from jaraco.functools 1.3 +def _call_aside(f, *args, **kwargs): + f(*args, **kwargs) + return f + + +@_call_aside +def _initialize(g=globals()): + "Set up global resource manager (deliberately not state-saved)" + manager = ResourceManager() + g['_manager'] = manager + g.update( + (name, getattr(manager, name)) + for name in dir(manager) + if not name.startswith('_') + ) + + +@_call_aside +def _initialize_master_working_set(): + """ + Prepare the master working set and make the ``require()`` + API available. + + This function has explicit effects on the global state + of pkg_resources. It is intended to be invoked once at + the initialization of this module. + + Invocation by other packages is unsupported and done + at their own risk. 
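+
+    A minimal usage sketch of the API this sets up (assuming the named
+    distribution is installed)::
+
+        import pkg_resources
+        pkg_resources.require('setuptools')  # resolve + activate
+        for ep in pkg_resources.iter_entry_points('console_scripts'):
+            print(ep.name)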
+ """ + working_set = WorkingSet._build_master() + _declare_state('object', working_set=working_set) + + require = working_set.require + iter_entry_points = working_set.iter_entry_points + add_activation_listener = working_set.subscribe + run_script = working_set.run_script + # backward compatibility + run_main = run_script + # Activate all distributions already on sys.path with replace=False and + # ensure that all distributions added to the working set in the future + # (e.g. by calling ``require()``) will get activated as well, + # with higher priority (replace=True). + tuple( + dist.activate(replace=False) + for dist in working_set + ) + add_activation_listener( + lambda dist: dist.activate(replace=True), + existing=False, + ) + working_set.entries = [] + # match order + list(map(working_set.add_entry, sys.path)) + globals().update(locals()) + +class PkgResourcesDeprecationWarning(Warning): + """ + Base class for warning about deprecations in ``pkg_resources`` + + This class is not derived from ``DeprecationWarning``, and as such is + visible by default. + """ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5caf50a9e3be1902e4470c265d5fadb07a4ed586 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pkg_resources/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/__pycache__/py31compat.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/__pycache__/py31compat.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6b81086940139423ac05eb69da0e4cfdd2b264e2 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pkg_resources/__pycache__/py31compat.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/__init__.py b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..07458be9ddde30bbe39d17879abe67f5ad5d669e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/appdirs.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/appdirs.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..20c3f615ba6d8ca120234843e6fa7b98ab1bf652 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/appdirs.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/pyparsing.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/pyparsing.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aed76d789fdc276bb4796df5eaaac45a48a1184b Binary files /dev/null and 
b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/pyparsing.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/six.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/six.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2c2b7c4436a252dd2e1538bcccd0ff1571f043f4 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/__pycache__/six.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/appdirs.py b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/appdirs.py new file mode 100644 index 0000000000000000000000000000000000000000..ae67001af8b661373edeee2eb327b9f63e630d62 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/appdirs.py @@ -0,0 +1,608 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2005-2010 ActiveState Software Inc. +# Copyright (c) 2013 Eddy Petrișor + +"""Utilities for determining application-specific dirs. + +See <http://github.com/ActiveState/appdirs> for details and usage. +""" +# Dev Notes: +# - MSDN on where to store app data files: +# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120 +# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html +# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html + +__version_info__ = (1, 4, 3) +__version__ = '.'.join(map(str, __version_info__)) + + +import sys +import os + +PY3 = sys.version_info[0] == 3 + +if PY3: + unicode = str + +if sys.platform.startswith('java'): + import platform + os_name = platform.java_ver()[3][0] + if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc. + system = 'win32' + elif os_name.startswith('Mac'): # "Mac OS X", etc. + system = 'darwin' + else: # "Linux", "SunOS", "FreeBSD", etc. + # Setting this to "linux2" is not ideal, but only Windows or Mac + # are actually checked for and the rest of the module expects + # *sys.platform* style strings. + system = 'linux2' +else: + system = sys.platform + + + +def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> + for a discussion of issues. 
+ + Typical user data directories are: + Mac OS X: ~/Library/Application Support/<AppName> + Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined + Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName> + Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName> + Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName> + Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName> + + For Unix, we follow the XDG spec and support $XDG_DATA_HOME. + That means, by default "~/.local/share/<AppName>". + """ + if system == "win32": + if appauthor is None: + appauthor = appname + const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + path = os.path.normpath(_get_win_folder(const)) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('~/Library/Application Support/') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): + r"""Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of data dirs should be + returned. By default, the first item from XDG_DATA_DIRS is + returned, or '/usr/local/share/<AppName>', + if XDG_DATA_DIRS is not set + + Typical site data directories are: + Mac OS X: /Library/Application Support/<AppName> + Unix: /usr/local/share/<AppName> or /usr/share/<AppName> + Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName> + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7. + + For Unix, this is using the $XDG_DATA_DIRS[0] default. + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. 
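+
+    Illustrative call (result varies by platform)::
+
+        >>> site_data_dir("MyApp", "MyCompany")  # doctest: +SKIP
+        '/usr/local/share/MyApp'
+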
+ """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + elif system == 'darwin': + path = os.path.expanduser('/Library/Application Support') + if appname: + path = os.path.join(path, appname) + else: + # XDG default for $XDG_DATA_DIRS + # only first, if multipath is False + path = os.getenv('XDG_DATA_DIRS', + os.pathsep.join(['/usr/local/share', '/usr/share'])) + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + if appname and version: + path = os.path.join(path, version) + return path + + +def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific config dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> + for a discussion of issues. + + Typical user config directories are: + Mac OS X: same as user_data_dir + Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. + That means, by default "~/.config/<AppName>". + """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): + r"""Return full path to the user-shared data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "multipath" is an optional parameter only applicable to *nix + which indicates that the entire list of config dirs should be + returned. 
By default, the first item from XDG_CONFIG_DIRS is + returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set + + Typical site config directories are: + Mac OS X: same as site_data_dir + Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in + $XDG_CONFIG_DIRS + Win *: same as site_data_dir + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) + + For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False + + WARNING: Do not use this on Windows. See the Vista-Fail note above for why. + """ + if system in ["win32", "darwin"]: + path = site_data_dir(appname, appauthor) + if appname and version: + path = os.path.join(path, version) + else: + # XDG default for $XDG_CONFIG_DIRS + # only first, if multipath is False + path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') + pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] + if appname: + if version: + appname = os.path.join(appname, version) + pathlist = [os.sep.join([x, appname]) for x in pathlist] + + if multipath: + path = os.pathsep.join(pathlist) + else: + path = pathlist[0] + return path + + +def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific cache dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Cache" to the base app data dir for Windows. See + discussion below. + + Typical user cache directories are: + Mac OS X: ~/Library/Caches/<AppName> + Unix: ~/.cache/<AppName> (XDG default) + Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache + Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache + + On Windows the only suggestion in the MSDN docs is that local settings go in + the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming + app data dir (the default returned by `user_data_dir` above). Apps typically + put cache data somewhere *under* the given dir here. Some examples: + ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache + ...\Acme\SuperApp\Cache\1.0 + OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. + This can be disabled with the `opinion=False` option. 
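+
+    Illustrative call (result varies by platform)::
+
+        >>> user_cache_dir("MyApp", "MyCompany")  # doctest: +SKIP
+        '/home/<username>/.cache/MyApp'
+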
+ """ + if system == "win32": + if appauthor is None: + appauthor = appname + path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + if appname: + if appauthor is not False: + path = os.path.join(path, appauthor, appname) + else: + path = os.path.join(path, appname) + if opinion: + path = os.path.join(path, "Cache") + elif system == 'darwin': + path = os.path.expanduser('~/Library/Caches') + if appname: + path = os.path.join(path, appname) + else: + path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def user_state_dir(appname=None, appauthor=None, version=None, roaming=False): + r"""Return full path to the user-specific state dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> + for a discussion of issues. + + Typical user state directories are: + Mac OS X: same as user_data_dir + Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined + Win *: same as user_data_dir + + For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state> + to extend the XDG spec and support $XDG_STATE_HOME. + + That means, by default "~/.local/state/<AppName>". + """ + if system in ["win32", "darwin"]: + path = user_data_dir(appname, appauthor, None, roaming) + else: + path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state")) + if appname: + path = os.path.join(path, appname) + if appname and version: + path = os.path.join(path, version) + return path + + +def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): + r"""Return full path to the user-specific log dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "appauthor" (only used on Windows) is the name of the + appauthor or distributing body for this application. Typically + it is the owning company name. This falls back to appname. You may + pass False to disable it. + "version" is an optional version path element to append to the + path. You might want to use this if you want multiple versions + of your app to be able to run independently. If used, this + would typically be "<major>.<minor>". + Only applied when appname is present. + "opinion" (boolean) can be False to disable the appending of + "Logs" to the base app data dir for Windows, and "log" to the + base cache dir for Unix. See discussion below. 
+ + Typical user log directories are: + Mac OS X: ~/Library/Logs/<AppName> + Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined + Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs + Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs + + On Windows the only suggestion in the MSDN docs is that local settings + go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in + examples of what some windows apps use for a logs dir.) + + OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` + value for Windows and appends "log" to the user cache dir for Unix. + This can be disabled with the `opinion=False` option. + """ + if system == "darwin": + path = os.path.join( + os.path.expanduser('~/Library/Logs'), + appname) + elif system == "win32": + path = user_data_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "Logs") + else: + path = user_cache_dir(appname, appauthor, version) + version = False + if opinion: + path = os.path.join(path, "log") + if appname and version: + path = os.path.join(path, version) + return path + + +class AppDirs(object): + """Convenience wrapper for getting application dirs.""" + def __init__(self, appname=None, appauthor=None, version=None, + roaming=False, multipath=False): + self.appname = appname + self.appauthor = appauthor + self.version = version + self.roaming = roaming + self.multipath = multipath + + @property + def user_data_dir(self): + return user_data_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_data_dir(self): + return site_data_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_config_dir(self): + return user_config_dir(self.appname, self.appauthor, + version=self.version, roaming=self.roaming) + + @property + def site_config_dir(self): + return site_config_dir(self.appname, self.appauthor, + version=self.version, multipath=self.multipath) + + @property + def user_cache_dir(self): + return user_cache_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_state_dir(self): + return user_state_dir(self.appname, self.appauthor, + version=self.version) + + @property + def user_log_dir(self): + return user_log_dir(self.appname, self.appauthor, + version=self.version) + + +#---- internal support stuff + +def _get_win_folder_from_registry(csidl_name): + """This is a fallback technique at best. I'm not sure if using the + registry for this guarantees us the correct answer for all CSIDL_* + names. + """ + if PY3: + import winreg as _winreg + else: + import _winreg + + shell_folder_name = { + "CSIDL_APPDATA": "AppData", + "CSIDL_COMMON_APPDATA": "Common AppData", + "CSIDL_LOCAL_APPDATA": "Local AppData", + }[csidl_name] + + key = _winreg.OpenKey( + _winreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" + ) + dir, type = _winreg.QueryValueEx(key, shell_folder_name) + return dir + + +def _get_win_folder_with_pywin32(csidl_name): + from win32com.shell import shellcon, shell + dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0) + # Try to make this a unicode path because SHGetFolderPath does + # not return unicode strings when there is unicode data in the + # path. + try: + dir = unicode(dir) + + # Downgrade to short path name if have highbit chars. See + # <http://bugs.activestate.com/show_bug.cgi?id=85099>. 
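+        # (illustrative note) "highbit chars" here means any character
+        # outside Latin-1 (ord(c) > 255), per the check below.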
+ has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + try: + import win32api + dir = win32api.GetShortPathName(dir) + except ImportError: + pass + except UnicodeError: + pass + return dir + + +def _get_win_folder_with_ctypes(csidl_name): + import ctypes + + csidl_const = { + "CSIDL_APPDATA": 26, + "CSIDL_COMMON_APPDATA": 35, + "CSIDL_LOCAL_APPDATA": 28, + }[csidl_name] + + buf = ctypes.create_unicode_buffer(1024) + ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) + + # Downgrade to short path name if have highbit chars. See + # <http://bugs.activestate.com/show_bug.cgi?id=85099>. + has_high_char = False + for c in buf: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf2 = ctypes.create_unicode_buffer(1024) + if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): + buf = buf2 + + return buf.value + +def _get_win_folder_with_jna(csidl_name): + import array + from com.sun import jna + from com.sun.jna.platform import win32 + + buf_size = win32.WinDef.MAX_PATH * 2 + buf = array.zeros('c', buf_size) + shell = win32.Shell32.INSTANCE + shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf) + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + # Downgrade to short path name if have highbit chars. See + # <http://bugs.activestate.com/show_bug.cgi?id=85099>. + has_high_char = False + for c in dir: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf = array.zeros('c', buf_size) + kernel = win32.Kernel32.INSTANCE + if kernel.GetShortPathName(dir, buf, buf_size): + dir = jna.Native.toString(buf.tostring()).rstrip("\0") + + return dir + +if system == "win32": + try: + import win32com.shell + _get_win_folder = _get_win_folder_with_pywin32 + except ImportError: + try: + from ctypes import windll + _get_win_folder = _get_win_folder_with_ctypes + except ImportError: + try: + import com.sun.jna + _get_win_folder = _get_win_folder_with_jna + except ImportError: + _get_win_folder = _get_win_folder_from_registry + + +#---- self test code + +if __name__ == "__main__": + appname = "MyApp" + appauthor = "MyCompany" + + props = ("user_data_dir", + "user_config_dir", + "user_cache_dir", + "user_state_dir", + "user_log_dir", + "site_data_dir", + "site_config_dir") + + print("-- app dirs %s --" % __version__) + + print("-- app dirs (with optional 'version')") + dirs = AppDirs(appname, appauthor, version="1.0") + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'version')") + dirs = AppDirs(appname, appauthor) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (without optional 'appauthor')") + dirs = AppDirs(appname) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) + + print("\n-- app dirs (with disabled 'appauthor')") + dirs = AppDirs(appname, appauthor=False) + for prop in props: + print("%s: %s" % (prop, getattr(dirs, prop))) diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__about__.py b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__about__.py new file mode 100644 index 0000000000000000000000000000000000000000..95d330ef823aa2e12f7846bc63c0955b25df6029 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__about__.py @@ -0,0 +1,21 @@ +# This file is dual licensed under the terms of the 
Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] + +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "16.8" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD or Apache License, Version 2.0" +__copyright__ = "Copyright 2014-2016 %s" % __author__ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__init__.py b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5ee6220203e5425f900fb5a43676c24ea377c2fa --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__init__.py @@ -0,0 +1,14 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +from .__about__ import ( + __author__, __copyright__, __email__, __license__, __summary__, __title__, + __uri__, __version__ +) + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..81bd184fc229664c3d775ff00e8c292f78f66c25 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..04276d4b5d6c6976ecff7c9e04359ebcf30d9bbb Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..165427f5d316fec1871fad6185f2dbb14ce16a50 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2dbece13171ae1b50a2f52c86af3edee49cfabef Binary files /dev/null and 
b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/markers.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/markers.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c0be576dc171d641691a976c53a3856e356c23c0 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/markers.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1418ff90ec33610081041ba36cc6c06cd5b8295b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8ca735619fcc78893dc165e3e431c328f94124b8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/utils.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..354bf485f88ee6d59f61bb2205a8b5d625bd6774 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/utils.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/version.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/version.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a9f6a8ba63b4d9d6873b09e6b6d672d8f48fc8e6 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/__pycache__/version.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_compat.py b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_compat.py new file mode 100644 index 0000000000000000000000000000000000000000..210bb80b7e7b64cb79f7e7cdf3e42819fe3471fe --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_compat.py @@ -0,0 +1,30 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import sys + + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +# flake8: noqa + +if PY3: + string_types = str, +else: + string_types = basestring, + + +def with_metaclass(meta, *bases): + """ + Create a base class with a metaclass. 
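+
+    Illustrative usage (hypothetical names)::
+
+        class Meta(type):
+            pass
+
+        class Base(object):
+            pass
+
+        class MyClass(with_metaclass(Meta, Base)):
+            pass
+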
+ """ + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_structures.py b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_structures.py new file mode 100644 index 0000000000000000000000000000000000000000..ccc27861c3a4d9efaa3db753c77c4515a627bd98 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/_structures.py @@ -0,0 +1,68 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + + +class Infinity(object): + + def __repr__(self): + return "Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return False + + def __le__(self, other): + return False + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return True + + def __ge__(self, other): + return True + + def __neg__(self): + return NegativeInfinity + +Infinity = Infinity() + + +class NegativeInfinity(object): + + def __repr__(self): + return "-Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return True + + def __le__(self, other): + return True + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return False + + def __ge__(self, other): + return False + + def __neg__(self): + return Infinity + +NegativeInfinity = NegativeInfinity() diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/markers.py b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/markers.py new file mode 100644 index 0000000000000000000000000000000000000000..892e578edd4b992cc2996c31d9deb13af73d62c0 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/markers.py @@ -0,0 +1,301 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import operator +import os +import platform +import sys + +from pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd +from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString +from pkg_resources.extern.pyparsing import Literal as L # noqa + +from ._compat import string_types +from .specifiers import Specifier, InvalidSpecifier + + +__all__ = [ + "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName", + "Marker", "default_environment", +] + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. 
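+
+    For example (illustrative), evaluating a marker such as
+    ``'os_name ~= "posix"'`` can raise this, since ``~=`` is only
+    defined for PEP 440 version comparisons.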
+ """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. + """ + + +class Node(object): + + def __init__(self, value): + self.value = value + + def __str__(self): + return str(self.value) + + def __repr__(self): + return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) + + def serialize(self): + raise NotImplementedError + + +class Variable(Node): + + def serialize(self): + return str(self) + + +class Value(Node): + + def serialize(self): + return '"{0}"'.format(self) + + +class Op(Node): + + def serialize(self): + return str(self) + + +VARIABLE = ( + L("implementation_version") | + L("platform_python_implementation") | + L("implementation_name") | + L("python_full_version") | + L("platform_release") | + L("platform_version") | + L("platform_machine") | + L("platform_system") | + L("python_version") | + L("sys_platform") | + L("os_name") | + L("os.name") | # PEP-345 + L("sys.platform") | # PEP-345 + L("platform.version") | # PEP-345 + L("platform.machine") | # PEP-345 + L("platform.python_implementation") | # PEP-345 + L("python_implementation") | # undocumented setuptools legacy + L("extra") +) +ALIASES = { + 'os.name': 'os_name', + 'sys.platform': 'sys_platform', + 'platform.version': 'platform_version', + 'platform.machine': 'platform_machine', + 'platform.python_implementation': 'platform_python_implementation', + 'python_implementation': 'platform_python_implementation' +} +VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) + +VERSION_CMP = ( + L("===") | + L("==") | + L(">=") | + L("<=") | + L("!=") | + L("~=") | + L(">") | + L("<") +) + +MARKER_OP = VERSION_CMP | L("not in") | L("in") +MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) + +MARKER_VALUE = QuotedString("'") | QuotedString('"') +MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) + +BOOLOP = L("and") | L("or") + +MARKER_VAR = VARIABLE | MARKER_VALUE + +MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) +MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) + +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() + +MARKER_EXPR = Forward() +MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) +MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) + +MARKER = stringStart + MARKER_EXPR + stringEnd + + +def _coerce_parse_result(results): + if isinstance(results, ParseResults): + return [_coerce_parse_result(i) for i in results] + else: + return results + + +def _format_marker(marker, first=True): + assert isinstance(marker, (list, tuple, string_types)) + + # Sometimes we have a structure like [[...]] which is a single item list + # where the single item is itself it's own list. In that case we want skip + # the rest of this function so that we don't get extraneous () on the + # outside. 
+ if (isinstance(marker, list) and len(marker) == 1 and + isinstance(marker[0], (list, tuple))): + return _format_marker(marker[0]) + + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + else: + return "(" + " ".join(inner) + ")" + elif isinstance(marker, tuple): + return " ".join([m.serialize() for m in marker]) + else: + return marker + + +_operators = { + "in": lambda lhs, rhs: lhs in rhs, + "not in": lambda lhs, rhs: lhs not in rhs, + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, +} + + +def _eval_op(lhs, op, rhs): + try: + spec = Specifier("".join([op.serialize(), rhs])) + except InvalidSpecifier: + pass + else: + return spec.contains(lhs) + + oper = _operators.get(op.serialize()) + if oper is None: + raise UndefinedComparison( + "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) + ) + + return oper(lhs, rhs) + + +_undefined = object() + + +def _get_env(environment, name): + value = environment.get(name, _undefined) + + if value is _undefined: + raise UndefinedEnvironmentName( + "{0!r} does not exist in evaluation environment.".format(name) + ) + + return value + + +def _evaluate_markers(markers, environment): + groups = [[]] + + for marker in markers: + assert isinstance(marker, (list, tuple, string_types)) + + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + + if isinstance(lhs, Variable): + lhs_value = _get_env(environment, lhs.value) + rhs_value = rhs.value + else: + lhs_value = lhs.value + rhs_value = _get_env(environment, rhs.value) + + groups[-1].append(_eval_op(lhs_value, op, rhs_value)) + else: + assert marker in ["and", "or"] + if marker == "or": + groups.append([]) + + return any(all(item) for item in groups) + + +def format_full_version(info): + version = '{0.major}.{0.minor}.{0.micro}'.format(info) + kind = info.releaselevel + if kind != 'final': + version += kind[0] + str(info.serial) + return version + + +def default_environment(): + if hasattr(sys, 'implementation'): + iver = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + else: + iver = '0' + implementation_name = '' + + return { + "implementation_name": implementation_name, + "implementation_version": iver, + "os_name": os.name, + "platform_machine": platform.machine(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "platform_python_implementation": platform.python_implementation(), + "python_version": platform.python_version()[:3], + "sys_platform": sys.platform, + } + + +class Marker(object): + + def __init__(self, marker): + try: + self._markers = _coerce_parse_result(MARKER.parseString(marker)) + except ParseException as e: + err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( + marker, marker[e.loc:e.loc + 8]) + raise InvalidMarker(err_str) + + def __str__(self): + return _format_marker(self._markers) + + def __repr__(self): + return "<Marker({0!r})>".format(str(self)) + + def evaluate(self, environment=None): + """Evaluate a marker. + + Return the boolean from evaluating the given marker against the + environment. environment is an optional argument to override all or + part of the determined environment. 
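+
+        A minimal sketch (the first result depends on the interpreter
+        running it):
+
+            >>> Marker("python_version >= '2.7'").evaluate()
+            True
+            >>> Marker("os_name == 'nt'").evaluate({"os_name": "posix"})
+            False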
+ + The environment is determined from the current Python process. + """ + current_environment = default_environment() + if environment is not None: + current_environment.update(environment) + + return _evaluate_markers(self._markers, current_environment) diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/requirements.py b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/requirements.py new file mode 100644 index 0000000000000000000000000000000000000000..0c8c4a3852fd37053fd552846aa7787805c30a48 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/requirements.py @@ -0,0 +1,127 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import string +import re + +from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException +from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine +from pkg_resources.extern.pyparsing import Literal as L # noqa +from pkg_resources.extern.six.moves.urllib import parse as urlparse + +from .markers import MARKER_EXPR, Marker +from .specifiers import LegacySpecifier, Specifier, SpecifierSet + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. + """ + + +ALPHANUM = Word(string.ascii_letters + string.digits) + +LBRACKET = L("[").suppress() +RBRACKET = L("]").suppress() +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() +COMMA = L(",").suppress() +SEMICOLON = L(";").suppress() +AT = L("@").suppress() + +PUNCTUATION = Word("-_.") +IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) +IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) + +NAME = IDENTIFIER("name") +EXTRA = IDENTIFIER + +URI = Regex(r'[^ ]+')("url") +URL = (AT + URI) + +EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) +EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") + +VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) +VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) + +VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY +VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), + joinString=",", adjacent=False)("_raw_spec") +_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) +_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '') + +VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") +VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) + +MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") +MARKER_EXPR.setParseAction( + lambda s, l, t: Marker(s[t._original_start:t._original_end]) +) +MARKER_SEPERATOR = SEMICOLON +MARKER = MARKER_SEPERATOR + MARKER_EXPR + +VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) +URL_AND_MARKER = URL + Optional(MARKER) + +NAMED_REQUIREMENT = \ + NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) + +REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd + + +class Requirement(object): + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? 
Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? + + def __init__(self, requirement_string): + try: + req = REQUIREMENT.parseString(requirement_string) + except ParseException as e: + raise InvalidRequirement( + "Invalid requirement, parse error at \"{0!r}\"".format( + requirement_string[e.loc:e.loc + 8])) + + self.name = req.name + if req.url: + parsed_url = urlparse.urlparse(req.url) + if not (parsed_url.scheme and parsed_url.netloc) or ( + not parsed_url.scheme and not parsed_url.netloc): + raise InvalidRequirement("Invalid URL given") + self.url = req.url + else: + self.url = None + self.extras = set(req.extras.asList() if req.extras else []) + self.specifier = SpecifierSet(req.specifier) + self.marker = req.marker if req.marker else None + + def __str__(self): + parts = [self.name] + + if self.extras: + parts.append("[{0}]".format(",".join(sorted(self.extras)))) + + if self.specifier: + parts.append(str(self.specifier)) + + if self.url: + parts.append("@ {0}".format(self.url)) + + if self.marker: + parts.append("; {0}".format(self.marker)) + + return "".join(parts) + + def __repr__(self): + return "<Requirement({0!r})>".format(str(self)) diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/specifiers.py b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/specifiers.py new file mode 100644 index 0000000000000000000000000000000000000000..7f5a76cfd63f47dcce29b3ea82f59d10f4e8d771 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/specifiers.py @@ -0,0 +1,774 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import abc +import functools +import itertools +import re + +from ._compat import string_types, with_metaclass +from .version import Version, LegacyVersion, parse + + +class InvalidSpecifier(ValueError): + """ + An invalid specifier was found, users should refer to PEP 440. + """ + + +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): + + @abc.abstractmethod + def __str__(self): + """ + Returns the str representation of this Specifier like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Returns a hash value for this Specifier like object. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are equal. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are not equal. + """ + + @abc.abstractproperty + def prereleases(self): + """ + Returns whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @prereleases.setter + def prereleases(self, value): + """ + Sets whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @abc.abstractmethod + def contains(self, item, prereleases=None): + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter(self, iterable, prereleases=None): + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. 
+ """ + + +class _IndividualSpecifier(BaseSpecifier): + + _operators = {} + + def __init__(self, spec="", prereleases=None): + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) + + self._spec = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<{0}({1!r}{2})>".format( + self.__class__.__name__, + str(self), + pre, + ) + + def __str__(self): + return "{0}{1}".format(*self._spec) + + def __hash__(self): + return hash(self._spec) + + def __eq__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec == other._spec + + def __ne__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec != other._spec + + def _get_operator(self, op): + return getattr(self, "_compare_{0}".format(self._operators[op])) + + def _coerce_version(self, version): + if not isinstance(version, (LegacyVersion, Version)): + version = parse(version) + return version + + @property + def operator(self): + return self._spec[0] + + @property + def version(self): + return self._spec[1] + + @property + def prereleases(self): + return self._prereleases + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def __contains__(self, item): + return self.contains(item) + + def contains(self, item, prereleases=None): + # Determine if prereleases are to be allowed or not. + if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version or LegacyVersion, this allows us to have + # a shortcut for ``"2.0" in Specifier(">=2") + item = self._coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not, if we do not support prereleases than we can short circuit + # logic if this version is a prereleases. + if item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. + return self._get_operator(self.operator)(item, self.version) + + def filter(self, iterable, prereleases=None): + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. + for version in iterable: + parsed_version = self._coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later incase nothing + # else matches this specifier. + if (parsed_version.is_prerelease and not + (prereleases or self.prereleases)): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the begining. 
+ else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +class LegacySpecifier(_IndividualSpecifier): + + _regex_str = ( + r""" + (?P<operator>(==|!=|<=|>=|<|>)) + \s* + (?P<version> + [^,;\s)]* # Since this is a "legacy" specifier, and the version + # string can be just about anything, we match everything + # except for whitespace, a semi-colon for marker support, + # a closing paren since versions can be enclosed in + # them, and a comma since it's a version separator. + ) + """ + ) + + _regex = re.compile( + r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + + _operators = { + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + } + + def _coerce_version(self, version): + if not isinstance(version, LegacyVersion): + version = LegacyVersion(str(version)) + return version + + def _compare_equal(self, prospective, spec): + return prospective == self._coerce_version(spec) + + def _compare_not_equal(self, prospective, spec): + return prospective != self._coerce_version(spec) + + def _compare_less_than_equal(self, prospective, spec): + return prospective <= self._coerce_version(spec) + + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= self._coerce_version(spec) + + def _compare_less_than(self, prospective, spec): + return prospective < self._coerce_version(spec) + + def _compare_greater_than(self, prospective, spec): + return prospective > self._coerce_version(spec) + + +def _require_version_compare(fn): + @functools.wraps(fn) + def wrapped(self, prospective, spec): + if not isinstance(prospective, Version): + return False + return fn(self, prospective, spec) + return wrapped + + +class Specifier(_IndividualSpecifier): + + _regex_str = ( + r""" + (?P<operator>(~=|==|!=|<=|>=|<|>|===)) + (?P<version> + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s]* # We just match everything, except for whitespace + # since we are only testing for strict identity. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + + # You cannot use a wild card and a dev or local version + # together so group them with a | and make them optional. + (?: + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + | + \.\* # Wild card syntax of .* + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? 
# epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. + (?<!==|!=|~=) # We have special cases for these + # operators so we want to make sure they + # don't match here. + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + ) + """ + ) + + _regex = re.compile( + r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + + _operators = { + "~=": "compatible", + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + @_require_version_compare + def _compare_compatible(self, prospective, spec): + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore post and dev releases and we want to treat the pre-release as + # it's own separate segment. + prefix = ".".join( + list( + itertools.takewhile( + lambda x: (not x.startswith("post") and not + x.startswith("dev")), + _version_split(spec), + ) + )[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return (self._get_operator(">=")(prospective, spec) and + self._get_operator("==")(prospective, prefix)) + + @_require_version_compare + def _compare_equal(self, prospective, spec): + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + prospective = Version(prospective.public) + # Split the spec out by dots, and pretend that there is an implicit + # dot in between a release segment and a pre-release segment. + spec = _version_split(spec[:-2]) # Remove the trailing .* + + # Split the prospective version out by dots, and pretend that there + # is an implicit dot in between a release segment and a pre-release + # segment. + prospective = _version_split(str(prospective)) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + prospective = prospective[:len(spec)] + + # Pad out our two sides with zeros so that they both equal the same + # length. + spec, prospective = _pad_version(spec, prospective) + else: + # Convert our spec string into a Version + spec = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. 
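+            # An illustrative sketch: "==1.0" accepts "1.0+anything" because
+            # the local segment is stripped here, while "==1.0+foo" only
+            # accepts an exactly matching local segment.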
+ if not spec.local: + prospective = Version(prospective.public) + + return prospective == spec + + @_require_version_compare + def _compare_not_equal(self, prospective, spec): + return not self._compare_equal(prospective, spec) + + @_require_version_compare + def _compare_less_than_equal(self, prospective, spec): + return prospective <= Version(spec) + + @_require_version_compare + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= Version(spec) + + @_require_version_compare + def _compare_less_than(self, prospective, spec): + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if not spec.is_prerelease and prospective.is_prerelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + @_require_version_compare + def _compare_greater_than(self, prospective, spec): + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if not spec.is_postrelease and prospective.is_postrelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is techincally greater than, to match. + if prospective.local is not None: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective, spec): + return str(prospective).lower() == str(spec).lower() + + @property + def prereleases(self): + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "==="]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. 
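+            # An illustrative sketch:
+            #     Specifier("==1.0a1.*").prereleases  -> True
+            #     Specifier(">=1.0").prereleases      -> False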
+ if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if parse(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version): + result = [] + for item in version.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _pad_version(left, right): + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]):]) + right_split.append(right[len(right_split[0]):]) + + # Insert our padding + left_split.insert( + 1, + ["0"] * max(0, len(right_split[0]) - len(left_split[0])), + ) + right_split.insert( + 1, + ["0"] * max(0, len(left_split[0]) - len(right_split[0])), + ) + + return ( + list(itertools.chain(*left_split)), + list(itertools.chain(*right_split)), + ) + + +class SpecifierSet(BaseSpecifier): + + def __init__(self, specifiers="", prereleases=None): + # Split on , to break each indidivual specifier into it's own item, and + # strip each item to remove leading/trailing whitespace. + specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Parsed each individual specifier, attempting first to make it a + # Specifier and falling back to a LegacySpecifier. + parsed = set() + for specifier in specifiers: + try: + parsed.add(Specifier(specifier)) + except InvalidSpecifier: + parsed.add(LegacySpecifier(specifier)) + + # Turn our parsed specifiers into a frozen set and save them for later. + self._specs = frozenset(parsed) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<SpecifierSet({0!r}{1})>".format(str(self), pre) + + def __str__(self): + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self): + return hash(self._specs) + + def __and__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease " + "overrides." 
+ ) + + return specifier + + def __eq__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __ne__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs != other._specs + + def __len__(self): + return len(self._specs) + + def __iter__(self): + return iter(self._specs) + + @property + def prereleases(self): + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def __contains__(self, item): + return self.contains(item) + + def contains(self, item, prereleases=None): + # Ensure that our item is a Version or LegacyVersion instance. + if not isinstance(item, (LegacyVersion, Version)): + item = parse(item) + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. + # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 + if not prereleases and item.is_prerelease: + return False + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True, this is an explicit design decision. + return all( + s.contains(item, prereleases=prereleases) + for s in self._specs + ) + + def filter(self, iterable, prereleases=None): + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iterable + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases, and which will filter out LegacyVersion in general. 
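+        # An illustrative sketch of this fallback:
+        #     list(SpecifierSet("").filter(["1.0", "2.0a1"])) -> ["1.0"]
+        #     list(SpecifierSet("").filter(["2.0a1"]))        -> ["2.0a1"]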
+        else:
+            filtered = []
+            found_prereleases = []
+
+            for item in iterable:
+                # Ensure that we have some kind of Version class for this item.
+                if not isinstance(item, (LegacyVersion, Version)):
+                    parsed_version = parse(item)
+                else:
+                    parsed_version = item
+
+                # Filter out any item which is parsed as a LegacyVersion
+                if isinstance(parsed_version, LegacyVersion):
+                    continue
+
+                # Store any item which is a pre-release for later unless we've
+                # already found a final version or we are accepting prereleases
+                if parsed_version.is_prerelease and not prereleases:
+                    if not filtered:
+                        found_prereleases.append(item)
+                else:
+                    filtered.append(item)
+
+            # If we've found no items except for pre-releases, then we'll go
+            # ahead and use the pre-releases
+            if not filtered and found_prereleases and prereleases is None:
+                return found_prereleases
+
+            return filtered
diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/utils.py b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..942387cef5d75f299a769b1eb43b6c7679e7a3a0
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/utils.py
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import re
+
+
+_canonicalize_regex = re.compile(r"[-_.]+")
+
+
+def canonicalize_name(name):
+    # This is taken from PEP 503.
+    return _canonicalize_regex.sub("-", name).lower()
diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/version.py b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..83b5ee8c5efadf22ce2f16ff08c8a8d75f1eb5df
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/packaging/version.py
@@ -0,0 +1,393 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import collections
+import itertools
+import re
+
+from ._structures import Infinity
+
+
+__all__ = [
+    "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
+]
+
+
+_Version = collections.namedtuple(
+    "_Version",
+    ["epoch", "release", "dev", "pre", "post", "local"],
+)
+
+
+def parse(version):
+    """
+    Parse the given version string and return either a :class:`Version` object
+    or a :class:`LegacyVersion` object depending on if the given version is
+    a valid PEP 440 version or a legacy version.
+    """
+    try:
+        return Version(version)
+    except InvalidVersion:
+        return LegacyVersion(version)
+
+
+class InvalidVersion(ValueError):
+    """
+    An invalid version was found, users should refer to PEP 440.
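+
+    For example, ``Version("french toast")`` raises this exception, whereas
+    ``parse("french toast")`` above falls back to returning a
+    ``LegacyVersion``.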
+ """ + + +class _BaseVersion(object): + + def __hash__(self): + return hash(self._key) + + def __lt__(self, other): + return self._compare(other, lambda s, o: s < o) + + def __le__(self, other): + return self._compare(other, lambda s, o: s <= o) + + def __eq__(self, other): + return self._compare(other, lambda s, o: s == o) + + def __ge__(self, other): + return self._compare(other, lambda s, o: s >= o) + + def __gt__(self, other): + return self._compare(other, lambda s, o: s > o) + + def __ne__(self, other): + return self._compare(other, lambda s, o: s != o) + + def _compare(self, other, method): + if not isinstance(other, _BaseVersion): + return NotImplemented + + return method(self._key, other._key) + + +class LegacyVersion(_BaseVersion): + + def __init__(self, version): + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + + def __str__(self): + return self._version + + def __repr__(self): + return "<LegacyVersion({0})>".format(repr(str(self))) + + @property + def public(self): + return self._version + + @property + def base_version(self): + return self._version + + @property + def local(self): + return None + + @property + def is_prerelease(self): + return False + + @property + def is_postrelease(self): + return False + + +_legacy_version_component_re = re.compile( + r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE, +) + +_legacy_version_replacement_map = { + "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", +} + + +def _parse_version_parts(s): + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + + if not part or part == ".": + continue + + if part[:1] in "0123456789": + # pad for numeric comparison + yield part.zfill(8) + else: + yield "*" + part + + # ensure that alpha/beta/candidate are before final + yield "*final" + + +def _legacy_cmpkey(version): + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch + # greater than or equal to 0. This will effectively put the LegacyVersion, + # which uses the defacto standard originally implemented by setuptools, + # as before all PEP 440 versions. + epoch = -1 + + # This scheme is taken from pkg_resources.parse_version setuptools prior to + # it's adoption of the packaging library. + parts = [] + for part in _parse_version_parts(version.lower()): + if part.startswith("*"): + # remove "-" before a prerelease tag + if part < "*final": + while parts and parts[-1] == "*final-": + parts.pop() + + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == "00000000": + parts.pop() + + parts.append(part) + parts = tuple(parts) + + return epoch, parts + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse +VERSION_PATTERN = r""" + v? + (?: + (?:(?P<epoch>[0-9]+)!)? # epoch + (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment + (?P<pre> # pre-release + [-_\.]? + (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview)) + [-_\.]? + (?P<pre_n>[0-9]+)? + )? + (?P<post> # post release + (?:-(?P<post_n1>[0-9]+)) + | + (?: + [-_\.]? + (?P<post_l>post|rev|r) + [-_\.]? + (?P<post_n2>[0-9]+)? + ) + )? + (?P<dev> # dev release + [-_\.]? + (?P<dev_l>dev) + [-_\.]? + (?P<dev_n>[0-9]+)? + )? + ) + (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? 
# local version +""" + + +class Version(_BaseVersion): + + _regex = re.compile( + r"^\s*" + VERSION_PATTERN + r"\s*$", + re.VERBOSE | re.IGNORECASE, + ) + + def __init__(self, version): + # Validate the version and parse it into pieces + match = self._regex.search(version) + if not match: + raise InvalidVersion("Invalid version: '{0}'".format(version)) + + # Store the parsed out pieces of the version + self._version = _Version( + epoch=int(match.group("epoch")) if match.group("epoch") else 0, + release=tuple(int(i) for i in match.group("release").split(".")), + pre=_parse_letter_version( + match.group("pre_l"), + match.group("pre_n"), + ), + post=_parse_letter_version( + match.group("post_l"), + match.group("post_n1") or match.group("post_n2"), + ), + dev=_parse_letter_version( + match.group("dev_l"), + match.group("dev_n"), + ), + local=_parse_local_version(match.group("local")), + ) + + # Generate a key which will be used for sorting + self._key = _cmpkey( + self._version.epoch, + self._version.release, + self._version.pre, + self._version.post, + self._version.dev, + self._version.local, + ) + + def __repr__(self): + return "<Version({0})>".format(repr(str(self))) + + def __str__(self): + parts = [] + + # Epoch + if self._version.epoch != 0: + parts.append("{0}!".format(self._version.epoch)) + + # Release segment + parts.append(".".join(str(x) for x in self._version.release)) + + # Pre-release + if self._version.pre is not None: + parts.append("".join(str(x) for x in self._version.pre)) + + # Post-release + if self._version.post is not None: + parts.append(".post{0}".format(self._version.post[1])) + + # Development release + if self._version.dev is not None: + parts.append(".dev{0}".format(self._version.dev[1])) + + # Local version segment + if self._version.local is not None: + parts.append( + "+{0}".format(".".join(str(x) for x in self._version.local)) + ) + + return "".join(parts) + + @property + def public(self): + return str(self).split("+", 1)[0] + + @property + def base_version(self): + parts = [] + + # Epoch + if self._version.epoch != 0: + parts.append("{0}!".format(self._version.epoch)) + + # Release segment + parts.append(".".join(str(x) for x in self._version.release)) + + return "".join(parts) + + @property + def local(self): + version_string = str(self) + if "+" in version_string: + return version_string.split("+", 1)[1] + + @property + def is_prerelease(self): + return bool(self._version.dev or self._version.pre) + + @property + def is_postrelease(self): + return bool(self._version.post) + + +def _parse_letter_version(letter, number): + if letter: + # We consider there to be an implicit 0 in a pre-release if there is + # not a numeral associated with it. + if number is None: + number = 0 + + # We normalize any letters to their lower case form + letter = letter.lower() + + # We consider some words to be alternate spellings of other words and + # in those cases we want to normalize the spellings to our preferred + # spelling. + if letter == "alpha": + letter = "a" + elif letter == "beta": + letter = "b" + elif letter in ["c", "pre", "preview"]: + letter = "rc" + elif letter in ["rev", "r"]: + letter = "post" + + return letter, int(number) + if not letter and number: + # We assume if we are given a number, but we are not given a letter + # then this is using the implicit post release syntax (e.g. 
1.0-1) + letter = "post" + + return letter, int(number) + + +_local_version_seperators = re.compile(r"[\._-]") + + +def _parse_local_version(local): + """ + Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). + """ + if local is not None: + return tuple( + part.lower() if not part.isdigit() else int(part) + for part in _local_version_seperators.split(local) + ) + + +def _cmpkey(epoch, release, pre, post, dev, local): + # When we compare a release version, we want to compare it with all of the + # trailing zeros removed. So we'll use a reverse the list, drop all the now + # leading zeros until we come to something non zero, then take the rest + # re-reverse it back into the correct order and make it a tuple and use + # that for our sorting key. + release = tuple( + reversed(list( + itertools.dropwhile( + lambda x: x == 0, + reversed(release), + ) + )) + ) + + # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0. + # We'll do this by abusing the pre segment, but we _only_ want to do this + # if there is not a pre or a post segment. If we have one of those then + # the normal sorting rules will handle this case correctly. + if pre is None and post is None and dev is not None: + pre = -Infinity + # Versions without a pre-release (except as noted above) should sort after + # those with one. + elif pre is None: + pre = Infinity + + # Versions without a post segment should sort before those with one. + if post is None: + post = -Infinity + + # Versions without a development segment should sort after those with one. + if dev is None: + dev = Infinity + + if local is None: + # Versions without a local segment should sort before those with one. + local = -Infinity + else: + # Versions with a local segment need that segment parsed to implement + # the sorting rules in PEP440. + # - Alpha numeric segments sort before numeric segments + # - Alpha numeric segments sort lexicographically + # - Numeric segments sort numerically + # - Shorter versions sort before longer versions when the prefixes + # match exactly + local = tuple( + (i, "") if isinstance(i, int) else (-Infinity, i) + for i in local + ) + + return epoch, release, pre, post, dev, local diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/pyparsing.py b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/pyparsing.py new file mode 100644 index 0000000000000000000000000000000000000000..cf75e1e5fcbfe7eac41d2a9e446c5c980741087b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/pyparsing.py @@ -0,0 +1,5742 @@ +# module pyparsing.py +# +# Copyright (c) 2003-2018 Paul T. McGuire +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__doc__ = \ +""" +pyparsing module - Classes and methods to define and execute parsing grammars +============================================================================= + +The pyparsing module is an alternative approach to creating and executing simple grammars, +vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you +don't need to learn a new syntax for defining grammars or matching expressions - the parsing module +provides a library of classes that you use to construct the grammar directly in Python. + +Here is a program to parse "Hello, World!" (or any greeting of the form +C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements +(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted to +L{Literal} expressions):: + + from pyparsing import Word, alphas + + # define grammar of a greeting + greet = Word(alphas) + "," + Word(alphas) + "!" + + hello = "Hello, World!" + print (hello, "->", greet.parseString(hello)) + +The program outputs the following:: + + Hello, World! -> ['Hello', ',', 'World', '!'] + +The Python representation of the grammar is quite readable, owing to the self-explanatory +class names, and the use of '+', '|' and '^' operators. + +The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an +object with named attributes. + +The pyparsing module handles some of the problems that are typically vexing when writing text parsers: + - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) + - quoted strings + - embedded comments + + +Getting Started - +----------------- +Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing +classes inherit from. 
Use the docstrings for examples of how to: + - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes + - construct character word-group expressions using the L{Word} class + - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes + - use L{'+'<And>}, L{'|'<MatchFirst>}, L{'^'<Or>}, and L{'&'<Each>} operators to combine simple expressions into more complex ones + - associate names with your parsed results using L{ParserElement.setResultsName} + - find some helpful expression short-cuts like L{delimitedList} and L{oneOf} + - find more useful common expressions in the L{pyparsing_common} namespace class +""" + +__version__ = "2.2.1" +__versionTime__ = "18 Sep 2018 00:49 UTC" +__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>" + +import string +from weakref import ref as wkref +import copy +import sys +import warnings +import re +import sre_constants +import collections +import pprint +import traceback +import types +from datetime import datetime + +try: + from _thread import RLock +except ImportError: + from threading import RLock + +try: + # Python 3 + from collections.abc import Iterable + from collections.abc import MutableMapping +except ImportError: + # Python 2.7 + from collections import Iterable + from collections import MutableMapping + +try: + from collections import OrderedDict as _OrderedDict +except ImportError: + try: + from ordereddict import OrderedDict as _OrderedDict + except ImportError: + _OrderedDict = None + +#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) + +__all__ = [ +'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', +'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', +'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', +'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', +'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', +'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', +'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', +'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', +'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', +'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', +'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', +'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', +'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', +'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', +'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', +'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', +'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', +'CloseMatch', 'tokenMap', 'pyparsing_common', +] + +system_version = tuple(sys.version_info)[:3] +PY_3 = system_version[0] == 3 +if PY_3: + _MAX_INT = sys.maxsize + basestring = str + unichr = chr + _ustr = str + + # build list of single arg builtins, that can be used as parse actions + singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max] + +else: + _MAX_INT = sys.maxint + 
range = xrange + + def _ustr(obj): + """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries + str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It + then < returns the unicode object | encodes it with the default encoding | ... >. + """ + if isinstance(obj,unicode): + return obj + + try: + # If this works, then _ustr(obj) has the same behaviour as str(obj), so + # it won't break any existing code. + return str(obj) + + except UnicodeEncodeError: + # Else encode it + ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') + xmlcharref = Regex(r'&#\d+;') + xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) + return xmlcharref.transformString(ret) + + # build list of single arg builtins, tolerant of Python version, that can be used as parse actions + singleArgBuiltins = [] + import __builtin__ + for fname in "sum len sorted reversed list tuple set any all min max".split(): + try: + singleArgBuiltins.append(getattr(__builtin__,fname)) + except AttributeError: + continue + +_generatorType = type((y for y in range(1))) + +def _xml_escape(data): + """Escape &, <, >, ", ', etc. in a string of data.""" + + # ampersand must be replaced first + from_symbols = '&><"\'' + to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) + for from_,to_ in zip(from_symbols, to_symbols): + data = data.replace(from_, to_) + return data + +class _Constants(object): + pass + +alphas = string.ascii_uppercase + string.ascii_lowercase +nums = "0123456789" +hexnums = nums + "ABCDEFabcdef" +alphanums = alphas + nums +_bslash = chr(92) +printables = "".join(c for c in string.printable if c not in string.whitespace) + +class ParseBaseException(Exception): + """base exception class for all parsing runtime exceptions""" + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( self, pstr, loc=0, msg=None, elem=None ): + self.loc = loc + if msg is None: + self.msg = pstr + self.pstr = "" + else: + self.msg = msg + self.pstr = pstr + self.parserElement = elem + self.args = (pstr, loc, msg) + + @classmethod + def _from_exception(cls, pe): + """ + internal factory method to simplify creating one type of ParseException + from another - avoids having __init__ signature conflicts among subclasses + """ + return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) + + def __getattr__( self, aname ): + """supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + """ + if( aname == "lineno" ): + return lineno( self.loc, self.pstr ) + elif( aname in ("col", "column") ): + return col( self.loc, self.pstr ) + elif( aname == "line" ): + return line( self.loc, self.pstr ) + else: + raise AttributeError(aname) + + def __str__( self ): + return "%s (at char %d), (line:%d, col:%d)" % \ + ( self.msg, self.loc, self.lineno, self.column ) + def __repr__( self ): + return _ustr(self) + def markInputline( self, markerString = ">!<" ): + """Extracts the exception line from the input string, and marks + the location of the exception with a special symbol. 
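+
+        A small illustrative sketch::
+
+            try:
+                Word(nums).parseString("ABC")
+            except ParseException as pe:
+                print(pe.markInputline())   # prints '>!<ABC'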
+ """ + line_str = self.line + line_column = self.column - 1 + if markerString: + line_str = "".join((line_str[:line_column], + markerString, line_str[line_column:])) + return line_str.strip() + def __dir__(self): + return "lineno col line".split() + dir(type(self)) + +class ParseException(ParseBaseException): + """ + Exception thrown when parse expressions don't match class; + supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + + Example:: + try: + Word(nums).setName("integer").parseString("ABC") + except ParseException as pe: + print(pe) + print("column: {}".format(pe.col)) + + prints:: + Expected integer (at char 0), (line:1, col:1) + column: 1 + """ + pass + +class ParseFatalException(ParseBaseException): + """user-throwable exception thrown when inconsistent parse content + is found; stops all parsing immediately""" + pass + +class ParseSyntaxException(ParseFatalException): + """just like L{ParseFatalException}, but thrown internally when an + L{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stop + immediately because an unbacktrackable syntax error has been found""" + pass + +#~ class ReparseException(ParseBaseException): + #~ """Experimental class - parse actions can raise this exception to cause + #~ pyparsing to reparse the input string: + #~ - with a modified input string, and/or + #~ - with a modified start location + #~ Set the values of the ReparseException in the constructor, and raise the + #~ exception in a parse action to cause pyparsing to use the new string/location. + #~ Setting the values as None causes no change to be made. + #~ """ + #~ def __init_( self, newstring, restartLoc ): + #~ self.newParseText = newstring + #~ self.reparseLoc = restartLoc + +class RecursiveGrammarException(Exception): + """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive""" + def __init__( self, parseElementList ): + self.parseElementTrace = parseElementList + + def __str__( self ): + return "RecursiveGrammarException: %s" % self.parseElementTrace + +class _ParseResultsWithOffset(object): + def __init__(self,p1,p2): + self.tup = (p1,p2) + def __getitem__(self,i): + return self.tup[i] + def __repr__(self): + return repr(self.tup[0]) + def setOffset(self,i): + self.tup = (self.tup[0],i) + +class ParseResults(object): + """ + Structured parse results, to provide multiple means of access to the parsed data: + - as a list (C{len(results)}) + - by list index (C{results[0], results[1]}, etc.) 
+ - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName}) + + Example:: + integer = Word(nums) + date_str = (integer.setResultsName("year") + '/' + + integer.setResultsName("month") + '/' + + integer.setResultsName("day")) + # equivalent form: + # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + # parseString returns a ParseResults object + result = date_str.parseString("1999/12/31") + + def test(s, fn=repr): + print("%s -> %s" % (s, fn(eval(s)))) + test("list(result)") + test("result[0]") + test("result['month']") + test("result.day") + test("'month' in result") + test("'minutes' in result") + test("result.dump()", str) + prints:: + list(result) -> ['1999', '/', '12', '/', '31'] + result[0] -> '1999' + result['month'] -> '12' + result.day -> '31' + 'month' in result -> True + 'minutes' in result -> False + result.dump() -> ['1999', '/', '12', '/', '31'] + - day: 31 + - month: 12 + - year: 1999 + """ + def __new__(cls, toklist=None, name=None, asList=True, modal=True ): + if isinstance(toklist, cls): + return toklist + retobj = object.__new__(cls) + retobj.__doinit = True + return retobj + + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): + if self.__doinit: + self.__doinit = False + self.__name = None + self.__parent = None + self.__accumNames = {} + self.__asList = asList + self.__modal = modal + if toklist is None: + toklist = [] + if isinstance(toklist, list): + self.__toklist = toklist[:] + elif isinstance(toklist, _generatorType): + self.__toklist = list(toklist) + else: + self.__toklist = [toklist] + self.__tokdict = dict() + + if name is not None and name: + if not modal: + self.__accumNames[name] = 0 + if isinstance(name,int): + name = _ustr(name) # will always return a str, but use _ustr for consistency + self.__name = name + if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])): + if isinstance(toklist,basestring): + toklist = [ toklist ] + if asList: + if isinstance(toklist,ParseResults): + self[name] = _ParseResultsWithOffset(toklist.copy(),0) + else: + self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) + self[name].__name = name + else: + try: + self[name] = toklist[0] + except (KeyError,TypeError,IndexError): + self[name] = toklist + + def __getitem__( self, i ): + if isinstance( i, (int,slice) ): + return self.__toklist[i] + else: + if i not in self.__accumNames: + return self.__tokdict[i][-1][0] + else: + return ParseResults([ v[0] for v in self.__tokdict[i] ]) + + def __setitem__( self, k, v, isinstance=isinstance ): + if isinstance(v,_ParseResultsWithOffset): + self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] + sub = v[0] + elif isinstance(k,(int,slice)): + self.__toklist[k] = v + sub = v + else: + self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] + sub = v + if isinstance(sub,ParseResults): + sub.__parent = wkref(self) + + def __delitem__( self, i ): + if isinstance(i,(int,slice)): + mylen = len( self.__toklist ) + del self.__toklist[i] + + # convert int to slice + if isinstance(i, int): + if i < 0: + i += mylen + i = slice(i, i+1) + # get removed indices + removed = list(range(*i.indices(mylen))) + removed.reverse() + # fixup indices in token dictionary + for name,occurrences in self.__tokdict.items(): + for j in removed: + for k, (value, position) in 
enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) + else: + del self.__tokdict[i] + + def __contains__( self, k ): + return k in self.__tokdict + + def __len__( self ): return len( self.__toklist ) + def __bool__(self): return ( not not self.__toklist ) + __nonzero__ = __bool__ + def __iter__( self ): return iter( self.__toklist ) + def __reversed__( self ): return iter( self.__toklist[::-1] ) + def _iterkeys( self ): + if hasattr(self.__tokdict, "iterkeys"): + return self.__tokdict.iterkeys() + else: + return iter(self.__tokdict) + + def _itervalues( self ): + return (self[k] for k in self._iterkeys()) + + def _iteritems( self ): + return ((k, self[k]) for k in self._iterkeys()) + + if PY_3: + keys = _iterkeys + """Returns an iterator of all named result keys (Python 3.x only).""" + + values = _itervalues + """Returns an iterator of all named result values (Python 3.x only).""" + + items = _iteritems + """Returns an iterator of all named result key-value tuples (Python 3.x only).""" + + else: + iterkeys = _iterkeys + """Returns an iterator of all named result keys (Python 2.x only).""" + + itervalues = _itervalues + """Returns an iterator of all named result values (Python 2.x only).""" + + iteritems = _iteritems + """Returns an iterator of all named result key-value tuples (Python 2.x only).""" + + def keys( self ): + """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" + return list(self.iterkeys()) + + def values( self ): + """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" + return list(self.itervalues()) + + def items( self ): + """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" + return list(self.iteritems()) + + def haskeys( self ): + """Since keys() returns an iterator, this method is helpful in bypassing + code that looks for the existence of any defined results names.""" + return bool(self.__tokdict) + + def pop( self, *args, **kwargs): + """ + Removes and returns item at specified index (default=C{last}). + Supports both C{list} and C{dict} semantics for C{pop()}. If passed no + argument or an integer argument, it will use C{list} semantics + and pop tokens from the list of parsed tokens. If passed a + non-integer argument (most likely a string), it will use C{dict} + semantics and pop the corresponding value from any defined + results names. A second default return value argument is + supported, just as in C{dict.pop()}. 
+ + Example:: + def remove_first(tokens): + tokens.pop(0) + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] + + label = Word(alphas) + patt = label("LABEL") + OneOrMore(Word(nums)) + print(patt.parseString("AAB 123 321").dump()) + + # Use pop() in a parse action to remove named result (note that corresponding value is not + # removed from list form of results) + def remove_LABEL(tokens): + tokens.pop("LABEL") + return tokens + patt.addParseAction(remove_LABEL) + print(patt.parseString("AAB 123 321").dump()) + prints:: + ['AAB', '123', '321'] + - LABEL: AAB + + ['AAB', '123', '321'] + """ + if not args: + args = [-1] + for k,v in kwargs.items(): + if k == 'default': + args = (args[0], v) + else: + raise TypeError("pop() got an unexpected keyword argument '%s'" % k) + if (isinstance(args[0], int) or + len(args) == 1 or + args[0] in self): + index = args[0] + ret = self[index] + del self[index] + return ret + else: + defaultvalue = args[1] + return defaultvalue + + def get(self, key, defaultValue=None): + """ + Returns named result matching the given key, or if there is no + such name, then returns the given C{defaultValue} or C{None} if no + C{defaultValue} is specified. + + Similar to C{dict.get()}. + + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString("1999/12/31") + print(result.get("year")) # -> '1999' + print(result.get("hour", "not specified")) # -> 'not specified' + print(result.get("hour")) # -> None + """ + if key in self: + return self[key] + else: + return defaultValue + + def insert( self, index, insStr ): + """ + Inserts new element at location index in the list of parsed tokens. + + Similar to C{list.insert()}. + + Example:: + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to insert the parse location in the front of the parsed results + def insert_locn(locn, tokens): + tokens.insert(0, locn) + print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] + """ + self.__toklist.insert(index, insStr) + # fixup indices in token dictionary + for name,occurrences in self.__tokdict.items(): + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) + + def append( self, item ): + """ + Add single element to end of ParseResults list of elements. + + Example:: + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to compute the sum of the parsed integers, and add it to the end + def append_sum(tokens): + tokens.append(sum(map(int, tokens))) + print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] + """ + self.__toklist.append(item) + + def extend( self, itemseq ): + """ + Add sequence of elements to end of ParseResults list of elements. 
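+
+        Similar to C{list.extend()}.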
+
+        Example::
+            patt = OneOrMore(Word(alphas))
+
+            # use a parse action to append the reverse of the matched strings, to make a palindrome
+            def make_palindrome(tokens):
+                tokens.extend(reversed([t[::-1] for t in tokens]))
+                return ''.join(tokens)
+            print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
+        """
+        if isinstance(itemseq, ParseResults):
+            self += itemseq
+        else:
+            self.__toklist.extend(itemseq)
+
+    def clear( self ):
+        """
+        Clear all elements and results names.
+        """
+        del self.__toklist[:]
+        self.__tokdict.clear()
+
+    def __getattr__( self, name ):
+        try:
+            return self[name]
+        except KeyError:
+            return ""
+
+    def __add__( self, other ):
+        ret = self.copy()
+        ret += other
+        return ret
+
+    def __iadd__( self, other ):
+        if other.__tokdict:
+            offset = len(self.__toklist)
+            addoffset = lambda a: offset if a<0 else a+offset
+            otheritems = other.__tokdict.items()
+            otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) )
+                                for (k,vlist) in otheritems for v in vlist]
+            for k,v in otherdictitems:
+                self[k] = v
+                if isinstance(v[0],ParseResults):
+                    v[0].__parent = wkref(self)
+
+        self.__toklist += other.__toklist
+        self.__accumNames.update( other.__accumNames )
+        return self
+
+    def __radd__(self, other):
+        if isinstance(other,int) and other == 0:
+            # useful for merging many ParseResults using sum() builtin
+            return self.copy()
+        else:
+            # this may raise a TypeError - so be it
+            return other + self
+
+    def __repr__( self ):
+        return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) )
+
+    def __str__( self ):
+        return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']'
+
+    def _asStringList( self, sep='' ):
+        out = []
+        for item in self.__toklist:
+            if out and sep:
+                out.append(sep)
+            if isinstance( item, ParseResults ):
+                out += item._asStringList()
+            else:
+                out.append( _ustr(item) )
+        return out
+
+    def asList( self ):
+        """
+        Returns the parse results as a nested list of matching tokens, all converted to strings.
+
+        Example::
+            patt = OneOrMore(Word(alphas))
+            result = patt.parseString("sldkj lsdkj sldkj")
+            # even though the result prints in string-like form, it is actually a pyparsing ParseResults
+            print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj']
+
+            # Use asList() to create an actual list
+            result_list = result.asList()
+            print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj']
+        """
+        return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist]
+
+    def asDict( self ):
+        """
+        Returns the named parse results as a nested dictionary.
+ + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString('12/31/1999') + print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) + + result_dict = result.asDict() + print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'} + + # even though a ParseResults supports dict-like access, sometime you just need to have a dict + import json + print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable + print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} + """ + if PY_3: + item_fn = self.items + else: + item_fn = self.iteritems + + def toItem(obj): + if isinstance(obj, ParseResults): + if obj.haskeys(): + return obj.asDict() + else: + return [toItem(v) for v in obj] + else: + return obj + + return dict((k,toItem(v)) for k,v in item_fn()) + + def copy( self ): + """ + Returns a new copy of a C{ParseResults} object. + """ + ret = ParseResults( self.__toklist ) + ret.__tokdict = self.__tokdict.copy() + ret.__parent = self.__parent + ret.__accumNames.update( self.__accumNames ) + ret.__name = self.__name + return ret + + def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): + """ + (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names. + """ + nl = "\n" + out = [] + namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items() + for v in vlist) + nextLevelIndent = indent + " " + + # collapse out indents if formatting is not desired + if not formatted: + indent = "" + nextLevelIndent = "" + nl = "" + + selfTag = None + if doctag is not None: + selfTag = doctag + else: + if self.__name: + selfTag = self.__name + + if not selfTag: + if namedItemsOnly: + return "" + else: + selfTag = "ITEM" + + out += [ nl, indent, "<", selfTag, ">" ] + + for i,res in enumerate(self.__toklist): + if isinstance(res,ParseResults): + if i in namedItems: + out += [ res.asXML(namedItems[i], + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + out += [ res.asXML(None, + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + # individual token, see if there is a name for it + resTag = None + if i in namedItems: + resTag = namedItems[i] + if not resTag: + if namedItemsOnly: + continue + else: + resTag = "ITEM" + xmlBodyText = _xml_escape(_ustr(res)) + out += [ nl, nextLevelIndent, "<", resTag, ">", + xmlBodyText, + "</", resTag, ">" ] + + out += [ nl, indent, "</", selfTag, ">" ] + return "".join(out) + + def __lookup(self,sub): + for k,vlist in self.__tokdict.items(): + for v,loc in vlist: + if sub is v: + return k + return None + + def getName(self): + r""" + Returns the results name for this token expression. Useful when several + different expressions might match at a particular location. 
+ + Example:: + integer = Word(nums) + ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d") + house_number_expr = Suppress('#') + Word(nums, alphanums) + user_data = (Group(house_number_expr)("house_number") + | Group(ssn_expr)("ssn") + | Group(integer)("age")) + user_info = OneOrMore(user_data) + + result = user_info.parseString("22 111-22-3333 #221B") + for item in result: + print(item.getName(), ':', item[0]) + prints:: + age : 22 + ssn : 111-22-3333 + house_number : 221B + """ + if self.__name: + return self.__name + elif self.__parent: + par = self.__parent() + if par: + return par.__lookup(self) + else: + return None + elif (len(self) == 1 and + len(self.__tokdict) == 1 and + next(iter(self.__tokdict.values()))[0][1] in (0,-1)): + return next(iter(self.__tokdict.keys())) + else: + return None + + def dump(self, indent='', depth=0, full=True): + """ + Diagnostic method for listing out the contents of a C{ParseResults}. + Accepts an optional C{indent} argument so that this string can be embedded + in a nested display of other data. + + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString('12/31/1999') + print(result.dump()) + prints:: + ['12', '/', '31', '/', '1999'] + - day: 1999 + - month: 31 + - year: 12 + """ + out = [] + NL = '\n' + out.append( indent+_ustr(self.asList()) ) + if full: + if self.haskeys(): + items = sorted((str(k), v) for k,v in self.items()) + for k,v in items: + if out: + out.append(NL) + out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) + if isinstance(v,ParseResults): + if v: + out.append( v.dump(indent,depth+1) ) + else: + out.append(_ustr(v)) + else: + out.append(repr(v)) + elif any(isinstance(vv,ParseResults) for vv in self): + v = self + for i,vv in enumerate(v): + if isinstance(vv,ParseResults): + out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) + else: + out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) + + return "".join(out) + + def pprint(self, *args, **kwargs): + """ + Pretty-printer for parsed results as a list, using the C{pprint} module. + Accepts additional positional or keyword args as defined for the + C{pprint.pprint} method. 
(U{http://docs.python.org/3/library/pprint.html#pprint.pprint}) + + Example:: + ident = Word(alphas, alphanums) + num = Word(nums) + func = Forward() + term = ident | num | Group('(' + func + ')') + func <<= ident + Group(Optional(delimitedList(term))) + result = func.parseString("fna a,b,(fnb c,d,200),100") + result.pprint(width=40) + prints:: + ['fna', + ['a', + 'b', + ['(', 'fnb', ['c', 'd', '200'], ')'], + '100']] + """ + pprint.pprint(self.asList(), *args, **kwargs) + + # add support for pickle protocol + def __getstate__(self): + return ( self.__toklist, + ( self.__tokdict.copy(), + self.__parent is not None and self.__parent() or None, + self.__accumNames, + self.__name ) ) + + def __setstate__(self,state): + self.__toklist = state[0] + (self.__tokdict, + par, + inAccumNames, + self.__name) = state[1] + self.__accumNames = {} + self.__accumNames.update(inAccumNames) + if par is not None: + self.__parent = wkref(par) + else: + self.__parent = None + + def __getnewargs__(self): + return self.__toklist, self.__name, self.__asList, self.__modal + + def __dir__(self): + return (dir(type(self)) + list(self.keys())) + +MutableMapping.register(ParseResults) + +def col (loc,strg): + """Returns current column within a string, counting newlines as line separators. + The first column is number 1. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information + on parsing strings containing C{<TAB>}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + s = strg + return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc) + +def lineno(loc,strg): + """Returns current line number within a string, counting newlines as line separators. + The first line is number 1. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information + on parsing strings containing C{<TAB>}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + return strg.count("\n",0,loc) + 1 + +def line( loc, strg ): + """Returns the line of text containing loc within a string, counting newlines as line separators. 
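+
+       Example (an illustrative sketch)::
+           line(4, "abc\ndef")   # -> 'def'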
+ """ + lastCR = strg.rfind("\n", 0, loc) + nextCR = strg.find("\n", loc) + if nextCR >= 0: + return strg[lastCR+1:nextCR] + else: + return strg[lastCR+1:] + +def _defaultStartDebugAction( instring, loc, expr ): + print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) + +def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): + print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) + +def _defaultExceptionDebugAction( instring, loc, expr, exc ): + print ("Exception raised:" + _ustr(exc)) + +def nullDebugAction(*args): + """'Do-nothing' debug action, to suppress debugging output during parsing.""" + pass + +# Only works on Python 3.x - nonlocal is toxic to Python 2 installs +#~ 'decorator to trim function calls to match the arity of the target' +#~ def _trim_arity(func, maxargs=3): + #~ if func in singleArgBuiltins: + #~ return lambda s,l,t: func(t) + #~ limit = 0 + #~ foundArity = False + #~ def wrapper(*args): + #~ nonlocal limit,foundArity + #~ while 1: + #~ try: + #~ ret = func(*args[limit:]) + #~ foundArity = True + #~ return ret + #~ except TypeError: + #~ if limit == maxargs or foundArity: + #~ raise + #~ limit += 1 + #~ continue + #~ return wrapper + +# this version is Python 2.x-3.x cross-compatible +'decorator to trim function calls to match the arity of the target' +def _trim_arity(func, maxargs=2): + if func in singleArgBuiltins: + return lambda s,l,t: func(t) + limit = [0] + foundArity = [False] + + # traceback return data structure changed in Py3.5 - normalize back to plain tuples + if system_version[:2] >= (3,5): + def extract_stack(limit=0): + # special handling for Python 3.5.0 - extra deep call stack by 1 + offset = -3 if system_version == (3,5,0) else -2 + frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] + return [frame_summary[:2]] + def extract_tb(tb, limit=0): + frames = traceback.extract_tb(tb, limit=limit) + frame_summary = frames[-1] + return [frame_summary[:2]] + else: + extract_stack = traceback.extract_stack + extract_tb = traceback.extract_tb + + # synthesize what would be returned by traceback.extract_stack at the call to + # user's parse action 'func', so that we don't incur call penalty at parse time + + LINE_DIFF = 6 + # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND + # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! 
+ this_line = extract_stack(limit=2)[-1] + pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF) + + def wrapper(*args): + while 1: + try: + ret = func(*args[limit[0]:]) + foundArity[0] = True + return ret + except TypeError: + # re-raise TypeErrors if they did not come from our arity testing + if foundArity[0]: + raise + else: + try: + tb = sys.exc_info()[-1] + if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: + raise + finally: + del tb + + if limit[0] <= maxargs: + limit[0] += 1 + continue + raise + + # copy func name to wrapper for sensible debug output + func_name = "<parse action>" + try: + func_name = getattr(func, '__name__', + getattr(func, '__class__').__name__) + except Exception: + func_name = str(func) + wrapper.__name__ = func_name + + return wrapper + +class ParserElement(object): + """Abstract base level parser element class.""" + DEFAULT_WHITE_CHARS = " \n\t\r" + verbose_stacktrace = False + + @staticmethod + def setDefaultWhitespaceChars( chars ): + r""" + Overrides the default whitespace chars + + Example:: + # default whitespace chars are space, <TAB> and newline + OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] + + # change to just treat newline as significant + ParserElement.setDefaultWhitespaceChars(" \t") + OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] + """ + ParserElement.DEFAULT_WHITE_CHARS = chars + + @staticmethod + def inlineLiteralsUsing(cls): + """ + Set class to be used for inclusion of string literals into a parser. + + Example:: + # default literal class used is Literal + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] + + + # change to Suppress + ParserElement.inlineLiteralsUsing(Suppress) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] + """ + ParserElement._literalStringClass = cls + + def __init__( self, savelist=False ): + self.parseAction = list() + self.failAction = None + #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall + self.strRepr = None + self.resultsName = None + self.saveAsList = savelist + self.skipWhitespace = True + self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + self.copyDefaultWhiteChars = True + self.mayReturnEmpty = False # used when checking for left-recursion + self.keepTabs = False + self.ignoreExprs = list() + self.debug = False + self.streamlined = False + self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index + self.errmsg = "" + self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) + self.debugActions = ( None, None, None ) #custom debug actions + self.re = None + self.callPreparse = True # used to avoid redundant calls to preParse + self.callDuringTry = False + + def copy( self ): + """ + Make a copy of this C{ParserElement}. Useful for defining different parse actions + for the same parsing pattern, using copies of the original parse element. 
+ + Example:: + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") + integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") + + print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) + prints:: + [5120, 100, 655360, 268435456] + Equivalent form of C{expr.copy()} is just C{expr()}:: + integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") + """ + cpy = copy.copy( self ) + cpy.parseAction = self.parseAction[:] + cpy.ignoreExprs = self.ignoreExprs[:] + if self.copyDefaultWhiteChars: + cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + return cpy + + def setName( self, name ): + """ + Define name for this expression, makes debugging and exception messages clearer. + + Example:: + Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) + Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) + """ + self.name = name + self.errmsg = "Expected " + self.name + if hasattr(self,"exception"): + self.exception.msg = self.errmsg + return self + + def setResultsName( self, name, listAllMatches=False ): + """ + Define name for referencing matching tokens as a nested attribute + of the returned parse results. + NOTE: this returns a *copy* of the original C{ParserElement} object; + this is so that the client can define a basic element, such as an + integer, and reference it in multiple places with different names. + + You can also set results names using the abbreviated syntax, + C{expr("name")} in place of C{expr.setResultsName("name")} - + see L{I{__call__}<__call__>}. + + Example:: + date_str = (integer.setResultsName("year") + '/' + + integer.setResultsName("month") + '/' + + integer.setResultsName("day")) + + # equivalent form: + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + """ + newself = self.copy() + if name.endswith("*"): + name = name[:-1] + listAllMatches=True + newself.resultsName = name + newself.modalResults = not listAllMatches + return newself + + def setBreak(self,breakFlag = True): + """Method to invoke the Python pdb debugger when this element is + about to be parsed. Set C{breakFlag} to True to enable, False to + disable. + """ + if breakFlag: + _parseMethod = self._parse + def breaker(instring, loc, doActions=True, callPreParse=True): + import pdb + pdb.set_trace() + return _parseMethod( instring, loc, doActions, callPreParse ) + breaker._originalParseMethod = _parseMethod + self._parse = breaker + else: + if hasattr(self._parse,"_originalParseMethod"): + self._parse = self._parse._originalParseMethod + return self + + def setParseAction( self, *fns, **kwargs ): + """ + Define one or more actions to perform when successfully matching parse element definition. + Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, + C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: + - s = the original string being parsed (see note below) + - loc = the location of the matching substring + - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object + If the functions in fns modify the tokens, they can return them as the return + value from fn, and the modified list of tokens will replace the original. + Otherwise, fn does not need to return any value. 
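+
+        Note: each parse action is wrapped internally (see the C{_trim_arity}
+        helper above) and called with as many of the above arguments as it
+        accepts, so any of the listed signatures may be used interchangeably.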
+
+        Optional keyword arguments:
+         - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing
+
+        Note: the default parsing behavior is to expand tabs in the input string
+        before starting the parsing process.  See L{I{parseString}<parseString>} for more information
+        on parsing strings containing C{<TAB>}s, and suggested methods to maintain a
+        consistent view of the parsed string, the parse location, and line and column
+        positions within the parsed string.
+
+        Example::
+            integer = Word(nums)
+            date_str = integer + '/' + integer + '/' + integer
+
+            date_str.parseString("1999/12/31")  # -> ['1999', '/', '12', '/', '31']
+
+            # use parse action to convert to ints at parse time
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            date_str = integer + '/' + integer + '/' + integer
+
+            # note that integer fields are now ints, not strings
+            date_str.parseString("1999/12/31")  # -> [1999, '/', 12, '/', 31]
+        """
+        self.parseAction = list(map(_trim_arity, list(fns)))
+        self.callDuringTry = kwargs.get("callDuringTry", False)
+        return self
+
+    def addParseAction( self, *fns, **kwargs ):
+        """
+        Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
+
+        See examples in L{I{copy}<copy>}.
+        """
+        self.parseAction += list(map(_trim_arity, list(fns)))
+        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+        return self
+
+    def addCondition(self, *fns, **kwargs):
+        """Add a boolean predicate function to expression's list of parse actions. See
+        L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction},
+        functions passed to C{addCondition} need to return boolean success/fail of the condition.
+
+        Optional keyword arguments:
+         - message = define a custom message to be used in the raised exception
+         - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException
+
+        Example::
+            integer = Word(nums).setParseAction(lambda toks: int(toks[0]))
+            year_int = integer.copy()
+            year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later")
+            date_str = year_int + '/' + integer + '/' + integer
+
+            result = date_str.parseString("1999/12/31")  # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1)
+        """
+        msg = kwargs.get("message", "failed user-defined condition")
+        exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException
+        for fn in fns:
+            # bind the wrapped fn at definition time, so that each condition
+            # keeps its own function instead of the loop's last value
+            fn = _trim_arity(fn)
+            def pa(s,l,t,fn=fn):
+                if not bool(fn(s,l,t)):
+                    raise exc_type(s,l,msg)
+            self.parseAction.append(pa)
+        self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False)
+        return self
+
+    def setFailAction( self, fn ):
+        """Define action to perform if parsing fails at this expression.
+           Fail action fn is a callable function that takes the arguments
+           C{fn(s,loc,expr,err)} where:
+            - s = string being parsed
+            - loc = location where expression match was attempted and failed
+            - expr = the parse expression that failed
+            - err = the exception thrown
+           The function returns no value.
It may throw C{L{ParseFatalException}} + if it is desired to stop parsing immediately.""" + self.failAction = fn + return self + + def _skipIgnorables( self, instring, loc ): + exprsFound = True + while exprsFound: + exprsFound = False + for e in self.ignoreExprs: + try: + while 1: + loc,dummy = e._parse( instring, loc ) + exprsFound = True + except ParseException: + pass + return loc + + def preParse( self, instring, loc ): + if self.ignoreExprs: + loc = self._skipIgnorables( instring, loc ) + + if self.skipWhitespace: + wt = self.whiteChars + instrlen = len(instring) + while loc < instrlen and instring[loc] in wt: + loc += 1 + + return loc + + def parseImpl( self, instring, loc, doActions=True ): + return loc, [] + + def postParse( self, instring, loc, tokenlist ): + return tokenlist + + #~ @profile + def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): + debugging = ( self.debug ) #and doActions ) + + if debugging or self.failAction: + #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) + if (self.debugActions[0] ): + self.debugActions[0]( instring, loc, self ) + if callPreParse and self.callPreparse: + preloc = self.preParse( instring, loc ) + else: + preloc = loc + tokensStart = preloc + try: + try: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + except IndexError: + raise ParseException( instring, len(instring), self.errmsg, self ) + except ParseBaseException as err: + #~ print ("Exception raised:", err) + if self.debugActions[2]: + self.debugActions[2]( instring, tokensStart, self, err ) + if self.failAction: + self.failAction( instring, tokensStart, self, err ) + raise + else: + if callPreParse and self.callPreparse: + preloc = self.preParse( instring, loc ) + else: + preloc = loc + tokensStart = preloc + if self.mayIndexError or preloc >= len(instring): + try: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + except IndexError: + raise ParseException( instring, len(instring), self.errmsg, self ) + else: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + + tokens = self.postParse( instring, loc, tokens ) + + retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) + if self.parseAction and (doActions or self.callDuringTry): + if debugging: + try: + for fn in self.parseAction: + tokens = fn( instring, tokensStart, retTokens ) + if tokens is not None: + retTokens = ParseResults( tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), + modal=self.modalResults ) + except ParseBaseException as err: + #~ print "Exception raised in user parse action:", err + if (self.debugActions[2] ): + self.debugActions[2]( instring, tokensStart, self, err ) + raise + else: + for fn in self.parseAction: + tokens = fn( instring, tokensStart, retTokens ) + if tokens is not None: + retTokens = ParseResults( tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), + modal=self.modalResults ) + if debugging: + #~ print ("Matched",self,"->",retTokens.asList()) + if (self.debugActions[1] ): + self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) + + return loc, retTokens + + def tryParse( self, instring, loc ): + try: + return self._parse( instring, loc, doActions=False )[0] + except ParseFatalException: + raise ParseException( instring, loc, self.errmsg, self) + + def canParseNext(self, instring, loc): + try: + self.tryParse(instring, loc) + except (ParseException, 
IndexError): + return False + else: + return True + + class _UnboundedCache(object): + def __init__(self): + cache = {} + self.not_in_cache = not_in_cache = object() + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + + def clear(self): + cache.clear() + + def cache_len(self): + return len(cache) + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + self.__len__ = types.MethodType(cache_len, self) + + if _OrderedDict is not None: + class _FifoCache(object): + def __init__(self, size): + self.not_in_cache = not_in_cache = object() + + cache = _OrderedDict() + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + while len(cache) > size: + try: + cache.popitem(False) + except KeyError: + pass + + def clear(self): + cache.clear() + + def cache_len(self): + return len(cache) + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + self.__len__ = types.MethodType(cache_len, self) + + else: + class _FifoCache(object): + def __init__(self, size): + self.not_in_cache = not_in_cache = object() + + cache = {} + key_fifo = collections.deque([], size) + + def get(self, key): + return cache.get(key, not_in_cache) + + def set(self, key, value): + cache[key] = value + while len(key_fifo) > size: + cache.pop(key_fifo.popleft(), None) + key_fifo.append(key) + + def clear(self): + cache.clear() + key_fifo.clear() + + def cache_len(self): + return len(cache) + + self.get = types.MethodType(get, self) + self.set = types.MethodType(set, self) + self.clear = types.MethodType(clear, self) + self.__len__ = types.MethodType(cache_len, self) + + # argument cache for optimizing repeated calls when backtracking through recursive expressions + packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail + packrat_cache_lock = RLock() + packrat_cache_stats = [0, 0] + + # this method gets repeatedly called during backtracking with the same arguments - + # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression + def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): + HIT, MISS = 0, 1 + lookup = (self, instring, loc, callPreParse, doActions) + with ParserElement.packrat_cache_lock: + cache = ParserElement.packrat_cache + value = cache.get(lookup) + if value is cache.not_in_cache: + ParserElement.packrat_cache_stats[MISS] += 1 + try: + value = self._parseNoCache(instring, loc, doActions, callPreParse) + except ParseBaseException as pe: + # cache a copy of the exception, without the traceback + cache.set(lookup, pe.__class__(*pe.args)) + raise + else: + cache.set(lookup, (value[0], value[1].copy())) + return value + else: + ParserElement.packrat_cache_stats[HIT] += 1 + if isinstance(value, Exception): + raise value + return (value[0], value[1].copy()) + + _parse = _parseNoCache + + @staticmethod + def resetCache(): + ParserElement.packrat_cache.clear() + ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats) + + _packratEnabled = False + @staticmethod + def enablePackrat(cache_size_limit=128): + """Enables "packrat" parsing, which adds memoizing to the parsing logic. 
+
+           Repeated parse attempts at the same string location (which happens
+           often in many complex grammars) can immediately return a cached value,
+           instead of re-executing parsing/validating code.  Memoizing is done of
+           both valid results and parsing exceptions.
+
+           Parameters:
+            - cache_size_limit - (default=C{128}) - if an integer value is provided
+              will limit the size of the packrat cache; if None is passed, then
+              the cache size will be unbounded; if 0 is passed, the cache will
+              be effectively disabled.
+
+           This speedup may break existing programs that use parse actions that
+           have side-effects.  For this reason, packrat parsing is disabled when
+           you first import pyparsing.  To activate the packrat feature, your
+           program must call the class method C{ParserElement.enablePackrat()}.  If
+           your program uses C{psyco} to "compile as you go", you must call
+           C{enablePackrat} before calling C{psyco.full()}.  If you do not do this,
+           Python will crash.  For best results, call C{enablePackrat()} immediately
+           after importing pyparsing.
+
+           Example::
+               import pyparsing
+               pyparsing.ParserElement.enablePackrat()
+        """
+        if not ParserElement._packratEnabled:
+            ParserElement._packratEnabled = True
+            if cache_size_limit is None:
+                ParserElement.packrat_cache = ParserElement._UnboundedCache()
+            else:
+                ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
+            ParserElement._parse = ParserElement._parseCache
+
+    def parseString( self, instring, parseAll=False ):
+        """
+        Execute the parse expression with the given string.
+        This is the main interface to the client code, once the complete
+        expression has been built.
+
+        If you want the grammar to require that the entire input string be
+        successfully parsed, then set C{parseAll} to True (equivalent to ending
+        the grammar with C{L{StringEnd()}}).
+
+        Note: C{parseString} implicitly calls C{expandtabs()} on the input string,
+        in order to report proper column numbers in parse actions.
+        If the input string contains tabs and
+        the grammar uses parse actions that use the C{loc} argument to index into the
+        string being parsed, you can ensure you have a consistent view of the input
+        string by:
+         - calling C{parseWithTabs} on your grammar before calling C{parseString}
+           (see L{I{parseWithTabs}<parseWithTabs>})
+         - define your parse action using the full C{(s,loc,toks)} signature, and
+           reference the input string using the parse action's C{s} argument
+         - explicitly expand the tabs in your input string before calling
+           C{parseString}
+
+        Example::
+            Word('a').parseString('aaaaabaaa')  # -> ['aaaaa']
+            Word('a').parseString('aaaaabaaa', parseAll=True)  # -> Exception: Expected end of text
+        """
+        ParserElement.resetCache()
+        if not self.streamlined:
+            self.streamline()
+            #~ self.saveAsList = True
+        for e in self.ignoreExprs:
+            e.streamline()
+        if not self.keepTabs:
+            instring = instring.expandtabs()
+        try:
+            loc, tokens = self._parse( instring, 0 )
+            if parseAll:
+                loc = self.preParse( instring, loc )
+                se = Empty() + StringEnd()
+                se._parse( instring, loc )
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+        else:
+            return tokens
+
+    def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):
+        """
+        Scan the input string for expression matches.  Each match will return the
+        matching tokens, start location, and end location.
May be called with optional + C{maxMatches} argument, to clip scanning after 'n' matches are found. If + C{overlap} is specified, then overlapping matches will be reported. + + Note that the start and end locations are reported relative to the string + being parsed. See L{I{parseString}<parseString>} for more information on parsing + strings with embedded tabs. + + Example:: + source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" + print(source) + for tokens,start,end in Word(alphas).scanString(source): + print(' '*start + '^'*(end-start)) + print(' '*start + tokens[0]) + + prints:: + + sldjf123lsdjjkf345sldkjf879lkjsfd987 + ^^^^^ + sldjf + ^^^^^^^ + lsdjjkf + ^^^^^^ + sldkjf + ^^^^^^ + lkjsfd + """ + if not self.streamlined: + self.streamline() + for e in self.ignoreExprs: + e.streamline() + + if not self.keepTabs: + instring = _ustr(instring).expandtabs() + instrlen = len(instring) + loc = 0 + preparseFn = self.preParse + parseFn = self._parse + ParserElement.resetCache() + matches = 0 + try: + while loc <= instrlen and matches < maxMatches: + try: + preloc = preparseFn( instring, loc ) + nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) + except ParseException: + loc = preloc+1 + else: + if nextLoc > loc: + matches += 1 + yield tokens, preloc, nextLoc + if overlap: + nextloc = preparseFn( instring, loc ) + if nextloc > loc: + loc = nextLoc + else: + loc += 1 + else: + loc = nextLoc + else: + loc = preloc+1 + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def transformString( self, instring ): + """ + Extension to C{L{scanString}}, to modify matching text with modified tokens that may + be returned from a parse action. To use C{transformString}, define a grammar and + attach a parse action to it that modifies the returned token list. + Invoking C{transformString()} on a target string will then scan for matches, + and replace the matched text patterns according to the logic in the parse + action. C{transformString()} returns the resulting transformed string. + + Example:: + wd = Word(alphas) + wd.setParseAction(lambda toks: toks[0].title()) + + print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) + Prints:: + Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. + """ + out = [] + lastE = 0 + # force preservation of <TAB>s, to minimize unwanted transformation of string, and to + # keep string locs straight between transformString and scanString + self.keepTabs = True + try: + for t,s,e in self.scanString( instring ): + out.append( instring[lastE:s] ) + if t: + if isinstance(t,ParseResults): + out += t.asList() + elif isinstance(t,list): + out += t + else: + out.append(t) + lastE = e + out.append(instring[lastE:]) + out = [o for o in out if o] + return "".join(map(_ustr,_flatten(out))) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def searchString( self, instring, maxMatches=_MAX_INT ): + """ + Another extension to C{L{scanString}}, simplifying the access to the tokens found + to match the given parse expression. May be called with optional + C{maxMatches} argument, to clip searching after 'n' matches are found. 
+ + Example:: + # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters + cap_word = Word(alphas.upper(), alphas.lower()) + + print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) + + # the sum() builtin can be used to merge results into a single ParseResults object + print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))) + prints:: + [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']] + ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] + """ + try: + return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): + """ + Generator method to split a string using the given expression as a separator. + May be called with optional C{maxsplit} argument, to limit the number of splits; + and the optional C{includeSeparators} argument (default=C{False}), if the separating + matching text should be included in the split results. + + Example:: + punc = oneOf(list(".,;:/-!?")) + print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) + prints:: + ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] + """ + splits = 0 + last = 0 + for t,s,e in self.scanString(instring, maxMatches=maxsplit): + yield instring[last:s] + if includeSeparators: + yield t[0] + last = e + yield instring[last:] + + def __add__(self, other ): + """ + Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement + converts them to L{Literal}s by default. + + Example:: + greet = Word(alphas) + "," + Word(alphas) + "!" + hello = "Hello, World!" + print (hello, "->", greet.parseString(hello)) + Prints:: + Hello, World! 
-> ['Hello', ',', 'World', '!']
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return And( [ self, other ] )
+
+    def __radd__(self, other ):
+        """
+        Implementation of + operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other + self
+
+    def __sub__(self, other):
+        """
+        Implementation of - operator, returns C{L{And}} with error stop
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return self + And._ErrorStop() + other
+
+    def __rsub__(self, other ):
+        """
+        Implementation of - operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other - self
+
+    def __mul__(self,other):
+        """
+        Implementation of * operator, allows use of C{expr * 3} in place of
+        C{expr + expr + expr}.  Expressions may also be multiplied by a 2-integer
+        tuple, similar to C{{min,max}} multipliers in regular expressions.  Tuples
+        may also include C{None} as in:
+         - C{expr*(n,None)} or C{expr*(n,)} is equivalent
+              to C{expr*n + L{ZeroOrMore}(expr)}
+              (read as "at least n instances of C{expr}")
+         - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
+              (read as "0 to n instances of C{expr}")
+         - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
+         - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}
+
+        Note that C{expr*(None,n)} does not raise an exception if
+        more than n exprs exist in the input stream; that is,
+        C{expr*(None,n)} does not enforce a maximum number of expr
+        occurrences.
If this behavior is desired, then write
+        C{expr*(None,n) + ~expr}
+        """
+        if isinstance(other,int):
+            minElements, optElements = other,0
+        elif isinstance(other,tuple):
+            other = (other + (None, None))[:2]
+            if other[0] is None:
+                other = (0, other[1])
+            if isinstance(other[0],int) and other[1] is None:
+                if other[0] == 0:
+                    return ZeroOrMore(self)
+                if other[0] == 1:
+                    return OneOrMore(self)
+                else:
+                    return self*other[0] + ZeroOrMore(self)
+            elif isinstance(other[0],int) and isinstance(other[1],int):
+                minElements, optElements = other
+                optElements -= minElements
+            else:
+                raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects" % (type(other[0]), type(other[1])))
+        else:
+            raise TypeError("cannot multiply 'ParserElement' and '%s' objects" % type(other))
+
+        if minElements < 0:
+            raise ValueError("cannot multiply ParserElement by negative value")
+        if optElements < 0:
+            raise ValueError("second tuple value must be greater or equal to first tuple value")
+        if minElements == optElements == 0:
+            raise ValueError("cannot multiply ParserElement by 0 or (0,0)")
+
+        if (optElements):
+            def makeOptionalList(n):
+                if n>1:
+                    return Optional(self + makeOptionalList(n-1))
+                else:
+                    return Optional(self)
+            if minElements:
+                if minElements == 1:
+                    ret = self + makeOptionalList(optElements)
+                else:
+                    ret = And([self]*minElements) + makeOptionalList(optElements)
+            else:
+                ret = makeOptionalList(optElements)
+        else:
+            if minElements == 1:
+                ret = self
+            else:
+                ret = And([self]*minElements)
+        return ret
+
+    def __rmul__(self, other):
+        return self.__mul__(other)
+
+    def __or__(self, other ):
+        """
+        Implementation of | operator - returns C{L{MatchFirst}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return MatchFirst( [ self, other ] )
+
+    def __ror__(self, other ):
+        """
+        Implementation of | operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other | self
+
+    def __xor__(self, other ):
+        """
+        Implementation of ^ operator - returns C{L{Or}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return Or( [ self, other ] )
+
+    def __rxor__(self, other ):
+        """
+        Implementation of ^ operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other ^ self
+
+    def __and__(self, other ):
+        """
+        Implementation of & operator - returns C{L{Each}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            
return None + return Each( [ self, other ] ) + + def __rand__(self, other ): + """ + Implementation of & operator when left operand is not a C{L{ParserElement}} + """ + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other & self + + def __invert__( self ): + """ + Implementation of ~ operator - returns C{L{NotAny}} + """ + return NotAny( self ) + + def __call__(self, name=None): + """ + Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. + + If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be + passed as C{True}. + + If C{name} is omitted, same as calling C{L{copy}}. + + Example:: + # these are equivalent + userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") + userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") + """ + if name is not None: + return self.setResultsName(name) + else: + return self.copy() + + def suppress( self ): + """ + Suppresses the output of this C{ParserElement}; useful to keep punctuation from + cluttering up returned output. + """ + return Suppress( self ) + + def leaveWhitespace( self ): + """ + Disables the skipping of whitespace before matching the characters in the + C{ParserElement}'s defined pattern. This is normally only used internally by + the pyparsing module, but may be needed in some whitespace-sensitive grammars. + """ + self.skipWhitespace = False + return self + + def setWhitespaceChars( self, chars ): + """ + Overrides the default whitespace chars + """ + self.skipWhitespace = True + self.whiteChars = chars + self.copyDefaultWhiteChars = False + return self + + def parseWithTabs( self ): + """ + Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string. + Must be called before C{parseString} when the input grammar contains elements that + match C{<TAB>} characters. + """ + self.keepTabs = True + return self + + def ignore( self, other ): + """ + Define expression to be ignored (e.g., comments) while doing pattern + matching; may be called repeatedly, to define multiple comment or other + ignorable patterns. + + Example:: + patt = OneOrMore(Word(alphas)) + patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] + + patt.ignore(cStyleComment) + patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] + """ + if isinstance(other, basestring): + other = Suppress(other) + + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + self.ignoreExprs.append(other) + else: + self.ignoreExprs.append( Suppress( other.copy() ) ) + return self + + def setDebugActions( self, startAction, successAction, exceptionAction ): + """ + Enable display of debugging messages while doing pattern matching. + """ + self.debugActions = (startAction or _defaultStartDebugAction, + successAction or _defaultSuccessDebugAction, + exceptionAction or _defaultExceptionDebugAction) + self.debug = True + return self + + def setDebug( self, flag=True ): + """ + Enable display of debugging messages while doing pattern matching. + Set C{flag} to True to enable, False to disable. 
+ + Example:: + wd = Word(alphas).setName("alphaword") + integer = Word(nums).setName("numword") + term = wd | integer + + # turn on debugging for wd + wd.setDebug() + + OneOrMore(term).parseString("abc 123 xyz 890") + + prints:: + Match alphaword at loc 0(1,1) + Matched alphaword -> ['abc'] + Match alphaword at loc 3(1,4) + Exception raised:Expected alphaword (at char 4), (line:1, col:5) + Match alphaword at loc 7(1,8) + Matched alphaword -> ['xyz'] + Match alphaword at loc 11(1,12) + Exception raised:Expected alphaword (at char 12), (line:1, col:13) + Match alphaword at loc 15(1,16) + Exception raised:Expected alphaword (at char 15), (line:1, col:16) + + The output shown is that produced by the default debug actions - custom debug actions can be + specified using L{setDebugActions}. Prior to attempting + to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"} + is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"} + message is shown. Also note the use of L{setName} to assign a human-readable name to the expression, + which makes debugging and exception messages easier to understand - for instance, the default + name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}. + """ + if flag: + self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) + else: + self.debug = False + return self + + def __str__( self ): + return self.name + + def __repr__( self ): + return _ustr(self) + + def streamline( self ): + self.streamlined = True + self.strRepr = None + return self + + def checkRecursion( self, parseElementList ): + pass + + def validate( self, validateTrace=[] ): + """ + Check defined expressions for valid structure, check for infinite recursive definitions. + """ + self.checkRecursion( [] ) + + def parseFile( self, file_or_filename, parseAll=False ): + """ + Execute the parse expression on the given file or filename. + If a filename is specified (instead of a file object), + the entire file is opened, read, and closed before parsing. + """ + try: + file_contents = file_or_filename.read() + except AttributeError: + with open(file_or_filename, "r") as f: + file_contents = f.read() + try: + return self.parseString(file_contents, parseAll) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def __eq__(self,other): + if isinstance(other, ParserElement): + return self is other or vars(self) == vars(other) + elif isinstance(other, basestring): + return self.matches(other) + else: + return super(ParserElement,self)==other + + def __ne__(self,other): + return not (self == other) + + def __hash__(self): + return hash(id(self)) + + def __req__(self,other): + return self == other + + def __rne__(self,other): + return not (self == other) + + def matches(self, testString, parseAll=True): + """ + Method for quick testing of a parser against a test string. Good for simple + inline microtests of sub expressions while building up larger parser. 
+
+        Parameters:
+         - testString - to test against this expression for a match
+         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
+
+        Example::
+            expr = Word(nums)
+            assert expr.matches("100")
+        """
+        try:
+            self.parseString(_ustr(testString), parseAll=parseAll)
+            return True
+        except ParseBaseException:
+            return False
+
+    def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False):
+        """
+        Execute the parse expression on a series of test strings, showing each
+        test, and either the parsed results or where the parse failed. Quick and
+        easy way to run a parse expression against a list of sample strings.
+
+        Parameters:
+         - tests - a list of separate test strings, or a multiline string of test strings
+         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
+         - comment - (default=C{'#'}) - expression for indicating embedded comments in the test
+              string; pass None to disable comment filtering
+         - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
+              if False, only dump nested list
+         - printResults - (default=C{True}) prints test output to stdout
+         - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing
+
+        Returns: a (success, results) tuple, where success indicates that all tests succeeded
+        (or failed if C{failureTests} is True), and the results contain a list of lines of each
+        test's output
+
+        Example::
+            number_expr = pyparsing_common.number.copy()
+
+            result = number_expr.runTests('''
+                # unsigned integer
+                100
+                # negative integer
+                -100
+                # float with scientific notation
+                6.02e23
+                # integer with scientific notation
+                1e-12
+                ''')
+            print("Success" if result[0] else "Failed!")
+
+            result = number_expr.runTests('''
+                # stray character
+                100Z
+                # missing leading digit before '.'
+                -.100
+                # too many '.'
+                3.14.159
+                ''', failureTests=True)
+            print("Success" if result[0] else "Failed!")
+        prints::
+            # unsigned integer
+            100
+            [100]
+
+            # negative integer
+            -100
+            [-100]
+
+            # float with scientific notation
+            6.02e23
+            [6.02e+23]
+
+            # integer with scientific notation
+            1e-12
+            [1e-12]
+
+            Success
+
+            # stray character
+            100Z
+               ^
+            FAIL: Expected end of text (at char 3), (line:1, col:4)
+
+            # missing leading digit before '.'
+            -.100
+            ^
+            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)
+
+            # too many '.'
+            3.14.159
+                ^
+            FAIL: Expected end of text (at char 4), (line:1, col:5)
+
+            Success
+
+        Each test string must be on a single line. If you want to test a string that spans multiple
+        lines, create a test like this::
+
+            expr.runTests(r"this is a test\\n of strings that spans \\n 3 lines")
+
+        (Note that this is a raw string literal, you must include the leading 'r'.)
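+
+        As a further, purely illustrative sketch, the returned tuple can be
+        unpacked and checked directly, e.g. from a test suite::
+
+            integer = Word(nums)
+            success, _ = integer.runTests('''
+                # simple unsigned integers
+                100
+                42
+                ''')
+            assert success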
+ """ + if isinstance(tests, basestring): + tests = list(map(str.strip, tests.rstrip().splitlines())) + if isinstance(comment, basestring): + comment = Literal(comment) + allResults = [] + comments = [] + success = True + for t in tests: + if comment is not None and comment.matches(t, False) or comments and not t: + comments.append(t) + continue + if not t: + continue + out = ['\n'.join(comments), t] + comments = [] + try: + t = t.replace(r'\n','\n') + result = self.parseString(t, parseAll=parseAll) + out.append(result.dump(full=fullDump)) + success = success and not failureTests + except ParseBaseException as pe: + fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" + if '\n' in t: + out.append(line(pe.loc, t)) + out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) + else: + out.append(' '*pe.loc + '^' + fatal) + out.append("FAIL: " + str(pe)) + success = success and failureTests + result = pe + except Exception as exc: + out.append("FAIL-EXCEPTION: " + str(exc)) + success = success and failureTests + result = exc + + if printResults: + if fullDump: + out.append('') + print('\n'.join(out)) + + allResults.append((t, result)) + + return success, allResults + + +class Token(ParserElement): + """ + Abstract C{ParserElement} subclass, for defining atomic matching patterns. + """ + def __init__( self ): + super(Token,self).__init__( savelist=False ) + + +class Empty(Token): + """ + An empty token, will always match. + """ + def __init__( self ): + super(Empty,self).__init__() + self.name = "Empty" + self.mayReturnEmpty = True + self.mayIndexError = False + + +class NoMatch(Token): + """ + A token that will never match. + """ + def __init__( self ): + super(NoMatch,self).__init__() + self.name = "NoMatch" + self.mayReturnEmpty = True + self.mayIndexError = False + self.errmsg = "Unmatchable token" + + def parseImpl( self, instring, loc, doActions=True ): + raise ParseException(instring, loc, self.errmsg, self) + + +class Literal(Token): + """ + Token to exactly match a specified string. + + Example:: + Literal('blah').parseString('blah') # -> ['blah'] + Literal('blah').parseString('blahfooblah') # -> ['blah'] + Literal('blah').parseString('bla') # -> Exception: Expected "blah" + + For case-insensitive matching, use L{CaselessLiteral}. + + For keyword matching (force word break before and after the matched string), + use L{Keyword} or L{CaselessKeyword}. 
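+
+    A further illustrative sketch - plain strings combined with other pyparsing
+    expressions are converted to C{Literal}s automatically::
+        greet = Literal("Hello") + "," + Word(alphas) + "!"
+        greet.parseString("Hello, World !")  # -> ['Hello', ',', 'World', '!']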
+ """ + def __init__( self, matchString ): + super(Literal,self).__init__() + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Literal; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.__class__ = Empty + self.name = '"%s"' % _ustr(self.match) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False + self.mayIndexError = False + + # Performance tuning: this routine gets called a *lot* + # if this is a single character match string and the first character matches, + # short-circuit as quickly as possible, and avoid calling startswith + #~ @profile + def parseImpl( self, instring, loc, doActions=True ): + if (instring[loc] == self.firstMatchChar and + (self.matchLen==1 or instring.startswith(self.match,loc)) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) +_L = Literal +ParserElement._literalStringClass = Literal + +class Keyword(Token): + """ + Token to exactly match a specified string as a keyword, that is, it must be + immediately followed by a non-keyword character. Compare with C{L{Literal}}: + - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}. + - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} + Accepts two optional constructor arguments in addition to the keyword string: + - C{identChars} is a string of characters that would be valid identifier characters, + defaulting to all alphanumerics + "_" and "$" + - C{caseless} allows case-insensitive matching, default is C{False}. + + Example:: + Keyword("start").parseString("start") # -> ['start'] + Keyword("start").parseString("starting") # -> Exception + + For case-insensitive matching, use L{CaselessKeyword}. 
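+
+    A further illustrative sketch of the keyword-boundary behavior described above::
+        Keyword("if").parseString("if(y==2)")    # -> ['if'] - '(' is not an identifier char
+        Keyword("if").parseString("ifAndOnlyIf") # -> Exception: Expected "if"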
+ """ + DEFAULT_KEYWORD_CHARS = alphanums+"_$" + + def __init__( self, matchString, identChars=None, caseless=False ): + super(Keyword,self).__init__() + if identChars is None: + identChars = Keyword.DEFAULT_KEYWORD_CHARS + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Keyword; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.name = '"%s"' % self.match + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False + self.mayIndexError = False + self.caseless = caseless + if caseless: + self.caselessmatch = matchString.upper() + identChars = identChars.upper() + self.identChars = set(identChars) + + def parseImpl( self, instring, loc, doActions=True ): + if self.caseless: + if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and + (loc == 0 or instring[loc-1].upper() not in self.identChars) ): + return loc+self.matchLen, self.match + else: + if (instring[loc] == self.firstMatchChar and + (self.matchLen==1 or instring.startswith(self.match,loc)) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and + (loc == 0 or instring[loc-1] not in self.identChars) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) + + def copy(self): + c = super(Keyword,self).copy() + c.identChars = Keyword.DEFAULT_KEYWORD_CHARS + return c + + @staticmethod + def setDefaultKeywordChars( chars ): + """Overrides the default Keyword chars + """ + Keyword.DEFAULT_KEYWORD_CHARS = chars + +class CaselessLiteral(Literal): + """ + Token to match a specified string, ignoring case of letters. + Note: the matched results will always be in the case of the given + match string, NOT the case of the input text. + + Example:: + OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] + + (Contrast with example for L{CaselessKeyword}.) + """ + def __init__( self, matchString ): + super(CaselessLiteral,self).__init__( matchString.upper() ) + # Preserve the defining literal. + self.returnString = matchString + self.name = "'%s'" % self.returnString + self.errmsg = "Expected " + self.name + + def parseImpl( self, instring, loc, doActions=True ): + if instring[ loc:loc+self.matchLen ].upper() == self.match: + return loc+self.matchLen, self.returnString + raise ParseException(instring, loc, self.errmsg, self) + +class CaselessKeyword(Keyword): + """ + Caseless version of L{Keyword}. + + Example:: + OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] + + (Contrast with example for L{CaselessLiteral}.) + """ + def __init__( self, matchString, identChars=None ): + super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) + + def parseImpl( self, instring, loc, doActions=True ): + if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) + +class CloseMatch(Token): + """ + A variation on L{Literal} which matches "close" matches, that is, + strings with at most 'n' mismatching characters. 
C{CloseMatch} takes parameters: + - C{match_string} - string to be matched + - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match + + The results from a successful parse will contain the matched text from the input string and the following named results: + - C{mismatches} - a list of the positions within the match_string where mismatches were found + - C{original} - the original match_string used to compare against the input string + + If C{mismatches} is an empty list, then the match was an exact match. + + Example:: + patt = CloseMatch("ATCATCGAATGGA") + patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) + patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) + + # exact match + patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) + + # close match allowing up to 2 mismatches + patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) + patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) + """ + def __init__(self, match_string, maxMismatches=1): + super(CloseMatch,self).__init__() + self.name = match_string + self.match_string = match_string + self.maxMismatches = maxMismatches + self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches) + self.mayIndexError = False + self.mayReturnEmpty = False + + def parseImpl( self, instring, loc, doActions=True ): + start = loc + instrlen = len(instring) + maxloc = start + len(self.match_string) + + if maxloc <= instrlen: + match_string = self.match_string + match_stringloc = 0 + mismatches = [] + maxMismatches = self.maxMismatches + + for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): + src,mat = s_m + if src != mat: + mismatches.append(match_stringloc) + if len(mismatches) > maxMismatches: + break + else: + loc = match_stringloc + 1 + results = ParseResults([instring[start:loc]]) + results['original'] = self.match_string + results['mismatches'] = mismatches + return loc, results + + raise ParseException(instring, loc, self.errmsg, self) + + +class Word(Token): + """ + Token for matching words composed of allowed character sets. + Defined with string containing all allowed initial characters, + an optional string containing allowed body characters (if omitted, + defaults to the initial character set), and an optional minimum, + maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. An optional + C{excludeChars} parameter can list characters that might be found in + the input C{bodyChars} string; useful to define a word of all printables + except for one or two characters, for instance. + + L{srange} is useful for defining custom character set strings for defining + C{Word} expressions, using range notation from regular expression character sets. + + A common mistake is to use C{Word} to match a specific literal string, as in + C{Word("Address")}. Remember that C{Word} uses the string argument to define + I{sets} of matchable characters. This expression would match "Add", "AAA", + "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'. + To match an exact literal string, use L{Literal} or L{Keyword}. 
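+
+    For instance (purely illustrative)::
+        # matches any word built from the characters A, d, r, e, s -
+        # probably not what was intended!
+        Word("Address").parseString("dAred")  # -> ['dAred']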
+ + pyparsing includes helper strings for building Words: + - L{alphas} + - L{nums} + - L{alphanums} + - L{hexnums} + - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.) + - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.) + - L{printables} (any non-whitespace character) + + Example:: + # a word composed of digits + integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) + + # a word with a leading capital, and zero or more lowercase + capital_word = Word(alphas.upper(), alphas.lower()) + + # hostnames are alphanumeric, with leading alpha, and '-' + hostname = Word(alphas, alphanums+'-') + + # roman numeral (not a strict parser, accepts invalid mix of characters) + roman = Word("IVXLCDM") + + # any string of non-whitespace characters, except for ',' + csv_value = Word(printables, excludeChars=",") + """ + def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): + super(Word,self).__init__() + if excludeChars: + initChars = ''.join(c for c in initChars if c not in excludeChars) + if bodyChars: + bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) + self.initCharsOrig = initChars + self.initChars = set(initChars) + if bodyChars : + self.bodyCharsOrig = bodyChars + self.bodyChars = set(bodyChars) + else: + self.bodyCharsOrig = initChars + self.bodyChars = set(initChars) + + self.maxSpecified = max > 0 + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.asKeyword = asKeyword + + if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): + if self.bodyCharsOrig == self.initCharsOrig: + self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) + elif len(self.initCharsOrig) == 1: + self.reString = "%s[%s]*" % \ + (re.escape(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + else: + self.reString = "[%s][%s]*" % \ + (_escapeRegexRangeChars(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + if self.asKeyword: + self.reString = r"\b"+self.reString+r"\b" + try: + self.re = re.compile( self.reString ) + except Exception: + self.re = None + + def parseImpl( self, instring, loc, doActions=True ): + if self.re: + result = self.re.match(instring,loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + return loc, result.group() + + if not(instring[ loc ] in self.initChars): + raise ParseException(instring, loc, self.errmsg, self) + + start = loc + loc += 1 + instrlen = len(instring) + bodychars = self.bodyChars + maxloc = start + self.maxLen + maxloc = min( maxloc, instrlen ) + while loc < maxloc and instring[loc] in bodychars: + loc += 1 + + throwException = False + if loc - start < self.minLen: + throwException = True + if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: + throwException = True + if self.asKeyword: + if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars): + throwException = True + + if throwException: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, 
instring[start:loc] + + def __str__( self ): + try: + return super(Word,self).__str__() + except Exception: + pass + + + if self.strRepr is None: + + def charsAsStr(s): + if len(s)>4: + return s[:4]+"..." + else: + return s + + if ( self.initCharsOrig != self.bodyCharsOrig ): + self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) + else: + self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) + + return self.strRepr + + +class Regex(Token): + r""" + Token for matching strings that match a given regular expression. + Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. + If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as + named parse results. + + Example:: + realnum = Regex(r"[+-]?\d+\.\d*") + date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)') + # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression + roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") + """ + compiledREtype = type(re.compile("[A-Z]")) + def __init__( self, pattern, flags=0): + """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.""" + super(Regex,self).__init__() + + if isinstance(pattern, basestring): + if not pattern: + warnings.warn("null string passed to Regex; use Empty() instead", + SyntaxWarning, stacklevel=2) + + self.pattern = pattern + self.flags = flags + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % pattern, + SyntaxWarning, stacklevel=2) + raise + + elif isinstance(pattern, Regex.compiledREtype): + self.re = pattern + self.pattern = \ + self.reString = str(pattern) + self.flags = flags + + else: + raise ValueError("Regex may only be constructed with a string or a compiled RE object") + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + result = self.re.match(instring,loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + d = result.groupdict() + ret = ParseResults(result.group()) + if d: + for k in d: + ret[k] = d[k] + return loc,ret + + def __str__( self ): + try: + return super(Regex,self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "Re:(%s)" % repr(self.pattern) + + return self.strRepr + + +class QuotedString(Token): + r""" + Token for matching strings that are delimited by quoting characters. 
+ + Defined with the following parameters: + - quoteChar - string of one or more characters defining the quote delimiting string + - escChar - character to escape quotes, typically backslash (default=C{None}) + - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None}) + - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) + - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) + - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) + - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True}) + + Example:: + qs = QuotedString('"') + print(qs.searchString('lsjdf "This is the quote" sldjf')) + complex_qs = QuotedString('{{', endQuoteChar='}}') + print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) + sql_qs = QuotedString('"', escQuote='""') + print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) + prints:: + [['This is the quote']] + [['This is the "quote"']] + [['This is the quote with "embedded" quotes']] + """ + def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): + super(QuotedString,self).__init__() + + # remove white space from quote chars - wont work anyway + quoteChar = quoteChar.strip() + if not quoteChar: + warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + raise SyntaxError() + + if endQuoteChar is None: + endQuoteChar = quoteChar + else: + endQuoteChar = endQuoteChar.strip() + if not endQuoteChar: + warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + raise SyntaxError() + + self.quoteChar = quoteChar + self.quoteCharLen = len(quoteChar) + self.firstQuoteChar = quoteChar[0] + self.endQuoteChar = endQuoteChar + self.endQuoteCharLen = len(endQuoteChar) + self.escChar = escChar + self.escQuote = escQuote + self.unquoteResults = unquoteResults + self.convertWhitespaceEscapes = convertWhitespaceEscapes + + if multiline: + self.flags = re.MULTILINE | re.DOTALL + self.pattern = r'%s(?:[^%s%s]' % \ + ( re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + else: + self.flags = 0 + self.pattern = r'%s(?:[^%s\n\r%s]' % \ + ( re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + if len(self.endQuoteChar) > 1: + self.pattern += ( + '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), + _escapeRegexRangeChars(self.endQuoteChar[i])) + for i in range(len(self.endQuoteChar)-1,0,-1)) + ')' + ) + if escQuote: + self.pattern += (r'|(?:%s)' % re.escape(escQuote)) + if escChar: + self.pattern += (r'|(?:%s.)' % re.escape(escChar)) + self.escCharReplacePattern = re.escape(self.escChar)+"(.)" + self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, + SyntaxWarning, stacklevel=2) + raise + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.mayReturnEmpty = True + + def 
parseImpl( self, instring, loc, doActions=True ): + result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + ret = result.group() + + if self.unquoteResults: + + # strip off quotes + ret = ret[self.quoteCharLen:-self.endQuoteCharLen] + + if isinstance(ret,basestring): + # replace escaped whitespace + if '\\' in ret and self.convertWhitespaceEscapes: + ws_map = { + r'\t' : '\t', + r'\n' : '\n', + r'\f' : '\f', + r'\r' : '\r', + } + for wslit,wschar in ws_map.items(): + ret = ret.replace(wslit, wschar) + + # replace escaped characters + if self.escChar: + ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) + + # replace escaped quotes + if self.escQuote: + ret = ret.replace(self.escQuote, self.endQuoteChar) + + return loc, ret + + def __str__( self ): + try: + return super(QuotedString,self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) + + return self.strRepr + + +class CharsNotIn(Token): + """ + Token for matching words composed of characters I{not} in a given set (will + include whitespace in matched characters if not listed in the provided exclusion set - see example). + Defined with string containing all disallowed characters, and an optional + minimum, maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. + + Example:: + # define a comma-separated-value as anything that is not a ',' + csv_value = CharsNotIn(',') + print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) + prints:: + ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] + """ + def __init__( self, notChars, min=1, max=0, exact=0 ): + super(CharsNotIn,self).__init__() + self.skipWhitespace = False + self.notChars = notChars + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = ( self.minLen == 0 ) + self.mayIndexError = False + + def parseImpl( self, instring, loc, doActions=True ): + if instring[loc] in self.notChars: + raise ParseException(instring, loc, self.errmsg, self) + + start = loc + loc += 1 + notchars = self.notChars + maxlen = min( start+self.maxLen, len(instring) ) + while loc < maxlen and \ + (instring[loc] not in notchars): + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + def __str__( self ): + try: + return super(CharsNotIn, self).__str__() + except Exception: + pass + + if self.strRepr is None: + if len(self.notChars) > 4: + self.strRepr = "!W:(%s...)" % self.notChars[:4] + else: + self.strRepr = "!W:(%s)" % self.notChars + + return self.strRepr + +class White(Token): + """ + Special matching class for matching whitespace. Normally, whitespace is ignored + by pyparsing grammars. This class is included when some whitespace structures + are significant. Define with a string containing the whitespace characters to be + matched; default is C{" \\t\\r\\n"}. 
Also takes optional C{min}, C{max}, and C{exact} arguments, + as defined for the C{L{Word}} class. + """ + whiteStrs = { + " " : "<SPC>", + "\t": "<TAB>", + "\n": "<LF>", + "\r": "<CR>", + "\f": "<FF>", + } + def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): + super(White,self).__init__() + self.matchWhite = ws + self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) + #~ self.leaveWhitespace() + self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) + self.mayReturnEmpty = True + self.errmsg = "Expected " + self.name + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + def parseImpl( self, instring, loc, doActions=True ): + if not(instring[ loc ] in self.matchWhite): + raise ParseException(instring, loc, self.errmsg, self) + start = loc + loc += 1 + maxloc = start + self.maxLen + maxloc = min( maxloc, len(instring) ) + while loc < maxloc and instring[loc] in self.matchWhite: + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + +class _PositionToken(Token): + def __init__( self ): + super(_PositionToken,self).__init__() + self.name=self.__class__.__name__ + self.mayReturnEmpty = True + self.mayIndexError = False + +class GoToColumn(_PositionToken): + """ + Token to advance to a specific column of input text; useful for tabular report scraping. + """ + def __init__( self, colno ): + super(GoToColumn,self).__init__() + self.col = colno + + def preParse( self, instring, loc ): + if col(loc,instring) != self.col: + instrlen = len(instring) + if self.ignoreExprs: + loc = self._skipIgnorables( instring, loc ) + while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : + loc += 1 + return loc + + def parseImpl( self, instring, loc, doActions=True ): + thiscol = col( loc, instring ) + if thiscol > self.col: + raise ParseException( instring, loc, "Text not in expected column", self ) + newloc = loc + self.col - thiscol + ret = instring[ loc: newloc ] + return newloc, ret + + +class LineStart(_PositionToken): + """ + Matches if current position is at the beginning of a line within the parse string + + Example:: + + test = '''\ + AAA this line + AAA and this line + AAA but not this one + B AAA and definitely not this one + ''' + + for t in (LineStart() + 'AAA' + restOfLine).searchString(test): + print(t) + + Prints:: + ['AAA', ' this line'] + ['AAA', ' and this line'] + + """ + def __init__( self ): + super(LineStart,self).__init__() + self.errmsg = "Expected start of line" + + def parseImpl( self, instring, loc, doActions=True ): + if col(loc, instring) == 1: + return loc, [] + raise ParseException(instring, loc, self.errmsg, self) + +class LineEnd(_PositionToken): + """ + Matches if current position is at the end of a line within the parse string + """ + def __init__( self ): + super(LineEnd,self).__init__() + self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) + self.errmsg = "Expected end of line" + + def parseImpl( self, instring, loc, doActions=True ): + if loc<len(instring): + if instring[loc] == "\n": + return loc+1, "\n" + else: + raise ParseException(instring, loc, self.errmsg, self) + elif loc == len(instring): + return loc+1, [] + else: + raise ParseException(instring, loc, self.errmsg, self) + +class StringStart(_PositionToken): + """ + Matches if current position is at the beginning of 
the parse string + """ + def __init__( self ): + super(StringStart,self).__init__() + self.errmsg = "Expected start of text" + + def parseImpl( self, instring, loc, doActions=True ): + if loc != 0: + # see if entire string up to here is just whitespace and ignoreables + if loc != self.preParse( instring, 0 ): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class StringEnd(_PositionToken): + """ + Matches if current position is at the end of the parse string + """ + def __init__( self ): + super(StringEnd,self).__init__() + self.errmsg = "Expected end of text" + + def parseImpl( self, instring, loc, doActions=True ): + if loc < len(instring): + raise ParseException(instring, loc, self.errmsg, self) + elif loc == len(instring): + return loc+1, [] + elif loc > len(instring): + return loc, [] + else: + raise ParseException(instring, loc, self.errmsg, self) + +class WordStart(_PositionToken): + """ + Matches if the current position is at the beginning of a Word, and + is not preceded by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of + the string being parsed, or at the beginning of a line. + """ + def __init__(self, wordChars = printables): + super(WordStart,self).__init__() + self.wordChars = set(wordChars) + self.errmsg = "Not at the start of a word" + + def parseImpl(self, instring, loc, doActions=True ): + if loc != 0: + if (instring[loc-1] in self.wordChars or + instring[loc] not in self.wordChars): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class WordEnd(_PositionToken): + """ + Matches if the current position is at the end of a Word, and + is not followed by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of + the string being parsed, or at the end of a line. + """ + def __init__(self, wordChars = printables): + super(WordEnd,self).__init__() + self.wordChars = set(wordChars) + self.skipWhitespace = False + self.errmsg = "Not at the end of a word" + + def parseImpl(self, instring, loc, doActions=True ): + instrlen = len(instring) + if instrlen>0 and loc<instrlen: + if (instring[loc] in self.wordChars or + instring[loc-1] not in self.wordChars): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + + +class ParseExpression(ParserElement): + """ + Abstract subclass of ParserElement, for combining and post-processing parsed tokens. 
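+
+    As an illustrative note, the operator forms defined on C{ParserElement}
+    each construct one of the concrete subclasses that follow::
+        Word(alphas) + Word(nums)   # -> And
+        Word(alphas) | Word(nums)   # -> MatchFirst
+        Word(alphas) ^ Word(nums)   # -> Or
+        Word(alphas) & Word(nums)   # -> Each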
+ """ + def __init__( self, exprs, savelist = False ): + super(ParseExpression,self).__init__(savelist) + if isinstance( exprs, _generatorType ): + exprs = list(exprs) + + if isinstance( exprs, basestring ): + self.exprs = [ ParserElement._literalStringClass( exprs ) ] + elif isinstance( exprs, Iterable ): + exprs = list(exprs) + # if sequence of strings provided, wrap with Literal + if all(isinstance(expr, basestring) for expr in exprs): + exprs = map(ParserElement._literalStringClass, exprs) + self.exprs = list(exprs) + else: + try: + self.exprs = list( exprs ) + except TypeError: + self.exprs = [ exprs ] + self.callPreparse = False + + def __getitem__( self, i ): + return self.exprs[i] + + def append( self, other ): + self.exprs.append( other ) + self.strRepr = None + return self + + def leaveWhitespace( self ): + """Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on + all contained expressions.""" + self.skipWhitespace = False + self.exprs = [ e.copy() for e in self.exprs ] + for e in self.exprs: + e.leaveWhitespace() + return self + + def ignore( self, other ): + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + super( ParseExpression, self).ignore( other ) + for e in self.exprs: + e.ignore( self.ignoreExprs[-1] ) + else: + super( ParseExpression, self).ignore( other ) + for e in self.exprs: + e.ignore( self.ignoreExprs[-1] ) + return self + + def __str__( self ): + try: + return super(ParseExpression,self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) ) + return self.strRepr + + def streamline( self ): + super(ParseExpression,self).streamline() + + for e in self.exprs: + e.streamline() + + # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d ) + # but only if there are no parse actions or resultsNames on the nested And's + # (likewise for Or's and MatchFirst's) + if ( len(self.exprs) == 2 ): + other = self.exprs[0] + if ( isinstance( other, self.__class__ ) and + not(other.parseAction) and + other.resultsName is None and + not other.debug ): + self.exprs = other.exprs[:] + [ self.exprs[1] ] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + + other = self.exprs[-1] + if ( isinstance( other, self.__class__ ) and + not(other.parseAction) and + other.resultsName is None and + not other.debug ): + self.exprs = self.exprs[:-1] + other.exprs[:] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + + self.errmsg = "Expected " + _ustr(self) + + return self + + def setResultsName( self, name, listAllMatches=False ): + ret = super(ParseExpression,self).setResultsName(name,listAllMatches) + return ret + + def validate( self, validateTrace=[] ): + tmp = validateTrace[:]+[self] + for e in self.exprs: + e.validate(tmp) + self.checkRecursion( [] ) + + def copy(self): + ret = super(ParseExpression,self).copy() + ret.exprs = [e.copy() for e in self.exprs] + return ret + +class And(ParseExpression): + """ + Requires all given C{ParseExpression}s to be found in the given order. + Expressions may be separated by whitespace. + May be constructed using the C{'+'} operator. + May also be constructed using the C{'-'} operator, which will suppress backtracking. 
+ + Example:: + integer = Word(nums) + name_expr = OneOrMore(Word(alphas)) + + expr = And([integer("id"),name_expr("name"),integer("age")]) + # more easily written as: + expr = integer("id") + name_expr("name") + integer("age") + """ + + class _ErrorStop(Empty): + def __init__(self, *args, **kwargs): + super(And._ErrorStop,self).__init__(*args, **kwargs) + self.name = '-' + self.leaveWhitespace() + + def __init__( self, exprs, savelist = True ): + super(And,self).__init__(exprs, savelist) + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + self.setWhitespaceChars( self.exprs[0].whiteChars ) + self.skipWhitespace = self.exprs[0].skipWhitespace + self.callPreparse = True + + def parseImpl( self, instring, loc, doActions=True ): + # pass False as last arg to _parse for first element, since we already + # pre-parsed the string as part of our And pre-parsing + loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False ) + errorStop = False + for e in self.exprs[1:]: + if isinstance(e, And._ErrorStop): + errorStop = True + continue + if errorStop: + try: + loc, exprtokens = e._parse( instring, loc, doActions ) + except ParseSyntaxException: + raise + except ParseBaseException as pe: + pe.__traceback__ = None + raise ParseSyntaxException._from_exception(pe) + except IndexError: + raise ParseSyntaxException(instring, len(instring), self.errmsg, self) + else: + loc, exprtokens = e._parse( instring, loc, doActions ) + if exprtokens or exprtokens.haskeys(): + resultlist += exprtokens + return loc, resultlist + + def __iadd__(self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + return self.append( other ) #And( [ self, other ] ) + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + if not e.mayReturnEmpty: + break + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + +class Or(ParseExpression): + """ + Requires that at least one C{ParseExpression} is found. + If two expressions match, the expression that matches the longest string will be used. + May be constructed using the C{'^'} operator. + + Example:: + # construct Or using '^' operator + + number = Word(nums) ^ Combine(Word(nums) + '.' 
+ Word(nums)) + print(number.searchString("123 3.1416 789")) + prints:: + [['123'], ['3.1416'], ['789']] + """ + def __init__( self, exprs, savelist = False ): + super(Or,self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + else: + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + maxExcLoc = -1 + maxException = None + matches = [] + for e in self.exprs: + try: + loc2 = e.tryParse( instring, loc ) + except ParseException as err: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring,len(instring),e.errmsg,self) + maxExcLoc = len(instring) + else: + # save match among all matches, to retry longest to shortest + matches.append((loc2, e)) + + if matches: + matches.sort(key=lambda x: -x[0]) + for _,e in matches: + try: + return e._parse( instring, loc, doActions ) + except ParseException as err: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + + def __ixor__(self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + return self.append( other ) #Or( [ self, other ] ) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class MatchFirst(ParseExpression): + """ + Requires that at least one C{ParseExpression} is found. + If two expressions match, the first one listed is the one that will match. + May be constructed using the C{'|'} operator. + + Example:: + # construct MatchFirst using '|' operator + + # watch the order of expressions to match + number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) + print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] + + # put more selective expression first + number = Combine(Word(nums) + '.' 
+ Word(nums)) | Word(nums) + print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] + """ + def __init__( self, exprs, savelist = False ): + super(MatchFirst,self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + else: + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + maxExcLoc = -1 + maxException = None + for e in self.exprs: + try: + ret = e._parse( instring, loc, doActions ) + return ret + except ParseException as err: + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring,len(instring),e.errmsg,self) + maxExcLoc = len(instring) + + # only got here if no expression matched, raise exception for match that made it the furthest + else: + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + def __ior__(self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + return self.append( other ) #MatchFirst( [ self, other ] ) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class Each(ParseExpression): + """ + Requires all given C{ParseExpression}s to be found, but in any order. + Expressions may be separated by whitespace. + May be constructed using the C{'&'} operator. 
+ + Example:: + color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") + shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") + integer = Word(nums) + shape_attr = "shape:" + shape_type("shape") + posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") + color_attr = "color:" + color("color") + size_attr = "size:" + integer("size") + + # use Each (using operator '&') to accept attributes in any order + # (shape and posn are required, color and size are optional) + shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) + + shape_spec.runTests(''' + shape: SQUARE color: BLACK posn: 100, 120 + shape: CIRCLE size: 50 color: BLUE posn: 50,80 + color:GREEN size:20 shape:TRIANGLE posn:20,40 + ''' + ) + prints:: + shape: SQUARE color: BLACK posn: 100, 120 + ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] + - color: BLACK + - posn: ['100', ',', '120'] + - x: 100 + - y: 120 + - shape: SQUARE + + + shape: CIRCLE size: 50 color: BLUE posn: 50,80 + ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] + - color: BLUE + - posn: ['50', ',', '80'] + - x: 50 + - y: 80 + - shape: CIRCLE + - size: 50 + + + color: GREEN size: 20 shape: TRIANGLE posn: 20,40 + ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] + - color: GREEN + - posn: ['20', ',', '40'] + - x: 20 + - y: 40 + - shape: TRIANGLE + - size: 20 + """ + def __init__( self, exprs, savelist = True ): + super(Each,self).__init__(exprs, savelist) + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + self.skipWhitespace = True + self.initExprGroups = True + + def parseImpl( self, instring, loc, doActions=True ): + if self.initExprGroups: + self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) + opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] + opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)] + self.optionals = opt1 + opt2 + self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] + self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] + self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] + self.required += self.multirequired + self.initExprGroups = False + tmpLoc = loc + tmpReqd = self.required[:] + tmpOpt = self.optionals[:] + matchOrder = [] + + keepMatching = True + while keepMatching: + tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired + failed = [] + for e in tmpExprs: + try: + tmpLoc = e.tryParse( instring, tmpLoc ) + except ParseException: + failed.append(e) + else: + matchOrder.append(self.opt1map.get(id(e),e)) + if e in tmpReqd: + tmpReqd.remove(e) + elif e in tmpOpt: + tmpOpt.remove(e) + if len(failed) == len(tmpExprs): + keepMatching = False + + if tmpReqd: + missing = ", ".join(_ustr(e) for e in tmpReqd) + raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) + + # add any unmatched Optionals, in case they have default values defined + matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] + + resultlist = [] + for e in matchOrder: + loc,results = e._parse(instring,loc,doActions) + resultlist.append(results) + + finalResults = sum(resultlist, ParseResults([])) + return loc, finalResults + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " & 
".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class ParseElementEnhance(ParserElement): + """ + Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. + """ + def __init__( self, expr, savelist=False ): + super(ParseElementEnhance,self).__init__(savelist) + if isinstance( expr, basestring ): + if issubclass(ParserElement._literalStringClass, Token): + expr = ParserElement._literalStringClass(expr) + else: + expr = ParserElement._literalStringClass(Literal(expr)) + self.expr = expr + self.strRepr = None + if expr is not None: + self.mayIndexError = expr.mayIndexError + self.mayReturnEmpty = expr.mayReturnEmpty + self.setWhitespaceChars( expr.whiteChars ) + self.skipWhitespace = expr.skipWhitespace + self.saveAsList = expr.saveAsList + self.callPreparse = expr.callPreparse + self.ignoreExprs.extend(expr.ignoreExprs) + + def parseImpl( self, instring, loc, doActions=True ): + if self.expr is not None: + return self.expr._parse( instring, loc, doActions, callPreParse=False ) + else: + raise ParseException("",loc,self.errmsg,self) + + def leaveWhitespace( self ): + self.skipWhitespace = False + self.expr = self.expr.copy() + if self.expr is not None: + self.expr.leaveWhitespace() + return self + + def ignore( self, other ): + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + super( ParseElementEnhance, self).ignore( other ) + if self.expr is not None: + self.expr.ignore( self.ignoreExprs[-1] ) + else: + super( ParseElementEnhance, self).ignore( other ) + if self.expr is not None: + self.expr.ignore( self.ignoreExprs[-1] ) + return self + + def streamline( self ): + super(ParseElementEnhance,self).streamline() + if self.expr is not None: + self.expr.streamline() + return self + + def checkRecursion( self, parseElementList ): + if self in parseElementList: + raise RecursiveGrammarException( parseElementList+[self] ) + subRecCheckList = parseElementList[:] + [ self ] + if self.expr is not None: + self.expr.checkRecursion( subRecCheckList ) + + def validate( self, validateTrace=[] ): + tmp = validateTrace[:]+[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion( [] ) + + def __str__( self ): + try: + return super(ParseElementEnhance,self).__str__() + except Exception: + pass + + if self.strRepr is None and self.expr is not None: + self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) + return self.strRepr + + +class FollowedBy(ParseElementEnhance): + """ + Lookahead matching of the given parse expression. C{FollowedBy} + does I{not} advance the parsing position within the input string, it only + verifies that the specified parse expression matches at the current + position. C{FollowedBy} always returns a null token list. 
+
+    Example::
+        # use FollowedBy to match a label only if it is followed by a ':'
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+
+        OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
+    prints::
+        [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
+    """
+    def __init__( self, expr ):
+        super(FollowedBy,self).__init__(expr)
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        self.expr.tryParse( instring, loc )
+        return loc, []
+
+
+class NotAny(ParseElementEnhance):
+    """
+    Lookahead to disallow matching with the given parse expression. C{NotAny}
+    does I{not} advance the parsing position within the input string, it only
+    verifies that the specified parse expression does I{not} match at the current
+    position. Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny}
+    always returns a null token list. May be constructed using the '~' operator.
+
+    Example (illustrative)::
+        AND, OR, NOT = map(CaselessKeyword, "AND OR NOT".split())
+        # take care not to mistake keywords for identifiers
+        ident = ~(AND | OR | NOT) + Word(alphas)
+    """
+    def __init__( self, expr ):
+        super(NotAny,self).__init__(expr)
+        #~ self.leaveWhitespace()
+        self.skipWhitespace = False  # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
+        self.mayReturnEmpty = True
+        self.errmsg = "Found unwanted token, "+_ustr(self.expr)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.expr.canParseNext(instring, loc):
+            raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "~{" + _ustr(self.expr) + "}"
+
+        return self.strRepr
+
+class _MultipleMatch(ParseElementEnhance):
+    def __init__( self, expr, stopOn=None):
+        super(_MultipleMatch, self).__init__(expr)
+        self.saveAsList = True
+        ender = stopOn
+        if isinstance(ender, basestring):
+            ender = ParserElement._literalStringClass(ender)
+        self.not_ender = ~ender if ender is not None else None
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        self_expr_parse = self.expr._parse
+        self_skip_ignorables = self._skipIgnorables
+        check_ender = self.not_ender is not None
+        if check_ender:
+            try_not_ender = self.not_ender.tryParse
+
+        # must be at least one (but first see if we are the stopOn sentinel;
+        # if so, fail)
+        if check_ender:
+            try_not_ender(instring, loc)
+        loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False )
+        try:
+            hasIgnoreExprs = (not not self.ignoreExprs)
+            while 1:
+                if check_ender:
+                    try_not_ender(instring, loc)
+                if hasIgnoreExprs:
+                    preloc = self_skip_ignorables( instring, loc )
+                else:
+                    preloc = loc
+                loc, tmptokens = self_expr_parse( instring, preloc, doActions )
+                if tmptokens or tmptokens.haskeys():
+                    tokens += tmptokens
+        except (ParseException,IndexError):
+            pass
+
+        return loc, tokens
+
+class OneOrMore(_MultipleMatch):
+    """
+    Repetition of one or more of the given expression.
+
+    Parameters:
+     - expr - expression that must match one or more times
+     - stopOn - (default=C{None}) - expression for a terminating sentinel
+          (only required if the sentinel would ordinarily match the repetition
+          expression)
+
+    Example::
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+        text = "shape: SQUARE posn: upper left color: BLACK"
+        OneOrMore(attr_expr).parseString(text).pprint()  # Fail! 
read 'color' as data instead of next label -> [['shape', 'SQUARE color']] + + # use stopOn attribute for OneOrMore to avoid reading label string as part of the data + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']] + + # could also be written as + (attr_expr * (1,)).parseString(text).pprint() + """ + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + _ustr(self.expr) + "}..." + + return self.strRepr + +class ZeroOrMore(_MultipleMatch): + """ + Optional repetition of zero or more of the given expression. + + Parameters: + - expr - expression that must match zero or more times + - stopOn - (default=C{None}) - expression for a terminating sentinel + (only required if the sentinel would ordinarily match the repetition + expression) + + Example: similar to L{OneOrMore} + """ + def __init__( self, expr, stopOn=None): + super(ZeroOrMore,self).__init__(expr, stopOn=stopOn) + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + try: + return super(ZeroOrMore, self).parseImpl(instring, loc, doActions) + except (ParseException,IndexError): + return loc, [] + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]..." + + return self.strRepr + +class _NullToken(object): + def __bool__(self): + return False + __nonzero__ = __bool__ + def __str__(self): + return "" + +_optionalNotMatched = _NullToken() +class Optional(ParseElementEnhance): + """ + Optional matching of the given expression. + + Parameters: + - expr - expression that must match zero or more times + - default (optional) - value to be returned if the optional expression is not found. + + Example:: + # US postal code can be a 5-digit zip, plus optional 4-digit qualifier + zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4))) + zip.runTests(''' + # traditional ZIP code + 12345 + + # ZIP+4 form + 12101-0001 + + # invalid ZIP + 98765- + ''') + prints:: + # traditional ZIP code + 12345 + ['12345'] + + # ZIP+4 form + 12101-0001 + ['12101-0001'] + + # invalid ZIP + 98765- + ^ + FAIL: Expected end of text (at char 5), (line:1, col:6) + """ + def __init__( self, expr, default=_optionalNotMatched ): + super(Optional,self).__init__( expr, savelist=False ) + self.saveAsList = self.expr.saveAsList + self.defaultValue = default + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + try: + loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) + except (ParseException,IndexError): + if self.defaultValue is not _optionalNotMatched: + if self.expr.resultsName: + tokens = ParseResults([ self.defaultValue ]) + tokens[self.expr.resultsName] = self.defaultValue + else: + tokens = [ self.defaultValue ] + else: + tokens = [] + return loc, tokens + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]" + + return self.strRepr + +class SkipTo(ParseElementEnhance): + """ + Token for skipping over all undefined text until the matched expression is found. 
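+    As a quick sketch (the grammar below is illustrative only)::
+        # capture everything up to, but not including, the next ';'
+        stmt_body = SkipTo(';')
+        print(stmt_body.parseString('x = 1; y = 2'))   # -> ['x = 1']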
+ + Parameters: + - expr - target expression marking the end of the data to be skipped + - include - (default=C{False}) if True, the target expression is also parsed + (the skipped text and target expression are returned as a 2-element list). + - ignore - (default=C{None}) used to define grammars (typically quoted strings and + comments) that might contain false matches to the target expression + - failOn - (default=C{None}) define expressions that are not allowed to be + included in the skipped test; if found before the target expression is found, + the SkipTo is not a match + + Example:: + report = ''' + Outstanding Issues Report - 1 Jan 2000 + + # | Severity | Description | Days Open + -----+----------+-------------------------------------------+----------- + 101 | Critical | Intermittent system crash | 6 + 94 | Cosmetic | Spelling error on Login ('log|n') | 14 + 79 | Minor | System slow when running too many reports | 47 + ''' + integer = Word(nums) + SEP = Suppress('|') + # use SkipTo to simply match everything up until the next SEP + # - ignore quoted strings, so that a '|' character inside a quoted string does not match + # - parse action will call token.strip() for each matched token, i.e., the description body + string_data = SkipTo(SEP, ignore=quotedString) + string_data.setParseAction(tokenMap(str.strip)) + ticket_expr = (integer("issue_num") + SEP + + string_data("sev") + SEP + + string_data("desc") + SEP + + integer("days_open")) + + for tkt in ticket_expr.searchString(report): + print tkt.dump() + prints:: + ['101', 'Critical', 'Intermittent system crash', '6'] + - days_open: 6 + - desc: Intermittent system crash + - issue_num: 101 + - sev: Critical + ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14'] + - days_open: 14 + - desc: Spelling error on Login ('log|n') + - issue_num: 94 + - sev: Cosmetic + ['79', 'Minor', 'System slow when running too many reports', '47'] + - days_open: 47 + - desc: System slow when running too many reports + - issue_num: 79 + - sev: Minor + """ + def __init__( self, other, include=False, ignore=None, failOn=None ): + super( SkipTo, self ).__init__( other ) + self.ignoreExpr = ignore + self.mayReturnEmpty = True + self.mayIndexError = False + self.includeMatch = include + self.asList = False + if isinstance(failOn, basestring): + self.failOn = ParserElement._literalStringClass(failOn) + else: + self.failOn = failOn + self.errmsg = "No match found for "+_ustr(self.expr) + + def parseImpl( self, instring, loc, doActions=True ): + startloc = loc + instrlen = len(instring) + expr = self.expr + expr_parse = self.expr._parse + self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None + self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None + + tmploc = loc + while tmploc <= instrlen: + if self_failOn_canParseNext is not None: + # break if failOn expression matches + if self_failOn_canParseNext(instring, tmploc): + break + + if self_ignoreExpr_tryParse is not None: + # advance past ignore expressions + while 1: + try: + tmploc = self_ignoreExpr_tryParse(instring, tmploc) + except ParseBaseException: + break + + try: + expr_parse(instring, tmploc, doActions=False, callPreParse=False) + except (ParseException, IndexError): + # no match, advance loc in string + tmploc += 1 + else: + # matched skipto expr, done + break + + else: + # ran off the end of the input string without matching skipto expr, fail + raise ParseException(instring, loc, self.errmsg, self) + + # build up return 
values + loc = tmploc + skiptext = instring[startloc:loc] + skipresult = ParseResults(skiptext) + + if self.includeMatch: + loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) + skipresult += mat + + return loc, skipresult + +class Forward(ParseElementEnhance): + """ + Forward declaration of an expression to be defined later - + used for recursive grammars, such as algebraic infix notation. + When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. + + Note: take care when assigning to C{Forward} not to overlook precedence of operators. + Specifically, '|' has a lower precedence than '<<', so that:: + fwdExpr << a | b | c + will actually be evaluated as:: + (fwdExpr << a) | b | c + thereby leaving b and c out as parseable alternatives. It is recommended that you + explicitly group the values inserted into the C{Forward}:: + fwdExpr << (a | b | c) + Converting to use the '<<=' operator instead will avoid this problem. + + See L{ParseResults.pprint} for an example of a recursive parser created using + C{Forward}. + """ + def __init__( self, other=None ): + super(Forward,self).__init__( other, savelist=False ) + + def __lshift__( self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass(other) + self.expr = other + self.strRepr = None + self.mayIndexError = self.expr.mayIndexError + self.mayReturnEmpty = self.expr.mayReturnEmpty + self.setWhitespaceChars( self.expr.whiteChars ) + self.skipWhitespace = self.expr.skipWhitespace + self.saveAsList = self.expr.saveAsList + self.ignoreExprs.extend(self.expr.ignoreExprs) + return self + + def __ilshift__(self, other): + return self << other + + def leaveWhitespace( self ): + self.skipWhitespace = False + return self + + def streamline( self ): + if not self.streamlined: + self.streamlined = True + if self.expr is not None: + self.expr.streamline() + return self + + def validate( self, validateTrace=[] ): + if self not in validateTrace: + tmp = validateTrace[:]+[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion([]) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + return self.__class__.__name__ + ": ..." + + # stubbed out for now - creates awful memory and perf issues + self._revertClass = self.__class__ + self.__class__ = _ForwardNoRecurse + try: + if self.expr is not None: + retString = _ustr(self.expr) + else: + retString = "None" + finally: + self.__class__ = self._revertClass + return self.__class__.__name__ + ": " + retString + + def copy(self): + if self.expr is not None: + return super(Forward,self).copy() + else: + ret = Forward() + ret <<= self + return ret + +class _ForwardNoRecurse(Forward): + def __str__( self ): + return "..." + +class TokenConverter(ParseElementEnhance): + """ + Abstract subclass of C{ParseExpression}, for converting parsed results. + """ + def __init__( self, expr, savelist=False ): + super(TokenConverter,self).__init__( expr )#, savelist ) + self.saveAsList = False + +class Combine(TokenConverter): + """ + Converter to concatenate all matching tokens to a single string. + By default, the matching patterns must also be contiguous in the input string; + this can be disabled by specifying C{'adjacent=False'} in the constructor. + + Example:: + real = Word(nums) + '.' + Word(nums) + print(real.parseString('3.1416')) # -> ['3', '.', '1416'] + # will also erroneously match the following + print(real.parseString('3. 
1416')) # -> ['3', '.', '1416'] + + real = Combine(Word(nums) + '.' + Word(nums)) + print(real.parseString('3.1416')) # -> ['3.1416'] + # no match when there are internal spaces + print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...) + """ + def __init__( self, expr, joinString="", adjacent=True ): + super(Combine,self).__init__( expr ) + # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself + if adjacent: + self.leaveWhitespace() + self.adjacent = adjacent + self.skipWhitespace = True + self.joinString = joinString + self.callPreparse = True + + def ignore( self, other ): + if self.adjacent: + ParserElement.ignore(self, other) + else: + super( Combine, self).ignore( other ) + return self + + def postParse( self, instring, loc, tokenlist ): + retToks = tokenlist.copy() + del retToks[:] + retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) + + if self.resultsName and retToks.haskeys(): + return [ retToks ] + else: + return retToks + +class Group(TokenConverter): + """ + Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions. + + Example:: + ident = Word(alphas) + num = Word(nums) + term = ident | num + func = ident + Optional(delimitedList(term)) + print(func.parseString("fn a,b,100")) # -> ['fn', 'a', 'b', '100'] + + func = ident + Group(Optional(delimitedList(term))) + print(func.parseString("fn a,b,100")) # -> ['fn', ['a', 'b', '100']] + """ + def __init__( self, expr ): + super(Group,self).__init__( expr ) + self.saveAsList = True + + def postParse( self, instring, loc, tokenlist ): + return [ tokenlist ] + +class Dict(TokenConverter): + """ + Converter to return a repetitive expression as a list, but also as a dictionary. + Each element can also be referenced using the first token in the expression as its key. + Useful for tabular report scraping when the first column can be used as a item key. + + Example:: + data_word = Word(alphas) + label = data_word + FollowedBy(':') + attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join)) + + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + + # print attributes as plain groups + print(OneOrMore(attr_expr).parseString(text).dump()) + + # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names + result = Dict(OneOrMore(Group(attr_expr))).parseString(text) + print(result.dump()) + + # access named fields as dict entries, or output as dict + print(result['shape']) + print(result.asDict()) + prints:: + ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap'] + + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: light blue + - posn: upper left + - shape: SQUARE + - texture: burlap + SQUARE + {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'} + See more examples at L{ParseResults} of accessing fields by results name. 
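+
+    As an additional minimal sketch (the key=value grammar is invented for
+    illustration)::
+        pairs = Dict(OneOrMore(Group(Word(alphas) + Suppress('=') + Word(nums))))
+        print(pairs.parseString('a=1 b=2')['b'])   # -> 2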
+ """ + def __init__( self, expr ): + super(Dict,self).__init__( expr ) + self.saveAsList = True + + def postParse( self, instring, loc, tokenlist ): + for i,tok in enumerate(tokenlist): + if len(tok) == 0: + continue + ikey = tok[0] + if isinstance(ikey,int): + ikey = _ustr(tok[0]).strip() + if len(tok)==1: + tokenlist[ikey] = _ParseResultsWithOffset("",i) + elif len(tok)==2 and not isinstance(tok[1],ParseResults): + tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) + else: + dictvalue = tok.copy() #ParseResults(i) + del dictvalue[0] + if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) + else: + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) + + if self.resultsName: + return [ tokenlist ] + else: + return tokenlist + + +class Suppress(TokenConverter): + """ + Converter for ignoring the results of a parsed expression. + + Example:: + source = "a, b, c,d" + wd = Word(alphas) + wd_list1 = wd + ZeroOrMore(',' + wd) + print(wd_list1.parseString(source)) + + # often, delimiters that are useful during parsing are just in the + # way afterward - use Suppress to keep them out of the parsed output + wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) + print(wd_list2.parseString(source)) + prints:: + ['a', ',', 'b', ',', 'c', ',', 'd'] + ['a', 'b', 'c', 'd'] + (See also L{delimitedList}.) + """ + def postParse( self, instring, loc, tokenlist ): + return [] + + def suppress( self ): + return self + + +class OnlyOnce(object): + """ + Wrapper for parse actions, to ensure they are only called once. + """ + def __init__(self, methodCall): + self.callable = _trim_arity(methodCall) + self.called = False + def __call__(self,s,l,t): + if not self.called: + results = self.callable(s,l,t) + self.called = True + return results + raise ParseException(s,l,"") + def reset(self): + self.called = False + +def traceParseAction(f): + """ + Decorator for debugging parse actions. + + When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".} + When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised. + + Example:: + wd = Word(alphas) + + @traceParseAction + def remove_duplicate_chars(tokens): + return ''.join(sorted(set(''.join(tokens)))) + + wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) + print(wds.parseString("slkdjs sld sldd sdlf sdljf")) + prints:: + >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) + <<leaving remove_duplicate_chars (ret: 'dfjkls') + ['dfjkls'] + """ + f = _trim_arity(f) + def z(*paArgs): + thisFunc = f.__name__ + s,l,t = paArgs[-3:] + if len(paArgs)>3: + thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc + sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) + try: + ret = f(*paArgs) + except Exception as exc: + sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) ) + raise + sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) ) + return ret + try: + z.__name__ = f.__name__ + except AttributeError: + pass + return z + +# +# global helpers +# +def delimitedList( expr, delim=",", combine=False ): + """ + Helper to define a delimited list of expressions - the delimiter defaults to ','. 
+ By default, the list elements and delimiters can have intervening whitespace, and + comments, but this can be overridden by passing C{combine=True} in the constructor. + If C{combine} is set to C{True}, the matching tokens are returned as a single token + string, with the delimiters included; otherwise, the matching tokens are returned + as a list of tokens, with the delimiters suppressed. + + Example:: + delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc'] + delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] + """ + dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." + if combine: + return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) + else: + return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) + +def countedArray( expr, intExpr=None ): + """ + Helper to define a counted list of expressions. + This helper defines a pattern of the form:: + integer expr expr expr... + where the leading integer tells how many expr expressions follow. + The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. + + If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value. + + Example:: + countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] + + # in this parser, the leading integer value is given in binary, + # '10' indicating that 2 values are in the array + binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) + countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] + """ + arrayExpr = Forward() + def countFieldParseAction(s,l,t): + n = t[0] + arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) + return [] + if intExpr is None: + intExpr = Word(nums).setParseAction(lambda t:int(t[0])) + else: + intExpr = intExpr.copy() + intExpr.setName("arrayLen") + intExpr.addParseAction(countFieldParseAction, callDuringTry=True) + return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') + +def _flatten(L): + ret = [] + for i in L: + if isinstance(i,list): + ret.extend(_flatten(i)) + else: + ret.append(i) + return ret + +def matchPreviousLiteral(expr): + """ + Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks + for a 'repeat' of a previous expression. For example:: + first = Word(nums) + second = matchPreviousLiteral(first) + matchExpr = first + ":" + second + will match C{"1:1"}, but not C{"1:2"}. Because this matches a + previous literal, will also match the leading C{"1:1"} in C{"1:10"}. + If this is not desired, use C{matchPreviousExpr}. + Do I{not} use with packrat parsing enabled. + """ + rep = Forward() + def copyTokenToRepeater(s,l,t): + if t: + if len(t) == 1: + rep << t[0] + else: + # flatten t tokens + tflat = _flatten(t.asList()) + rep << And(Literal(tt) for tt in tflat) + else: + rep << Empty() + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName('(prev) ' + _ustr(expr)) + return rep + +def matchPreviousExpr(expr): + """ + Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks + for a 'repeat' of a previous expression. For example:: + first = Word(nums) + second = matchPreviousExpr(first) + matchExpr = first + ":" + second + will match C{"1:1"}, but not C{"1:2"}. 
Because this matches by + expressions, will I{not} match the leading C{"1:1"} in C{"1:10"}; + the expressions are evaluated first, and then compared, so + C{"1"} is compared with C{"10"}. + Do I{not} use with packrat parsing enabled. + """ + rep = Forward() + e2 = expr.copy() + rep <<= e2 + def copyTokenToRepeater(s,l,t): + matchTokens = _flatten(t.asList()) + def mustMatchTheseTokens(s,l,t): + theseTokens = _flatten(t.asList()) + if theseTokens != matchTokens: + raise ParseException("",0,"") + rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName('(prev) ' + _ustr(expr)) + return rep + +def _escapeRegexRangeChars(s): + #~ escape these chars: ^-] + for c in r"\^-]": + s = s.replace(c,_bslash+c) + s = s.replace("\n",r"\n") + s = s.replace("\t",r"\t") + return _ustr(s) + +def oneOf( strs, caseless=False, useRegex=True ): + """ + Helper to quickly define a set of alternative Literals, and makes sure to do + longest-first testing when there is a conflict, regardless of the input order, + but returns a C{L{MatchFirst}} for best performance. + + Parameters: + - strs - a string of space-delimited literals, or a collection of string literals + - caseless - (default=C{False}) - treat all literals as caseless + - useRegex - (default=C{True}) - as an optimization, will generate a Regex + object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or + if creating a C{Regex} raises an exception) + + Example:: + comp_oper = oneOf("< = > <= >= !=") + var = Word(alphas) + number = Word(nums) + term = var | number + comparison_expr = term + comp_oper + term + print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) + prints:: + [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] + """ + if caseless: + isequal = ( lambda a,b: a.upper() == b.upper() ) + masks = ( lambda a,b: b.upper().startswith(a.upper()) ) + parseElementClass = CaselessLiteral + else: + isequal = ( lambda a,b: a == b ) + masks = ( lambda a,b: b.startswith(a) ) + parseElementClass = Literal + + symbols = [] + if isinstance(strs,basestring): + symbols = strs.split() + elif isinstance(strs, Iterable): + symbols = list(strs) + else: + warnings.warn("Invalid argument to oneOf, expected string or iterable", + SyntaxWarning, stacklevel=2) + if not symbols: + return NoMatch() + + i = 0 + while i < len(symbols)-1: + cur = symbols[i] + for j,other in enumerate(symbols[i+1:]): + if ( isequal(other, cur) ): + del symbols[i+j+1] + break + elif ( masks(cur, other) ): + del symbols[i+j+1] + symbols.insert(i,other) + cur = other + break + else: + i += 1 + + if not caseless and useRegex: + #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) + try: + if len(symbols)==len("".join(symbols)): + return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) + else: + return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) + except Exception: + warnings.warn("Exception creating Regex for oneOf, building MatchFirst", + SyntaxWarning, stacklevel=2) + + + # last resort, just use MatchFirst + return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) + +def dictOf( key, value ): + """ + Helper to easily and clearly define a dictionary by specifying the respective patterns + for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens + in the proper order. 
The key pattern can include delimiting markers or punctuation, + as long as they are suppressed, thereby leaving the significant key text. The value + pattern can include named results, so that the C{Dict} results can include named token + fields. + + Example:: + text = "shape: SQUARE posn: upper left color: light blue texture: burlap" + attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)) + print(OneOrMore(attr_expr).parseString(text).dump()) + + attr_label = label + attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join) + + # similar to Dict, but simpler call format + result = dictOf(attr_label, attr_value).parseString(text) + print(result.dump()) + print(result['shape']) + print(result.shape) # object attribute access works too + print(result.asDict()) + prints:: + [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']] + - color: light blue + - posn: upper left + - shape: SQUARE + - texture: burlap + SQUARE + SQUARE + {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'} + """ + return Dict( ZeroOrMore( Group ( key + value ) ) ) + +def originalTextFor(expr, asString=True): + """ + Helper to return the original, untokenized text for a given expression. Useful to + restore the parsed fields of an HTML start tag into the raw tag text itself, or to + revert separate tokens with intervening whitespace back to the original matching + input text. By default, returns astring containing the original parsed text. + + If the optional C{asString} argument is passed as C{False}, then the return value is a + C{L{ParseResults}} containing any results names that were originally matched, and a + single token containing the original matched text from the input string. So if + the expression passed to C{L{originalTextFor}} contains expressions with defined + results names, you must set C{asString} to C{False} if you want to preserve those + results name values. + + Example:: + src = "this is test <b> bold <i>text</i> </b> normal text " + for tag in ("b","i"): + opener,closer = makeHTMLTags(tag) + patt = originalTextFor(opener + SkipTo(closer) + closer) + print(patt.searchString(src)[0]) + prints:: + ['<b> bold <i>text</i> </b>'] + ['<i>text</i>'] + """ + locMarker = Empty().setParseAction(lambda s,loc,t: loc) + endlocMarker = locMarker.copy() + endlocMarker.callPreparse = False + matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") + if asString: + extractText = lambda s,l,t: s[t._original_start:t._original_end] + else: + def extractText(s,l,t): + t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]] + matchExpr.setParseAction(extractText) + matchExpr.ignoreExprs = expr.ignoreExprs + return matchExpr + +def ungroup(expr): + """ + Helper to undo pyparsing's default grouping of And expressions, even + if all but one are non-empty. + """ + return TokenConverter(expr).setParseAction(lambda t:t[0]) + +def locatedExpr(expr): + """ + Helper to decorate a returned token with its starting and ending locations in the input string. 
+ This helper adds the following results names: + - locn_start = location where matched expression begins + - locn_end = location where matched expression ends + - value = the actual parsed results + + Be careful if the input text contains C{<TAB>} characters, you may want to call + C{L{ParserElement.parseWithTabs}} + + Example:: + wd = Word(alphas) + for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): + print(match) + prints:: + [[0, 'ljsdf', 5]] + [[8, 'lksdjjf', 15]] + [[18, 'lkkjj', 23]] + """ + locator = Empty().setParseAction(lambda s,l,t: l) + return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) + + +# convenience constants for positional expressions +empty = Empty().setName("empty") +lineStart = LineStart().setName("lineStart") +lineEnd = LineEnd().setName("lineEnd") +stringStart = StringStart().setName("stringStart") +stringEnd = StringEnd().setName("stringEnd") + +_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) +_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) +_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) +_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1) +_charRange = Group(_singleChar + Suppress("-") + _singleChar) +_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" + +def srange(s): + r""" + Helper to easily define string ranges for use in Word construction. Borrows + syntax from regexp '[]' string range definitions:: + srange("[0-9]") -> "0123456789" + srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" + srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" + The input string must be enclosed in []'s, and the returned string is the expanded + character set joined into a single string. + The values enclosed in the []'s may be: + - a single character + - an escaped character with a leading backslash (such as C{\-} or C{\]}) + - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) + (C{\0x##} is also supported for backwards compatibility) + - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character) + - a range of any of the above, separated by a dash (C{'a-z'}, etc.) + - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.) + """ + _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) + try: + return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) + except Exception: + return "" + +def matchOnlyAtCol(n): + """ + Helper method for defining parse actions that require matching at a specific + column in the input text. + """ + def verifyCol(strg,locn,toks): + if col(locn,strg) != n: + raise ParseException(strg,locn,"matched token not at column %d" % n) + return verifyCol + +def replaceWith(replStr): + """ + Helper method for common parse actions that simply return a literal value. Especially + useful when used with C{L{transformString<ParserElement.transformString>}()}. 
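+    As a minimal C{transformString} sketch (the words used are arbitrary)::
+        quiet = Literal('quiet').setParseAction(replaceWith('QUIET!'))
+        print(quiet.transformString('please be quiet now'))
+        # -> please be QUIET! now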
+ + Example:: + num = Word(nums).setParseAction(lambda toks: int(toks[0])) + na = oneOf("N/A NA").setParseAction(replaceWith(math.nan)) + term = na | num + + OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234] + """ + return lambda s,l,t: [replStr] + +def removeQuotes(s,l,t): + """ + Helper parse action for removing quotation marks from parsed quoted strings. + + Example:: + # by default, quotation marks are included in parsed results + quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"] + + # use removeQuotes to strip quotation marks from parsed results + quotedString.setParseAction(removeQuotes) + quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"] + """ + return t[0][1:-1] + +def tokenMap(func, *args): + """ + Helper to define a parse action by mapping a function to all elements of a ParseResults list.If any additional + args are passed, they are forwarded to the given function as additional arguments after + the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the + parsed data to an integer using base 16. + + Example (compare the last to example in L{ParserElement.transformString}:: + hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16)) + hex_ints.runTests(''' + 00 11 22 aa FF 0a 0d 1a + ''') + + upperword = Word(alphas).setParseAction(tokenMap(str.upper)) + OneOrMore(upperword).runTests(''' + my kingdom for a horse + ''') + + wd = Word(alphas).setParseAction(tokenMap(str.title)) + OneOrMore(wd).setParseAction(' '.join).runTests(''' + now is the winter of our discontent made glorious summer by this sun of york + ''') + prints:: + 00 11 22 aa FF 0a 0d 1a + [0, 17, 34, 170, 255, 10, 13, 26] + + my kingdom for a horse + ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE'] + + now is the winter of our discontent made glorious summer by this sun of york + ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York'] + """ + def pa(s,l,t): + return [func(tokn, *args) for tokn in t] + + try: + func_name = getattr(func, '__name__', + getattr(func, '__class__').__name__) + except Exception: + func_name = str(func) + pa.__name__ = func_name + + return pa + +upcaseTokens = tokenMap(lambda t: _ustr(t).upper()) +"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}""" + +downcaseTokens = tokenMap(lambda t: _ustr(t).lower()) +"""(Deprecated) Helper parse action to convert tokens to lower case. 
Deprecated in favor of L{pyparsing_common.downcaseTokens}""" + +def _makeTags(tagStr, xml): + """Internal helper to construct opening and closing tag expressions, given a tag name""" + if isinstance(tagStr,basestring): + resname = tagStr + tagStr = Keyword(tagStr, caseless=not xml) + else: + resname = tagStr.name + + tagAttrName = Word(alphas,alphanums+"_-:") + if (xml): + tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) + openTag = Suppress("<") + tagStr("tag") + \ + Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \ + Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") + else: + printablesLessRAbrack = "".join(c for c in printables if c not in ">") + tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack) + openTag = Suppress("<") + tagStr("tag") + \ + Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \ + Optional( Suppress("=") + tagAttrValue ) ))) + \ + Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") + closeTag = Combine(_L("</") + tagStr + ">") + + openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname) + closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname) + openTag.tag = resname + closeTag.tag = resname + return openTag, closeTag + +def makeHTMLTags(tagStr): + """ + Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches + tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values. + + Example:: + text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' + # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple + a,a_end = makeHTMLTags("A") + link_expr = a + SkipTo(a_end)("link_text") + a_end + + for link in link_expr.searchString(text): + # attributes in the <A> tag (like "href" shown here) are also accessible as named results + print(link.link_text, '->', link.href) + prints:: + pyparsing -> http://pyparsing.wikispaces.com + """ + return _makeTags( tagStr, False ) + +def makeXMLTags(tagStr): + """ + Helper to construct opening and closing tag expressions for XML, given a tag name. Matches + tags only in the given upper/lower case. + + Example: similar to L{makeHTMLTags} + """ + return _makeTags( tagStr, True ) + +def withAttribute(*args,**attrDict): + """ + Helper to create a validating parse action to be used with start tags created + with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag + with a required attribute value, to avoid false matches on common tags such as + C{<TD>} or C{<DIV>}. + + Call C{withAttribute} with a series of attribute names and values. Specify the list + of filter attributes names and values as: + - keyword arguments, as in C{(align="right")}, or + - as an explicit dict with C{**} operator, when an attribute name is also a Python + reserved word, as in C{**{"class":"Customer", "align":"right"}} + - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) + For attribute names with a namespace prefix, you must use the second form. Attribute + names are matched insensitive to upper/lower case. + + If just testing for C{class} (with or without a namespace), use C{L{withClass}}. 
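+    For instance, a minimal sketch (the tag and attribute values here are
+    hypothetical)::
+        img, img_end = makeHTMLTags('img')
+        # only match <img> tags whose src attribute is exactly 'logo.png'
+        logo_img = img.copy().setParseAction(withAttribute(src='logo.png'))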
+ + To verify that the attribute exists, but without specifying a value, pass + C{withAttribute.ANY_VALUE} as the value. + + Example:: + html = ''' + <div> + Some text + <div type="grid">1 4 0 1 0</div> + <div type="graph">1,3 2,3 1,1</div> + <div>this has no type</div> + </div> + + ''' + div,div_end = makeHTMLTags("div") + + # only match div tag having a type attribute with value "grid" + div_grid = div().setParseAction(withAttribute(type="grid")) + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + # construct a match with any div tag having a type attribute, regardless of the value + div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + if args: + attrs = args[:] + else: + attrs = attrDict.items() + attrs = [(k,v) for k,v in attrs] + def pa(s,l,tokens): + for attrName,attrValue in attrs: + if attrName not in tokens: + raise ParseException(s,l,"no matching attribute " + attrName) + if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: + raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % + (attrName, tokens[attrName], attrValue)) + return pa +withAttribute.ANY_VALUE = object() + +def withClass(classname, namespace=''): + """ + Simplified version of C{L{withAttribute}} when matching on a div class - made + difficult because C{class} is a reserved word in Python. + + Example:: + html = ''' + <div> + Some text + <div class="grid">1 4 0 1 0</div> + <div class="graph">1,3 2,3 1,1</div> + <div>this <div> has no class</div> + </div> + + ''' + div,div_end = makeHTMLTags("div") + div_grid = div().setParseAction(withClass("grid")) + + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + classattr = "%s:class" % namespace if namespace else "class" + return withAttribute(**{classattr : classname}) + +opAssoc = _Constants() +opAssoc.LEFT = object() +opAssoc.RIGHT = object() + +def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): + """ + Helper method for constructing grammars of expressions made up of + operators working in a precedence hierarchy. Operators may be unary or + binary, left- or right-associative. Parse actions can also be attached + to operator expressions. The generated parser will also recognize the use + of parentheses to override operator precedences (see example below). + + Note: if you define a deep operator list, you may see performance issues + when using infixNotation. See L{ParserElement.enablePackrat} for a + mechanism to potentially improve your parser performance. 
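+    A compact sketch, ahead of the fuller example below (the names are
+    illustrative)::
+        arith = infixNotation(pyparsing_common.integer,
+            [(oneOf('* /'), 2, opAssoc.LEFT),
+             (oneOf('+ -'), 2, opAssoc.LEFT)])
+        print(arith.parseString('1+2*3'))   # -> [[1, '+', [2, '*', 3]]]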
+ + Parameters: + - baseExpr - expression representing the most basic element for the nested + - opList - list of tuples, one for each operator precedence level in the + expression grammar; each tuple is of the form + (opExpr, numTerms, rightLeftAssoc, parseAction), where: + - opExpr is the pyparsing expression for the operator; + may also be a string, which will be converted to a Literal; + if numTerms is 3, opExpr is a tuple of two expressions, for the + two operators separating the 3 terms + - numTerms is the number of terms for this operator (must + be 1, 2, or 3) + - rightLeftAssoc is the indicator whether the operator is + right or left associative, using the pyparsing-defined + constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. + - parseAction is the parse action to be associated with + expressions matching this operator expression (the + parse action tuple member may be omitted); if the parse action + is passed a tuple or list of functions, this is equivalent to + calling C{setParseAction(*fn)} (L{ParserElement.setParseAction}) + - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) + - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) + + Example:: + # simple example of four-function arithmetic with ints and variable names + integer = pyparsing_common.signed_integer + varname = pyparsing_common.identifier + + arith_expr = infixNotation(integer | varname, + [ + ('-', 1, opAssoc.RIGHT), + (oneOf('* /'), 2, opAssoc.LEFT), + (oneOf('+ -'), 2, opAssoc.LEFT), + ]) + + arith_expr.runTests(''' + 5+3*6 + (5+3)*6 + -2--11 + ''', fullDump=False) + prints:: + 5+3*6 + [[5, '+', [3, '*', 6]]] + + (5+3)*6 + [[[5, '+', 3], '*', 6]] + + -2--11 + [[['-', 2], '-', ['-', 11]]] + """ + ret = Forward() + lastExpr = baseExpr | ( lpar + ret + rpar ) + for i,operDef in enumerate(opList): + opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] + termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr + if arity == 3: + if opExpr is None or len(opExpr) != 2: + raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") + opExpr1, opExpr2 = opExpr + thisExpr = Forward().setName(termName) + if rightLeftAssoc == opAssoc.LEFT: + if arity == 1: + matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) + else: + matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ + Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + elif rightLeftAssoc == opAssoc.RIGHT: + if arity == 1: + # try to avoid LR with this extra test + if not isinstance(opExpr, Optional): + opExpr = Optional(opExpr) + matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) + else: + matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ + Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or 
ternary (3)") + else: + raise ValueError("operator must indicate right or left associativity") + if pa: + if isinstance(pa, (tuple, list)): + matchExpr.setParseAction(*pa) + else: + matchExpr.setParseAction(pa) + thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) + lastExpr = thisExpr + ret <<= lastExpr + return ret + +operatorPrecedence = infixNotation +"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" + +dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") +sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") +quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| + Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") +unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") + +def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): + """ + Helper method for defining nested lists enclosed in opening and closing + delimiters ("(" and ")" are the default). + + Parameters: + - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression + - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression + - content - expression for items within the nested lists (default=C{None}) + - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) + + If an expression is not provided for the content argument, the nested + expression will capture all whitespace-delimited content between delimiters + as a list of separate values. + + Use the C{ignoreExpr} argument to define expressions that may contain + opening or closing characters that should not be treated as opening + or closing characters for nesting, such as quotedString or a comment + expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. + The default is L{quotedString}, but if no expressions are to be ignored, + then pass C{None} for this argument. 
+ + Example:: + data_type = oneOf("void int short long char float double") + decl_data_type = Combine(data_type + Optional(Word('*'))) + ident = Word(alphas+'_', alphanums+'_') + number = pyparsing_common.number + arg = Group(decl_data_type + ident) + LPAR,RPAR = map(Suppress, "()") + + code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) + + c_function = (decl_data_type("type") + + ident("name") + + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + + code_body("body")) + c_function.ignore(cStyleComment) + + source_code = ''' + int is_odd(int x) { + return (x%2); + } + + int dec_to_hex(char hchar) { + if (hchar >= '0' && hchar <= '9') { + return (ord(hchar)-ord('0')); + } else { + return (10+ord(hchar)-ord('A')); + } + } + ''' + for func in c_function.searchString(source_code): + print("%(name)s (%(type)s) args: %(args)s" % func) + + prints:: + is_odd (int) args: [['int', 'x']] + dec_to_hex (int) args: [['char', 'hchar']] + """ + if opener == closer: + raise ValueError("opening and closing strings cannot be the same") + if content is None: + if isinstance(opener,basestring) and isinstance(closer,basestring): + if len(opener) == 1 and len(closer)==1: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS + ).setParseAction(lambda t:t[0].strip())) + else: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + ~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + raise ValueError("opening and closing arguments must be strings if no content expression is given") + ret = Forward() + if ignoreExpr is not None: + ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) + else: + ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) + ret.setName('nested %s%s expression' % (opener,closer)) + return ret + +def indentedBlock(blockStatementExpr, indentStack, indent=True): + """ + Helper method for defining space-delimited indentation blocks, such as + those used to define block statements in Python source code. + + Parameters: + - blockStatementExpr - expression defining syntax of statement that + is repeated within the indented block + - indentStack - list created by caller to manage indentation stack + (multiple statementWithIndentedBlock expressions within a single grammar + should share a common indentStack) + - indent - boolean indicating whether block must be indented beyond the + the current level; set to False for block of left-most statements + (default=C{True}) + + A valid block must contain at least one C{blockStatement}. 
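+    A minimal sketch (the statement grammar here is invented for
+    illustration; a fuller example follows)::
+        indent_stack = [1]
+        stmt = Forward()
+        suite = indentedBlock(stmt, indent_stack)
+        if_stmt = Group(Keyword('if') + Word(alphas) + ':' + suite)
+        stmt <<= if_stmt | Word(alphas)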
+ + Example:: + data = ''' + def A(z): + A1 + B = 100 + G = A2 + A2 + A3 + B + def BB(a,b,c): + BB1 + def BBA(): + bba1 + bba2 + bba3 + C + D + def spam(x,y): + def eggs(z): + pass + ''' + + + indentStack = [1] + stmt = Forward() + + identifier = Word(alphas, alphanums) + funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") + func_body = indentedBlock(stmt, indentStack) + funcDef = Group( funcDecl + func_body ) + + rvalue = Forward() + funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") + rvalue << (funcCall | identifier | Word(nums)) + assignment = Group(identifier + "=" + rvalue) + stmt << ( funcDef | assignment | identifier ) + + module_body = OneOrMore(stmt) + + parseTree = module_body.parseString(data) + parseTree.pprint() + prints:: + [['def', + 'A', + ['(', 'z', ')'], + ':', + [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], + 'B', + ['def', + 'BB', + ['(', 'a', 'b', 'c', ')'], + ':', + [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], + 'C', + 'D', + ['def', + 'spam', + ['(', 'x', 'y', ')'], + ':', + [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] + """ + def checkPeerIndent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if curCol != indentStack[-1]: + if curCol > indentStack[-1]: + raise ParseFatalException(s,l,"illegal nesting") + raise ParseException(s,l,"not a peer entry") + + def checkSubIndent(s,l,t): + curCol = col(l,s) + if curCol > indentStack[-1]: + indentStack.append( curCol ) + else: + raise ParseException(s,l,"not a subentry") + + def checkUnindent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): + raise ParseException(s,l,"not an unindent") + indentStack.pop() + + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) + INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') + PEER = Empty().setParseAction(checkPeerIndent).setName('') + UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') + if indent: + smExpr = Group( Optional(NL) + + #~ FollowedBy(blockStatementExpr) + + INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) + else: + smExpr = Group( Optional(NL) + + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) + blockStatementExpr.ignore(_bslash + LineEnd()) + return smExpr.setName('indented block') + +alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") +punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") + +anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) +_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) +commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") +def replaceHTMLEntity(t): + """Helper parser action to replace common HTML entities with their special characters""" + return _htmlEntityMap.get(t.entity) + +# it's easy to get these comment structures wrong - they're very common, so may as well make them available +cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") +"Comment of the form C{/* ... */}" + +htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment") +"Comment of the form C{<!-- ... -->}" + +restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") +dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") +"Comment of the form C{// ... 
(to end of line)}" + +cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") +"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" + +javaStyleComment = cppStyleComment +"Same as C{L{cppStyleComment}}" + +pythonStyleComment = Regex(r"#.*").setName("Python style comment") +"Comment of the form C{# ... (to end of line)}" + +_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + + Optional( Word(" \t") + + ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") +commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") +"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. + This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" + +# some other useful expressions - using lower-case class name since we are really using this as a namespace +class pyparsing_common: + """ + Here are some common low-level expressions that may be useful in jump-starting parser development: + - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>}) + - common L{programming identifiers<identifier>} + - network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>}) + - ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>} + - L{UUID<uuid>} + - L{comma-separated list<comma_separated_list>} + Parse actions: + - C{L{convertToInteger}} + - C{L{convertToFloat}} + - C{L{convertToDate}} + - C{L{convertToDatetime}} + - C{L{stripHTMLTags}} + - C{L{upcaseTokens}} + - C{L{downcaseTokens}} + + Example:: + pyparsing_common.number.runTests(''' + # any int or real number, returned as the appropriate type + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.fnumber.runTests(''' + # any int or real number, returned as float + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.hex_integer.runTests(''' + # hex numbers + 100 + FF + ''') + + pyparsing_common.fraction.runTests(''' + # fractions + 1/2 + -3/4 + ''') + + pyparsing_common.mixed_integer.runTests(''' + # mixed fractions + 1 + 1/2 + -3/4 + 1-3/4 + ''') + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(''' + # uuid + 12345678-1234-5678-1234-567812345678 + ''') + prints:: + # any int or real number, returned as the appropriate type + 100 + [100] + + -100 + [-100] + + +100 + [100] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # any int or real number, returned as float + 100 + [100.0] + + -100 + [-100.0] + + +100 + [100.0] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # hex numbers + 100 + [256] + + FF + [255] + + # fractions + 1/2 + [0.5] + + -3/4 + [-0.75] + + # mixed fractions + 1 + [1] + + 1/2 + [0.5] + + -3/4 + [-0.75] + + 1-3/4 + [1.75] + + # uuid + 12345678-1234-5678-1234-567812345678 + [UUID('12345678-1234-5678-1234-567812345678')] + """ + + convertToInteger = tokenMap(int) + """ + Parse action for converting parsed integers to Python int + """ + + convertToFloat = tokenMap(float) + """ + Parse action for converting parsed numbers to Python float + """ + + integer = Word(nums).setName("integer").setParseAction(convertToInteger) + """expression that parses an unsigned integer, returns an int""" + + hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) + """expression that 
parses a hexadecimal integer, returns an int""" + + signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) + """expression that parses an integer with optional leading sign, returns an int""" + + fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") + """fractional expression of an integer divided by an integer, returns a float""" + fraction.addParseAction(lambda t: t[0]/t[-1]) + + mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") + """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" + mixed_integer.addParseAction(sum) + + real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) + """expression that parses a floating point number and returns a float""" + + sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) + """expression that parses a floating point number with optional scientific notation and returns a float""" + + # streamlining this expression makes the docs nicer-looking + number = (sci_real | real | signed_integer).streamline() + """any numeric expression, returns the corresponding Python type""" + + fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) + """any int or real number, returned as float""" + + identifier = Word(alphas+'_', alphanums+'_').setName("identifier") + """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" + + ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") + "IPv4 address (C{0.0.0.0 - 255.255.255.255})" + + _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") + _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") + _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") + _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) + _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") + ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") + "IPv6 address (long, short, or mixed form)" + + mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") + "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" + + @staticmethod + def convertToDate(fmt="%Y-%m-%d"): + """ + Helper to create a parse action for converting parsed date string to Python datetime.date + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) + + Example:: + date_expr = pyparsing_common.iso8601_date.copy() + date_expr.setParseAction(pyparsing_common.convertToDate()) + print(date_expr.parseString("1999-12-31")) + prints:: + [datetime.date(1999, 12, 31)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt).date() + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + @staticmethod + def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): + """ + Helper to create a parse action for converting parsed datetime string to Python datetime.datetime + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) + + Example:: + dt_expr = pyparsing_common.iso8601_datetime.copy() + dt_expr.setParseAction(pyparsing_common.convertToDatetime()) + print(dt_expr.parseString("1999-12-31T23:59:59.999")) + prints:: + [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt) + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date") + "ISO8601 date (C{yyyy-mm-dd})" + + iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") + "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" + + uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") + "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" + + _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() + @staticmethod + def stripHTMLTags(s, l, tokens): + """ + Parse action to remove HTML tags from web page HTML source + + Example:: + # strip HTML links from normal text + text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' + td,td_end = makeHTMLTags("TD") + table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end + + print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' + """ + return pyparsing_common._html_stripper.transformString(tokens[0]) + + _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') + + Optional( White(" \t") ) ) ).streamline().setName("commaItem") + comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") + """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" + + upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) + """Parse action to convert tokens to upper case.""" + + downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) + """Parse action to convert tokens to lower case.""" + + +if __name__ == "__main__": + + selectToken = CaselessLiteral("select") + fromToken = CaselessLiteral("from") + + ident = Word(alphas, alphanums + "_$") + + columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + columnNameList = Group(delimitedList(columnName)).setName("columns") + columnSpec 
= ('*' | columnNameList) + + tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + tableNameList = Group(delimitedList(tableName)).setName("tables") + + simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") + + # demo runTests method, including embedded comments in test string + simpleSQL.runTests(""" + # '*' as column list and dotted table name + select * from SYS.XYZZY + + # caseless match on "SELECT", and casts back to "select" + SELECT * from XYZZY, ABC + + # list of column names, and mixed case SELECT keyword + Select AA,BB,CC from Sys.dual + + # multiple tables + Select A, B, C from Sys.dual, Table2 + + # invalid SELECT keyword - should fail + Xelect A, B, C from Sys.dual + + # incomplete command - should fail + Select + + # invalid column name - should fail + Select ^^^ frox Sys.dual + + """) + + pyparsing_common.number.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + # any int or real number, returned as float + pyparsing_common.fnumber.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + pyparsing_common.hex_integer.runTests(""" + 100 + FF + """) + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(""" + 12345678-1234-5678-1234-567812345678 + """) diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/six.py b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/six.py new file mode 100644 index 0000000000000000000000000000000000000000..190c0239cd7d7af82a6e0cbc8d68053fa2e3dfaf --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources/_vendor/six.py @@ -0,0 +1,868 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2015 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson <benjamin@python.org>" +__version__ = "1.10.0" + + +# Useful for very coarse version differentiation. 
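+# (PY34 additionally gates features that require Python >= 3.4, such as
+# using importlib.reload for six.moves.reload_module.)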
+PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." 
+ fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", 
"http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", 
"urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = 
_importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack 
+ del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. 
+__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/extern/__init__.py b/backend/test/lib/python3.8/site-packages/pkg_resources/extern/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c1eb9e998f8e117c82c176bc83ab1d350c729cd7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources/extern/__init__.py @@ -0,0 +1,73 @@ +import sys + + +class VendorImporter: + """ + A PEP 302 meta path importer for finding optionally-vendored + or otherwise naturally-installed packages from root_name. + """ + + def __init__(self, root_name, vendored_names=(), vendor_pkg=None): + self.root_name = root_name + self.vendored_names = set(vendored_names) + self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor') + + @property + def search_path(self): + """ + Search first the vendor package then as a natural package. + """ + yield self.vendor_pkg + '.' + yield '' + + def find_module(self, fullname, path=None): + """ + Return self when fullname starts with root_name and the + target module is one vendored through this importer. + """ + root, base, target = fullname.partition(self.root_name + '.') + if root: + return + if not any(map(target.startswith, self.vendored_names)): + return + return self + + def load_module(self, fullname): + """ + Iterate over the search path to locate and load fullname. + """ + root, base, target = fullname.partition(self.root_name + '.') + for prefix in self.search_path: + try: + extant = prefix + target + __import__(extant) + mod = sys.modules[extant] + sys.modules[fullname] = mod + # mysterious hack: + # Remove the reference to the extant package/module + # on later Python versions to cause relative imports + # in the vendor package to resolve the same modules + # as those going through this importer. + if prefix and sys.version_info > (3, 3): + del sys.modules[extant] + return mod + except ImportError: + pass + else: + raise ImportError( + "The '{target}' package is required; " + "normally this is bundled with this package so if you get " + "this warning, consult the packager of your " + "distribution.".format(**locals()) + ) + + def install(self): + """ + Install this importer into sys.meta_path if not already present. 
+ """ + if self not in sys.meta_path: + sys.meta_path.append(self) + + +names = 'packaging', 'pyparsing', 'six', 'appdirs' +VendorImporter(__name__, names).install() diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/extern/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pkg_resources/extern/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9826cb9c8237cb4011370319ed2e5eb17a9c7cdf Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pkg_resources/extern/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pkg_resources/py31compat.py b/backend/test/lib/python3.8/site-packages/pkg_resources/py31compat.py new file mode 100644 index 0000000000000000000000000000000000000000..a381c424f9eaacb4126d4b8a474052551e34ccfb --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pkg_resources/py31compat.py @@ -0,0 +1,23 @@ +import os +import errno +import sys + +from .extern import six + + +def _makedirs_31(path, exist_ok=False): + try: + os.makedirs(path) + except OSError as exc: + if not exist_ok or exc.errno != errno.EEXIST: + raise + + +# rely on compatibility behavior until mode considerations +# and exists_ok considerations are disentangled. +# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663 +needs_makedirs = ( + six.PY2 or + (3, 4) <= sys.version_info < (3, 4, 1) +) +makedirs = _makedirs_31 if needs_makedirs else os.makedirs diff --git a/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/INSTALLER b/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/LICENSE b/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/METADATA b/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..78fec59e20173232f8cc3f65f300ade0e0e09243 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/METADATA @@ -0,0 +1,465 @@ +Metadata-Version: 2.1 +Name: pymongo +Version: 4.5.0 +Summary: Python driver for MongoDB <http://www.mongodb.org> +Author: The MongoDB Python Team +License: Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ +Project-URL: Homepage, http://github.com/mongodb/mongo-python-driver +Keywords: bson,gridfs,mongo,mongodb,pymongo +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Database +Classifier: Typing :: Typed +Requires-Python: >=3.7 +Description-Content-Type: text/x-rst +License-File: LICENSE +Requires-Dist: dnspython <3.0.0,>=1.16.0 +Provides-Extra: aws +Requires-Dist: pymongo-auth-aws <2.0.0 ; extra == 'aws' +Provides-Extra: encryption +Requires-Dist: pymongo[aws] ; extra == 'encryption' +Requires-Dist: pymongocrypt <2.0.0,>=1.6.0 ; extra == 'encryption' +Requires-Dist: certifi ; (os_name == "nt" or sys_platform == "darwin") and extra == 'encryption' +Provides-Extra: gssapi +Requires-Dist: pykerberos ; (os_name != "nt") and extra == 'gssapi' +Requires-Dist: winkerberos >=0.5.0 ; (os_name == "nt") and extra == 'gssapi' +Provides-Extra: ocsp +Requires-Dist: pyopenssl >=17.2.0 ; extra == 'ocsp' +Requires-Dist: requests <3.0.0 ; extra == 'ocsp' +Requires-Dist: cryptography >=2.5 ; extra == 'ocsp' +Requires-Dist: service-identity >=18.1.0 ; extra == 'ocsp' +Requires-Dist: certifi ; (os_name == "nt" or sys_platform == "darwin") and extra == 'ocsp' +Provides-Extra: snappy +Requires-Dist: python-snappy ; extra == 'snappy' +Provides-Extra: srv +Provides-Extra: tls +Provides-Extra: zstd +Requires-Dist: zstandard ; extra == 'zstd' + +======= +PyMongo +======= +:Info: See `the mongo site <http://www.mongodb.org>`_ for more information. See `GitHub <http://github.com/mongodb/mongo-python-driver>`_ for the latest source. +:Documentation: Available at `pymongo.readthedocs.io <https://pymongo.readthedocs.io/en/stable/>`_ +:Author: The MongoDB Python Team + +About +===== + +The PyMongo distribution contains tools for interacting with a MongoDB +database from Python. The ``bson`` package is an implementation of +the `BSON format <http://bsonspec.org>`_ for Python. The ``pymongo`` +package is a native Python driver for MongoDB. The ``gridfs`` package +is a `gridfs +<https://github.com/mongodb/specifications/blob/master/source/gridfs/gridfs-spec.rst/>`_ +implementation on top of ``pymongo``. + +PyMongo supports MongoDB 3.6, 4.0, 4.2, 4.4, 5.0, 6.0, and 7.0. + +Support / Feedback +================== + +For issues with, questions about, or feedback for PyMongo, please look into +our `support channels <https://support.mongodb.com/welcome>`_. Please +do not email any of the PyMongo developers directly with issues or +questions; you're more likely to get an answer on `StackOverflow <https://stackoverflow.com/questions/tagged/mongodb>`_ +(using a "mongodb" tag). + +Bugs / Feature Requests +======================= + +Think you’ve found a bug? Want to see a new feature in PyMongo?
Please open a +case in our issue management tool, JIRA: + +- `Create an account and login <https://jira.mongodb.org>`_. +- Navigate to `the PYTHON project <https://jira.mongodb.org/browse/PYTHON>`_. +- Click **Create Issue** and provide as much information as possible about the issue type and how to reproduce it. + +Bug reports in JIRA for all driver projects (i.e. PYTHON, CSHARP, JAVA) and the +Core Server (i.e. SERVER) project are **public**. + +How To Ask For Help +------------------- + +Please include all of the following information when opening an issue: + +- Detailed steps to reproduce the problem, including full traceback, if possible. +- The exact Python version used, with patch level:: + + $ python -c "import sys; print(sys.version)" + +- The exact version of PyMongo used, with patch level:: + + $ python -c "import pymongo; print(pymongo.version); print(pymongo.has_c())" + +- The operating system and version (e.g. Windows 7, OS X 10.8, ...) +- Web framework or asynchronous network library used, if any, with version (e.g. + Django 1.7, mod_wsgi 4.3.0, gevent 1.0.1, Tornado 4.0.2, ...) + +Security Vulnerabilities +------------------------ + +If you’ve identified a security vulnerability in a driver or any other +MongoDB project, please report it according to the `instructions here +<https://www.mongodb.com/docs/manual/tutorial/create-a-vulnerability-report/>`_. + +Installation +============ + +PyMongo can be installed with `pip <http://pypi.python.org/pypi/pip>`_:: + + $ python -m pip install pymongo + +Or ``easy_install`` from +`setuptools <http://pypi.python.org/pypi/setuptools>`_:: + + $ python -m easy_install pymongo + +You can also download the project source and do:: + + $ pip install . + +Do **not** install the "bson" package from PyPI. PyMongo comes with its own +bson package; doing "easy_install bson" installs a third-party package that +is incompatible with PyMongo. + +Dependencies +============ + +PyMongo supports CPython 3.7+ and PyPy3.7+. + +Required dependencies: + +Support for mongodb+srv:// URIs requires `dnspython +<https://pypi.python.org/pypi/dnspython>`_. + +Optional dependencies: + +GSSAPI authentication requires `pykerberos +<https://pypi.python.org/pypi/pykerberos>`_ on Unix or `WinKerberos +<https://pypi.python.org/pypi/winkerberos>`_ on Windows.
The correct +dependency can be installed automatically along with PyMongo:: + + $ python -m pip install "pymongo[gssapi]" + +MONGODB-AWS authentication requires `pymongo-auth-aws +<https://pypi.org/project/pymongo-auth-aws/>`_:: + + $ python -m pip install "pymongo[aws]" + +OCSP (Online Certificate Status Protocol) requires `PyOpenSSL +<https://pypi.org/project/pyOpenSSL/>`_, `requests +<https://pypi.org/project/requests/>`_, `service_identity +<https://pypi.org/project/service_identity/>`_ and may +require `certifi +<https://pypi.python.org/pypi/certifi>`_:: + + $ python -m pip install "pymongo[ocsp]" + +Wire protocol compression with snappy requires `python-snappy +<https://pypi.org/project/python-snappy>`_:: + + $ python -m pip install "pymongo[snappy]" + +Wire protocol compression with zstandard requires `zstandard +<https://pypi.org/project/zstandard>`_:: + + $ python -m pip install "pymongo[zstd]" + +Client-Side Field Level Encryption requires `pymongocrypt +<https://pypi.org/project/pymongocrypt/>`_ and +`pymongo-auth-aws <https://pypi.org/project/pymongo-auth-aws/>`_:: + + $ python -m pip install "pymongo[encryption]" + +You can install all dependencies automatically with the following +command:: + + $ python -m pip install "pymongo[gssapi,aws,ocsp,snappy,zstd,encryption]" + +Additional dependencies are: + +- (to generate documentation or run tests) tox_ + +Examples +======== +Here's a basic example (for more see the *examples* section of the docs): + +.. code-block:: pycon + + >>> import pymongo + >>> client = pymongo.MongoClient("localhost", 27017) + >>> db = client.test + >>> db.name + 'test' + >>> db.my_collection + Collection(Database(MongoClient('localhost', 27017), 'test'), 'my_collection') + >>> db.my_collection.insert_one({"x": 10}).inserted_id + ObjectId('4aba15ebe23f6b53b0000000') + >>> db.my_collection.insert_one({"x": 8}).inserted_id + ObjectId('4aba160ee23f6b543e000000') + >>> db.my_collection.insert_one({"x": 11}).inserted_id + ObjectId('4aba160ee23f6b543e000002') + >>> db.my_collection.find_one() + {'x': 10, '_id': ObjectId('4aba15ebe23f6b53b0000000')} + >>> for item in db.my_collection.find(): + ... print(item["x"]) + ... + 10 + 8 + 11 + >>> db.my_collection.create_index("x") + 'x_1' + >>> for item in db.my_collection.find().sort("x", pymongo.ASCENDING): + ... print(item["x"]) + ... + 8 + 10 + 11 + >>> [item["x"] for item in db.my_collection.find().limit(2).skip(1)] + [8, 11] + +Documentation +============= + +Documentation is available at `pymongo.readthedocs.io <https://pymongo.readthedocs.io/en/stable/>`_. + +Documentation can be generated by running **tox -m doc**. Generated documentation can be found in the +*doc/build/html/* directory. + +Learning Resources +================== + +MongoDB Learn - `Python courses <https://learn.mongodb.com/catalog?labels=%5B%22Language%22%5D&values=%5B%22Python%22%5D>`_. +`Python Articles on Developer Center <https://www.mongodb.com/developer/languages/python/>`_. + +Testing +======= + +The easiest way to run the tests is to run **tox -m test** in +the root of the distribution. + +To verify that PyMongo works with Gevent's monkey-patching:: + + $ python green_framework_test.py gevent + +Or with Eventlet's:: + + $ python green_framework_test.py eventlet + +.. 
_tox: https://tox.wiki/en/latest/index.html diff --git a/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/RECORD b/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..8fc17235801b8a6662996bad185c7a56576e0da7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/RECORD @@ -0,0 +1,185 @@ +bson/__init__.py,sha256=zWOBM5wK8Wd3Q_4t2AUwcC8hg8uofwWl7VcEgSpjD4Y,49269 +bson/__pycache__/__init__.cpython-38.pyc,, +bson/__pycache__/_helpers.cpython-38.pyc,, +bson/__pycache__/binary.cpython-38.pyc,, +bson/__pycache__/code.cpython-38.pyc,, +bson/__pycache__/codec_options.cpython-38.pyc,, +bson/__pycache__/datetime_ms.cpython-38.pyc,, +bson/__pycache__/dbref.cpython-38.pyc,, +bson/__pycache__/decimal128.cpython-38.pyc,, +bson/__pycache__/errors.cpython-38.pyc,, +bson/__pycache__/int64.cpython-38.pyc,, +bson/__pycache__/json_util.cpython-38.pyc,, +bson/__pycache__/max_key.cpython-38.pyc,, +bson/__pycache__/min_key.cpython-38.pyc,, +bson/__pycache__/objectid.cpython-38.pyc,, +bson/__pycache__/raw_bson.cpython-38.pyc,, +bson/__pycache__/regex.cpython-38.pyc,, +bson/__pycache__/son.cpython-38.pyc,, +bson/__pycache__/timestamp.cpython-38.pyc,, +bson/__pycache__/typings.cpython-38.pyc,, +bson/__pycache__/tz_util.cpython-38.pyc,, +bson/_cbson.cpython-38-x86_64-linux-gnu.so,sha256=PQT2-xpWYPU7fukb1WRlhKsL4QwlMgLDOz0b71h10hA,414496 +bson/_cbsonmodule.c,sha256=EkukkGE-jp3kyP-J0080KlRsiKSlYvkojH_vHcWnOg8,107636 +bson/_cbsonmodule.h,sha256=k5o-hPaIZz7444Jooaty3Gc3NnGnkXUPSB24-9UUtLg,8082 +bson/_helpers.py,sha256=M7OaNGEKGDkAStDs7m0885vqKCgtk-YcYEGEzlyaaO8,1330 +bson/binary.py,sha256=53Tefjfe4DoB5CSO5_X8oJRyyjVWqj7YnHBI11G0hdY,12373 +bson/bson-endian.h,sha256=c8kC3A4W2j_AvVLDJPo0w5UL15b6C7f14F0mRU19kMo,6573 +bson/buffer.c,sha256=7k5sRdnvD7ICNRW9bCJp2tFXj-tdyyOH6IqGwYGfIk4,4450 +bson/buffer.h,sha256=gk5piESiLAsMc7ktixf8-8Bv-CNQZguCM3mvVBM3FZw,1828 +bson/code.py,sha256=uY_I-Zgmv18m7cJmmzOuezGPCvoSS7ZfYkXDQy634rA,3442 +bson/codec_options.py,sha256=IfbDuqlEgPRX-xrV8Vioypdxm25yAgGsbv3BpfJdlTE,19679 +bson/datetime_ms.py,sha256=4g4pdZaN1YhoJqe9N7r06XPhb0WQDUzqwrARGjAtvVk,6062 +bson/dbref.py,sha256=QmhhORzO_48aHlmt16juCwGM_Sv9gZJvQH2JyFUjnaA,4726 +bson/decimal128.py,sha256=WQY7oaox5DT0ksbz5Hf3q557piZiOtspYNQiYq1ngJw,10237 +bson/errors.py,sha256=l2SBkC8gw4fWHtN3zoLtraBdT7xVkIlYEnYQ7llmnxc,1134 +bson/int64.py,sha256=Vjj7fVTzrGjcZlCaG10Jf4JzP-ZW884B4RDUp3YcIj0,1159 +bson/json_util.py,sha256=vjJ_Cn7rxzwvwBpZlBkDoFLMKD-Z5izzTDtbFqAfFaY,36374 +bson/max_key.py,sha256=bclzQ8YoSpqIbki_AQhSr07wL7okvbWHeclTGzySoNU,1468 +bson/min_key.py,sha256=TqG4ih30DJieBONfBPrvn3Tn9yqdSM0NTfODmAHsw34,1468 +bson/objectid.py,sha256=Qmk8pN9tJ-sqJ7GQJBFPm_oAqryi9SYSyz-Ojn3iRTY,9208 +bson/py.typed,sha256=zwRNjZOV3leFzGdwfkbvGOfGbBoplIeeZu4g7d6P928,170 +bson/raw_bson.py,sha256=o2yZEpXz1hFs4jjErHmyxkQWqxzOFM2UFsQN88HsyWw,7347 +bson/regex.py,sha256=WGeN9deYsfUHEZ0GeMNg30nGSQCwrU7YqGJqZynQ6rU,4606 +bson/son.py,sha256=iW6WAIi4oxZbVGQM28DIlVVoveX9clb9pxw-irJIZ8g,6412 +bson/time64.c,sha256=MJuQb_pvDHqUURaZAmGQGlF4NdV6ySKjvjGMKMtX-BM,21527 +bson/time64.h,sha256=NX8Xnr9nVAae14koal7zzUk3w1hOGmmzFFjlfodk1JM,1561 +bson/time64_config.h,sha256=2GEfhl7D8X8wLbgMR_vJo0hMqNqHaOcPhAX_z_FdQQw,1682 +bson/time64_limits.h,sha256=YGomcc4c9sPhR88LbfNHAVxkRs5anqWuSsuVD97j-1o,1492 +bson/timestamp.py,sha256=7l7Ob5IIwPiNsyU7uymnaYxrEScAi3eXwY8p1jdF09A,4217 +bson/typings.py,sha256=tJbKMr3ATMxJKPzniUGbtu2U2b_2S6yqzGQbo1QiCig,1102 
+bson/tz_util.py,sha256=UQsE7-79lhm6nwrKVXNk8QKutWPfNdKGp_N7A0uTd4w,1726 +gridfs/__init__.py,sha256=ndgL7aNnOv4G6ZHyLXCwdjiyq8bRbQ_K9078pueE8Ro,38635 +gridfs/__pycache__/__init__.cpython-38.pyc,, +gridfs/__pycache__/errors.cpython-38.pyc,, +gridfs/__pycache__/grid_file.cpython-38.pyc,, +gridfs/errors.py,sha256=Z7E-XkxtrWNfob3cTBSgeRlTvHcG02DCPv4X_EmvBkQ,1056 +gridfs/grid_file.py,sha256=6750H9B3BmJn9WSHlRF7fWVtOWzBBDnCXQJAfGB2SLQ,33344 +gridfs/py.typed,sha256=zwRNjZOV3leFzGdwfkbvGOfGbBoplIeeZu4g7d6P928,170 +pymongo-4.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pymongo-4.5.0.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357 +pymongo-4.5.0.dist-info/METADATA,sha256=K3Ok47Cf9bkQp9GLNtKjUX-O7GzHQ4YGcI_VG8Ws-P8,22072 +pymongo-4.5.0.dist-info/RECORD,, +pymongo-4.5.0.dist-info/WHEEL,sha256=XyhiD6utIo5XKoStn_HbcI9tJcXJDOz6sK6Qm9kpPjQ,111 +pymongo-4.5.0.dist-info/top_level.txt,sha256=OinVojDdOfo1Dsp-NRfrZdp6gcJJ4bPRq61vSg5vyAs,20 +pymongo/__init__.py,sha256=oERkaPC8S6H0sDvO4jIswkFmZi663yTrUALHQ-_SxN0,5209 +pymongo/__pycache__/__init__.cpython-38.pyc,, +pymongo/__pycache__/_csot.cpython-38.pyc,, +pymongo/__pycache__/_version.cpython-38.pyc,, +pymongo/__pycache__/aggregation.cpython-38.pyc,, +pymongo/__pycache__/auth.cpython-38.pyc,, +pymongo/__pycache__/auth_aws.cpython-38.pyc,, +pymongo/__pycache__/auth_oidc.cpython-38.pyc,, +pymongo/__pycache__/bulk.cpython-38.pyc,, +pymongo/__pycache__/change_stream.cpython-38.pyc,, +pymongo/__pycache__/client_options.cpython-38.pyc,, +pymongo/__pycache__/client_session.cpython-38.pyc,, +pymongo/__pycache__/collation.cpython-38.pyc,, +pymongo/__pycache__/collection.cpython-38.pyc,, +pymongo/__pycache__/command_cursor.cpython-38.pyc,, +pymongo/__pycache__/common.cpython-38.pyc,, +pymongo/__pycache__/compression_support.cpython-38.pyc,, +pymongo/__pycache__/cursor.cpython-38.pyc,, +pymongo/__pycache__/daemon.cpython-38.pyc,, +pymongo/__pycache__/database.cpython-38.pyc,, +pymongo/__pycache__/driver_info.cpython-38.pyc,, +pymongo/__pycache__/encryption.cpython-38.pyc,, +pymongo/__pycache__/encryption_options.cpython-38.pyc,, +pymongo/__pycache__/errors.cpython-38.pyc,, +pymongo/__pycache__/event_loggers.cpython-38.pyc,, +pymongo/__pycache__/hello.cpython-38.pyc,, +pymongo/__pycache__/helpers.cpython-38.pyc,, +pymongo/__pycache__/lock.cpython-38.pyc,, +pymongo/__pycache__/max_staleness_selectors.cpython-38.pyc,, +pymongo/__pycache__/message.cpython-38.pyc,, +pymongo/__pycache__/mongo_client.cpython-38.pyc,, +pymongo/__pycache__/monitor.cpython-38.pyc,, +pymongo/__pycache__/monitoring.cpython-38.pyc,, +pymongo/__pycache__/network.cpython-38.pyc,, +pymongo/__pycache__/ocsp_cache.cpython-38.pyc,, +pymongo/__pycache__/ocsp_support.cpython-38.pyc,, +pymongo/__pycache__/operations.cpython-38.pyc,, +pymongo/__pycache__/periodic_executor.cpython-38.pyc,, +pymongo/__pycache__/pool.cpython-38.pyc,, +pymongo/__pycache__/pyopenssl_context.cpython-38.pyc,, +pymongo/__pycache__/read_concern.cpython-38.pyc,, +pymongo/__pycache__/read_preferences.cpython-38.pyc,, +pymongo/__pycache__/response.cpython-38.pyc,, +pymongo/__pycache__/results.cpython-38.pyc,, +pymongo/__pycache__/saslprep.cpython-38.pyc,, +pymongo/__pycache__/server.cpython-38.pyc,, +pymongo/__pycache__/server_api.cpython-38.pyc,, +pymongo/__pycache__/server_description.cpython-38.pyc,, +pymongo/__pycache__/server_selectors.cpython-38.pyc,, +pymongo/__pycache__/server_type.cpython-38.pyc,, +pymongo/__pycache__/settings.cpython-38.pyc,, 
+pymongo/__pycache__/socket_checker.cpython-38.pyc,, +pymongo/__pycache__/srv_resolver.cpython-38.pyc,, +pymongo/__pycache__/ssl_context.cpython-38.pyc,, +pymongo/__pycache__/ssl_support.cpython-38.pyc,, +pymongo/__pycache__/topology.cpython-38.pyc,, +pymongo/__pycache__/topology_description.cpython-38.pyc,, +pymongo/__pycache__/typings.cpython-38.pyc,, +pymongo/__pycache__/uri_parser.cpython-38.pyc,, +pymongo/__pycache__/write_concern.cpython-38.pyc,, +pymongo/_cmessage.cpython-38-x86_64-linux-gnu.so,sha256=fRYARPL0i8Qdw3aQWQ6H-5booSrA8vO7L6qkgm9k7HQ,496616 +pymongo/_cmessagemodule.c,sha256=pVG5XALKOyBIwFVjLksxy6TuFTGB02S8H0viA7zYR0I,31812 +pymongo/_csot.py,sha256=mZoWvMz05j6qJOnnUWhmo9u2P1jwah0NALJUOFLqdYQ,4549 +pymongo/_version.py,sha256=bwEtgYUUCy3G09y3eEL1wNq6-uK-oAZPQaY54p7tva8,965 +pymongo/aggregation.py,sha256=k-RzdmuDNj2j-gBcUZnVSXlBhJDL5bReAe6at_9X520,9368 +pymongo/auth.py,sha256=Pm_x6WxUrdqnEOEANGpii9TEjwehvirIej3-PJSOTpg,22858 +pymongo/auth_aws.py,sha256=4Mau-aTUjEzAxsxc_A1dVa6j76ik-_Hop6TU6UmFhiY,4023 +pymongo/auth_oidc.py,sha256=dgiRrCH0Q037hHV_wsbCayt_bPWEVDtUewVc1cUnqds,11295 +pymongo/bulk.py,sha256=VEMMXxXyiw-tvKz2Wb0jyUTzLlUhUXZddFby9iTcpVk,20752 +pymongo/change_stream.py,sha256=R4mKPJTBlW4Qvx5EZfji1drxSsbEwc3g4GpmdnqWv1w,18604 +pymongo/client_options.py,sha256=Frw8exHYb4ke7c77GCT4Db6KsNSS70QOtjJxDR0IHo0,12156 +pymongo/client_session.py,sha256=viiUqth1cCMNCs7TP38Ux6R7pRM1KbTbpeCP17tfjzk,43982 +pymongo/collation.py,sha256=fLB9bbRGMoD9fhmyuU76VoL5FhzlHSemnzoQnXbxJkQ,7966 +pymongo/collection.py,sha256=c4gsv9LslaXPEtVh6DXAOqFiVD__2W0Y-c1BZRnJbf8,142474 +pymongo/command_cursor.py,sha256=c41vk81ugb5eaaXkCZaBpAd8nShiDedxR4n-WIXPtWQ,14277 +pymongo/common.py,sha256=0ZdAiu4SwQE1XmLzkD-fZbRKbQxdK7rIsab_YJUyaxo,36931 +pymongo/compression_support.py,sha256=dw4Md4oOOkHqTcpcDgLCqIvUm8AE-5hAJGihmr37gSg,5033 +pymongo/cursor.py,sha256=cW6VR5aXRFcQ8WyzU9vsWSWAIaaOL_GF3dPIZ53F6pU,50934 +pymongo/daemon.py,sha256=WbafbNhDCeKUykHK5xyhCHKbq5bkt961OFgAhmkWVgw,5713 +pymongo/database.py,sha256=Ncz1vsFL9qvt0reY47AYAawC7piWDXwSpSmhqONLthc,56205 +pymongo/driver_info.py,sha256=frEuDexj0-UEUWH1OybPf_yr7Ot53Cnmb_QAqWC3Wlg,1703 +pymongo/encryption.py,sha256=aNHDUObKmskJWvfn_mC7cXe54pd7aatTENGBJE2vARo,43605 +pymongo/encryption_options.py,sha256=Kpq7pji5XB83xZDuifGqIFKKOFKgM9cdx4Rgi1iFIeU,12438 +pymongo/errors.py,sha256=SH_8CuEvvCpT1sMqnqTnYlCfSUtAweTrj_7Ja0dLfBk,11890 +pymongo/event_loggers.py,sha256=BfQsntDEM1gETF6Sae4ws0Kzg11G4rDaXDyHZgqqXS0,9097 +pymongo/hello.py,sha256=iaNoJrUXb0i9aGYwnwWA6zGit9Of9XffhRMhCUGb5wU,6589 +pymongo/helpers.py,sha256=ZwWKm8DopDJSMG_RIT00Ch-NpDKrLW3R25p-ilI4q88,11204 +pymongo/lock.py,sha256=NaWqT_pd9vd5kYGtPgItBPJ3VaB0BE5pOEfRrbB9xxg,1226 +pymongo/max_staleness_selectors.py,sha256=wOv2TFmJVGf5mMg9G2OlaJK6BfhmXNxRWuJL4iQgQvo,4643 +pymongo/message.py,sha256=nyIy3O-33DCww0M84b2Z8QEs2ZX-VKoFIFR1JCl_eIM,54529 +pymongo/mongo_client.py,sha256=vebGZXcTszOkyA_HTET6xQGoF2FC1FvbQ02rsoY5Sd8,101172 +pymongo/monitor.py,sha256=qGjYRr2OYjNCnH8G4PSqUNR1HdpCxTSgz-3OjYxI5Uo,16591 +pymongo/monitoring.py,sha256=AqAIhlmpOAnc4bVrurzG0IoEJKV6tlMsWZYj7iXRVKU,61073 +pymongo/network.py,sha256=99Gz4_rzBDXeKLx4V23iTGERnnDyD02xq4_Fy5x9NYw,13471 +pymongo/ocsp_cache.py,sha256=Caq07U2jrI4_XmxSdA_LI0gUufThCD2H1VNOAxqbAf0,3844 +pymongo/ocsp_support.py,sha256=t2r8r0NFCz15nrrMLoVRx8Vq4kIDF_XighkwlIIGb_k,17779 +pymongo/operations.py,sha256=_mY7CerHornzkRQC_MdEn759dWtDHf9OR0vNDsM1F-o,20526 +pymongo/periodic_executor.py,sha256=EudQ8zrRNXfOI27HFUBnTpkurKt_O_9vQhrKBk5LnPU,6346 
+pymongo/pool.py,sha256=ZNKX_HJoiUPevv2oSpeu_eYiptmcOoj2Wy3-P2B5OR4,70947 +pymongo/py.typed,sha256=zwRNjZOV3leFzGdwfkbvGOfGbBoplIeeZu4g7d6P928,170 +pymongo/pyopenssl_context.py,sha256=cQ8Ox7uwYMEuTe4ONyrEPgYGCcgP3JZRa7xcP7eXHxg,16607 +pymongo/read_concern.py,sha256=3-T60VwM1H1yRRpW6L1idb1drrkEf0_JiabGl7ntx2g,2401 +pymongo/read_preferences.py,sha256=HnHqjIGtc88pTey1sHbx53zo3MlJCcGQlHQOzjZNQP4,21459 +pymongo/response.py,sha256=1pCtNe19cQ2t59wwAlkDZYvMtuij_7MrnuAVpckV6uY,4328 +pymongo/results.py,sha256=bppvFE1Kh4szQ4gBufV6_q14Ontsu36hWwDbqNVXGQc,7693 +pymongo/saslprep.py,sha256=Sngd_JBvBXiUqF3sJYWjhT79GwaBeuARRR0LqhRDfDY,4336 +pymongo/server.py,sha256=2EUQLRPoClbADHKXOt-og5jEOUx92DdiHfd1Uye-e9g,10346 +pymongo/server_api.py,sha256=pNePNi9xdAmm_okfFoocBRemWD5r6FEfLF6_Ojn6abw,6157 +pymongo/server_description.py,sha256=m3I17VDPjxEvsGS52mEY9SfOA9K0pwXDHePLHPGsWes,9656 +pymongo/server_selectors.py,sha256=T2Rf-UseXW4GOJOc6uzUAV3pcBjOEo7Rc82LghBYj_E,6084 +pymongo/server_type.py,sha256=Qq4_UHR8D7Tm7PV5NsEdeJAJf_6M0vhfkdfcXebi5hY,888 +pymongo/settings.py,sha256=udhtkU4Rrv34WsqFhGbjgGaPxN514EPX9vEpWSYtjkg,5784 +pymongo/socket_checker.py,sha256=moKE6RqV_D7F3eHjthVC1erZxXF8R_6mIu_6mqSb1kM,4161 +pymongo/srv_resolver.py,sha256=Ozh6YJCn8FagthwUGR8qHD5V5MWvPlkleDsj_DYn95E,4791 +pymongo/ssl_context.py,sha256=ZOnO-YOoLqbbp2g5mJjPMI1z8kJgydutKENUJiOKMAk,1390 +pymongo/ssl_support.py,sha256=sscYGp3msUtQpTBd_vSuKe-YE2AQEVwffaBXd79C_p4,3807 +pymongo/topology.py,sha256=TgrbBMHDsom44hGh8eSP7pkff-uJi3F41ToRDoSrqKI,38189 +pymongo/topology_description.py,sha256=P6qM8gQny1PEhavydWuvdUehfxKzOuK2waHrr8XTm7Q,26589 +pymongo/typings.py,sha256=y7VxnmaDKpb3bNaHEOSca7m__91sPQ3aLJLlla1aLIs,1484 +pymongo/uri_parser.py,sha256=mKWWhnFBj6fI8pMvvi6N-z-DODlAel40OgQz8nYgSAQ,23727 +pymongo/write_concern.py,sha256=vuaAYDH8_T_7d-cg7EBeQLsa_bGIYufCD1L_COZii1g,5169 diff --git a/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/WHEEL b/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..78f594716707111ca5e69c75679d4b35c0949787 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.41.2) +Root-Is-Purelib: false +Tag: cp38-cp38-manylinux2014_x86_64 + diff --git a/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/top_level.txt b/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..7b660e26d75f30e3169c7f8b6bd2104fa3f78e44 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo-4.5.0.dist-info/top_level.txt @@ -0,0 +1,3 @@ +bson +gridfs +pymongo diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__init__.py b/backend/test/lib/python3.8/site-packages/pymongo/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a3bdb4c163749f1b457d8c98d93d70daee6907c4 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/__init__.py @@ -0,0 +1,177 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Python driver for MongoDB.""" + +from typing import ContextManager, Optional + +__all__ = [ + "ASCENDING", + "DESCENDING", + "GEO2D", + "GEOSPHERE", + "HASHED", + "TEXT", + "version_tuple", + "get_version_string", + "__version__", + "version", + "ReturnDocument", + "MAX_SUPPORTED_WIRE_VERSION", + "MIN_SUPPORTED_WIRE_VERSION", + "CursorType", + "MongoClient", + "DeleteMany", + "DeleteOne", + "IndexModel", + "InsertOne", + "ReplaceOne", + "UpdateMany", + "UpdateOne", + "ReadPreference", + "WriteConcern", + "has_c", + "timeout", +] + +ASCENDING = 1 +"""Ascending sort order.""" +DESCENDING = -1 +"""Descending sort order.""" + +GEO2D = "2d" +"""Index specifier for a 2-dimensional `geospatial index`_. + +.. _geospatial index: http://mongodb.com/docs/manual/core/2d/ +""" + +GEOSPHERE = "2dsphere" +"""Index specifier for a `spherical geospatial index`_. + +.. versionadded:: 2.5 + +.. _spherical geospatial index: http://mongodb.com/docs/manual/core/2dsphere/ +""" + +HASHED = "hashed" +"""Index specifier for a `hashed index`_. + +.. versionadded:: 2.5 + +.. _hashed index: http://mongodb.com/docs/manual/core/index-hashed/ +""" + +TEXT = "text" +"""Index specifier for a `text index`_. + +.. seealso:: MongoDB's `Atlas Search + <https://docs.atlas.mongodb.com/atlas-search/>`_ which offers more advanced + text search functionality. + +.. versionadded:: 2.7.1 + +.. _text index: http://mongodb.com/docs/manual/core/index-text/ +""" + +from pymongo import _csot +from pymongo._version import __version__, get_version_string, version_tuple +from pymongo.collection import ReturnDocument +from pymongo.common import MAX_SUPPORTED_WIRE_VERSION, MIN_SUPPORTED_WIRE_VERSION +from pymongo.cursor import CursorType +from pymongo.mongo_client import MongoClient +from pymongo.operations import ( + DeleteMany, + DeleteOne, + IndexModel, + InsertOne, + ReplaceOne, + UpdateMany, + UpdateOne, +) +from pymongo.read_preferences import ReadPreference +from pymongo.write_concern import WriteConcern + +version = __version__ +"""Current version of PyMongo.""" + + +def has_c() -> bool: + """Is the C extension installed?""" + try: + from pymongo import _cmessage # type: ignore[attr-defined] # noqa: F401 + + return True + except ImportError: + return False + + +def timeout(seconds: Optional[float]) -> ContextManager: + """**(Provisional)** Apply the given timeout for a block of operations. + + .. note:: :func:`~pymongo.timeout` is currently provisional. Backwards + incompatible changes may occur before becoming officially supported. + + Use :func:`~pymongo.timeout` in a with-statement:: + + with pymongo.timeout(5): + client.db.coll.insert_one({}) + client.db.coll2.insert_one({}) + + When the with-statement is entered, a deadline is set for the entire + block. When that deadline is exceeded, any blocking pymongo operation + will raise a timeout exception. For example:: + + try: + with pymongo.timeout(5): + client.db.coll.insert_one({}) + time.sleep(5) + # The deadline has now expired, the next operation will raise + # a timeout exception. 
+ client.db.coll2.insert_one({}) + except PyMongoError as exc: + if exc.timeout: + print(f"block timed out: {exc!r}") + else: + print(f"failed with non-timeout error: {exc!r}") + + When nesting :func:`~pymongo.timeout`, the nested deadline is capped by + the outer deadline. The deadline can only be shortened, not extended. + When exiting the block, the previous deadline is restored:: + + with pymongo.timeout(5): + coll.find_one() # Uses the 5 second deadline. + with pymongo.timeout(3): + coll.find_one() # Uses the 3 second deadline. + coll.find_one() # Uses the original 5 second deadline. + with pymongo.timeout(10): + coll.find_one() # Still uses the original 5 second deadline. + coll.find_one() # Uses the original 5 second deadline. + + :Parameters: + - `seconds`: A non-negative floating point number expressing seconds, or None. + + :Raises: + - :py:class:`ValueError`: When `seconds` is negative. + + See :ref:`timeout-example` for more examples. + + .. versionadded:: 4.2 + """ + if not isinstance(seconds, (int, float, type(None))): + raise TypeError("timeout must be None, an int, or a float") + if seconds and seconds < 0: + raise ValueError("timeout cannot be negative") + if seconds is not None: + seconds = float(seconds) + return _csot._TimeoutContext(seconds) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9dc2472f2049f7768f7ea768a1f39f15e099ef1e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/_csot.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/_csot.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..120f4628c50c7ce7266c05725ee729e11ca4ecc8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/_csot.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/_version.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/_version.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0929ce62d1509f907757ba89dbd7323fd4e33226 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/_version.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/aggregation.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/aggregation.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..48b1af6a7f0dc3f66176535a6d83125123cb6dc0 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/aggregation.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/auth.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/auth.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..70dd925cd8c69bf2b581a2f0b916414d2078559e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/auth.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/auth_aws.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/auth_aws.cpython-38.pyc new file mode 
100644 index 0000000000000000000000000000000000000000..232d954f10301cbc59eb56b44eefa85ff6f4a5ce Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/auth_aws.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/auth_oidc.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/auth_oidc.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c6f105900fb2f496c8b662518a27a3a492d6f613 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/auth_oidc.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/bulk.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/bulk.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ab38a6b13f0c70a7df4f58cbdf5e2bb95ad62b3b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/bulk.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/change_stream.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/change_stream.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5194b1b3f095365f8823b1feebd96018f8d231c3 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/change_stream.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/client_options.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/client_options.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d71d52d9dbb1f15e78738abc5b2682fbadf5933c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/client_options.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/client_session.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/client_session.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..af3575674a4e967692ce5a4479d2fe0371ad1760 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/client_session.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/collation.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/collation.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d3955d5a0e98dcdcdf382801a6dab69eb9c7fa43 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/collation.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/collection.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/collection.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c17c8f41868c867e8bdcbe2a7565ef58b0c0ac2f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/collection.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/command_cursor.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/command_cursor.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b12f6b4567c7e2f4469e4e6a58b02cdb116631fc Binary files /dev/null and 
b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/command_cursor.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/common.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/common.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b084420cd94f13ffa5643cdfc1d1b519c102efae Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/common.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/compression_support.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/compression_support.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e2c9dcec960dd651571fd773ee3d64682c6b1372 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/compression_support.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/cursor.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/cursor.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ef9892501b67057995a923905404108112f86ce3 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/cursor.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/daemon.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/daemon.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..de22733a906e16b185c4810a93fedd015fd4d4c4 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/daemon.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/database.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/database.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b65ad5b4ead5d46fa5a8197e87405440b3005aeb Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/database.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/driver_info.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/driver_info.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0271ba4faeee47955fa5735b96d5d44a7202c846 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/driver_info.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/encryption.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/encryption.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f7560d8a89c607634f81b8feb3410342eb4312ab Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/encryption.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/encryption_options.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/encryption_options.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bbc09a535d1674d18d55c2e9d5dec8698410d756 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/encryption_options.cpython-38.pyc differ diff --git 
a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/errors.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/errors.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..896ed15b1879c2611750c6272cff21e0a6d8389f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/errors.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/event_loggers.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/event_loggers.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d84ee7a8cb3d898a04e894054b92ff32690192be Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/event_loggers.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/hello.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/hello.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..37200ce4808c58f390e8e5ea5958036a1821e56b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/hello.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/helpers.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/helpers.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..649d4c9bc0ebac2c08ba3ba9513058b02d1db804 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/helpers.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/lock.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/lock.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..483babcdc836fbbac1c72c1e58b6d4a95da956a1 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/lock.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/max_staleness_selectors.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/max_staleness_selectors.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f7c39c5693661811e9a14238535e4a1d3d260563 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/max_staleness_selectors.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/message.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/message.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ee4c57a252e993d306286f1ec6b2e129b53295a9 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/message.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/mongo_client.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/mongo_client.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..087a008eef9fa90c41d7120091d30a54036cb6cd Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/mongo_client.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/monitor.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/monitor.cpython-38.pyc new file mode 
100644 index 0000000000000000000000000000000000000000..e658e636d4bba62d98e197eae0aa7ddac3778350 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/monitor.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/monitoring.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/monitoring.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4543db91260377842a9397821a3075ee389b8898 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/monitoring.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/network.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/network.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e5ff616dbc70c8a834418d1732f2b2bf5ca125cf Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/network.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/ocsp_cache.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/ocsp_cache.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..daf7b72f433bbc73c5d29bee98c2e6a079e77d79 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/ocsp_cache.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/ocsp_support.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/ocsp_support.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2fa7464309f38e1e909ecec9f2deb61a91fc2674 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/ocsp_support.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/operations.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/operations.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8de881666493ef8e198fc6103de771a8a4897b7c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/operations.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/periodic_executor.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/periodic_executor.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c88c5e7511a355fbfc53c4e7d9cb33dc0c8693e1 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/periodic_executor.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/pool.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/pool.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f712c33363794665be13b95bc133c59130a4cf1f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/pool.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/pyopenssl_context.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/pyopenssl_context.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fba1d40e7d2e7e1e6b1e7d8c306ab4642127de9c Binary files /dev/null and 
b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/pyopenssl_context.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/read_concern.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/read_concern.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b83c9e389c93c8151b1cbdc364f9abe3c33266de Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/read_concern.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/read_preferences.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/read_preferences.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7e680f79914320239e9e7a44235d32c2f409e1e0 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/read_preferences.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/response.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/response.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..373802c663dc515c13e52b0bd8445eb6fe87827e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/response.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/results.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/results.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4bd0d44f736e67e8669402d21d4805f71e175273 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/results.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/saslprep.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/saslprep.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4d2dd834b3ef3315f46e0eebc3918dc5bc9e9d05 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/saslprep.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ad044e05bf56dc2f40a503c874697ad6edd52458 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server_api.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server_api.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c40d1ad5fdcc95b70bf00da63bed7088d89be07a Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server_api.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server_description.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server_description.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..33eb3900ae5a77c580bd86967e767af1e22ca38a Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server_description.cpython-38.pyc differ diff --git 
a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server_selectors.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server_selectors.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5dfd173bfce9ae984156a63bb0acdbc5ed7bfd99 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server_selectors.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server_type.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server_type.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a642ae60927ca94e479d8c69b7fda0c7637c427b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/server_type.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/settings.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/settings.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7bae9c681e797415102f668b64246a345f935c4c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/settings.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/socket_checker.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/socket_checker.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6f93840f5bb8f91e2c293b33ff9c3aa182688bab Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/socket_checker.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/srv_resolver.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/srv_resolver.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9900a7ecef3749d64cbe1c6f522bf198446a8bf Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/srv_resolver.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/ssl_context.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/ssl_context.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7fa84603075f0d9f08ce1fd9bbb83d298219051c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/ssl_context.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/ssl_support.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/ssl_support.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5ae7fe18b0477b40f6ad1641a0bbd0db04692972 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/ssl_support.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/topology.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/topology.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1cea75fb1fccc26b6a5636d510052a52453f4326 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/topology.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/topology_description.cpython-38.pyc 
b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/topology_description.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..92d47e549ffb78784c26acac37140030edd50442 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/topology_description.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/typings.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/typings.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ff6f20233752658f606c1c6fabcf8e83c9aaf02d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/typings.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/uri_parser.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/uri_parser.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..52553b2ed76f29f68cb4644c7a9f7ad22d880cb1 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/uri_parser.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/write_concern.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/write_concern.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4704d5993b9dd1aead55ee7c108c6e4ebb686e1d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/__pycache__/write_concern.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/_cmessage.cpython-38-x86_64-linux-gnu.so b/backend/test/lib/python3.8/site-packages/pymongo/_cmessage.cpython-38-x86_64-linux-gnu.so new file mode 100755 index 0000000000000000000000000000000000000000..f00e879b9714304e4c69120332a4195b13bafc97 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/pymongo/_cmessage.cpython-38-x86_64-linux-gnu.so differ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/_cmessagemodule.c b/backend/test/lib/python3.8/site-packages/pymongo/_cmessagemodule.c new file mode 100644 index 0000000000000000000000000000000000000000..7ac66a1e4bdd6be676cc0acae10b6bc51d17d469 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/_cmessagemodule.c @@ -0,0 +1,1011 @@ +/* + * Copyright 2009-present MongoDB, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * This file contains C implementations of some of the functions + * needed by the message module. If possible, these implementations + * should be used to speed up message creation. 
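+ * + * Three builders are defined below: _cbson_query_message() emits a legacy + * OP_QUERY message (opcode 2004, the "\xd4\x07\x00\x00" bytes), + * _cbson_get_more_message() a legacy OP_GET_MORE message (opcode 2005), + * and _cbson_op_msg() an OP_MSG message (opcode 2013). Each builder + * reserves four bytes for the total message length up front via + * pymongo_buffer_save_space(), writes the body, and then patches the + * real length in with buffer_write_int32_at_position().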
+ */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" + +#include "_cbsonmodule.h" +#include "buffer.h" + +struct module_state { + PyObject* _cbson; + PyObject* _max_bson_size_str; + PyObject* _max_message_size_str; + PyObject* _max_write_batch_size_str; + PyObject* _max_split_size_str; +}; + +/* See comments about module initialization in _cbsonmodule.c */ +#define GETSTATE(m) ((struct module_state*)PyModule_GetState(m)) + +#define DOC_TOO_LARGE_FMT "BSON document too large (%d bytes)" \ + " - the connected server supports" \ + " BSON document sizes up to %ld bytes." + +/* Get an error class from the pymongo.errors module. + * + * Returns a new ref */ +static PyObject* _error(char* name) { + PyObject* error; + PyObject* errors = PyImport_ImportModule("pymongo.errors"); + if (!errors) { + return NULL; + } + error = PyObject_GetAttrString(errors, name); + Py_DECREF(errors); + return error; +} + +/* The same as buffer_write_bytes except that it also validates + * "size" will fit in an int. + * Returns 0 on failure */ +static int buffer_write_bytes_ssize_t(buffer_t buffer, const char* data, Py_ssize_t size) { + int downsize = _downcast_and_check(size, 0); + if (size == -1) { + return 0; + } + return buffer_write_bytes(buffer, data, downsize); +} + +static PyObject* _cbson_query_message(PyObject* self, PyObject* args) { + /* NOTE just using a random number as the request_id */ + struct module_state *state = GETSTATE(self); + + int request_id = rand(); + unsigned int flags; + char* collection_name = NULL; + Py_ssize_t collection_name_length; + int begin, cur_size, max_size = 0; + int num_to_skip; + int num_to_return; + PyObject* query; + PyObject* field_selector; + PyObject* options_obj; + codec_options_t options; + buffer_t buffer = NULL; + int length_location, message_length; + PyObject* result = NULL; + + if (!(PyArg_ParseTuple(args, "Iet#iiOOO", + &flags, + "utf-8", + &collection_name, + &collection_name_length, + &num_to_skip, &num_to_return, + &query, &field_selector, + &options_obj) && + convert_codec_options(state->_cbson, options_obj, &options))) { + return NULL; + } + buffer = pymongo_buffer_new(); + if (!buffer) { + goto fail; + } + + // save space for message length + length_location = pymongo_buffer_save_space(buffer, 4); + if (length_location == -1) { + goto fail; + } + + if (!buffer_write_int32(buffer, (int32_t)request_id) || + !buffer_write_bytes(buffer, "\x00\x00\x00\x00\xd4\x07\x00\x00", 8) || + !buffer_write_int32(buffer, (int32_t)flags) || + !buffer_write_bytes_ssize_t(buffer, collection_name, + collection_name_length + 1) || + !buffer_write_int32(buffer, (int32_t)num_to_skip) || + !buffer_write_int32(buffer, (int32_t)num_to_return)) { + goto fail; + } + + begin = pymongo_buffer_get_position(buffer); + if (!write_dict(state->_cbson, buffer, query, 0, &options, 1)) { + goto fail; + } + + max_size = pymongo_buffer_get_position(buffer) - begin; + + if (field_selector != Py_None) { + begin = pymongo_buffer_get_position(buffer); + if (!write_dict(state->_cbson, buffer, field_selector, 0, + &options, 1)) { + goto fail; + } + cur_size = pymongo_buffer_get_position(buffer) - begin; + max_size = (cur_size > max_size) ? 
cur_size : max_size; + } + + message_length = pymongo_buffer_get_position(buffer) - length_location; + buffer_write_int32_at_position( + buffer, length_location, (int32_t)message_length); + + /* objectify buffer */ + result = Py_BuildValue("iy#i", request_id, + pymongo_buffer_get_buffer(buffer), + (Py_ssize_t)pymongo_buffer_get_position(buffer), + max_size); +fail: + PyMem_Free(collection_name); + destroy_codec_options(&options); + if (buffer) { + pymongo_buffer_free(buffer); + } + return result; +} + +static PyObject* _cbson_get_more_message(PyObject* self, PyObject* args) { + /* NOTE just using a random number as the request_id */ + int request_id = rand(); + char* collection_name = NULL; + Py_ssize_t collection_name_length; + int num_to_return; + long long cursor_id; + buffer_t buffer = NULL; + int length_location, message_length; + PyObject* result = NULL; + + if (!PyArg_ParseTuple(args, "et#iL", + "utf-8", + &collection_name, + &collection_name_length, + &num_to_return, + &cursor_id)) { + return NULL; + } + buffer = pymongo_buffer_new(); + if (!buffer) { + goto fail; + } + + // save space for message length + length_location = pymongo_buffer_save_space(buffer, 4); + if (length_location == -1) { + goto fail; + } + if (!buffer_write_int32(buffer, (int32_t)request_id) || + !buffer_write_bytes(buffer, + "\x00\x00\x00\x00" + "\xd5\x07\x00\x00" + "\x00\x00\x00\x00", 12) || + !buffer_write_bytes_ssize_t(buffer, + collection_name, + collection_name_length + 1) || + !buffer_write_int32(buffer, (int32_t)num_to_return) || + !buffer_write_int64(buffer, (int64_t)cursor_id)) { + goto fail; + } + + message_length = pymongo_buffer_get_position(buffer) - length_location; + buffer_write_int32_at_position( + buffer, length_location, (int32_t)message_length); + + /* objectify buffer */ + result = Py_BuildValue("iy#", request_id, + pymongo_buffer_get_buffer(buffer), + (Py_ssize_t)pymongo_buffer_get_position(buffer)); +fail: + PyMem_Free(collection_name); + if (buffer) { + pymongo_buffer_free(buffer); + } + return result; +} + +/* + * NOTE this method handles multiple documents in a type one payload but + * it does not perform batch splitting and the total message size is + * only checked *after* generating the entire message. 
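+ *
+ * For orientation, the OP_MSG layout assembled below (a sketch matching the
+ * buffer writes in this function, not a normative spec):
+ *
+ *     int32  messageLength   (patched in at the end)
+ *     int32  requestID       (random)
+ *     int32  responseTo      (zeroed)
+ *     int32  opCode          (2013 = OP_MSG, the "\xdd\x07\x00\x00" below)
+ *     int32  flagBits
+ *     0x00   payload type 0: a single BSON command document
+ *     0x01   payload type 1 (optional): int32 size, cstring identifier,
+ *            then zero or more BSON documents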
+ */ +static PyObject* _cbson_op_msg(PyObject* self, PyObject* args) { + struct module_state *state = GETSTATE(self); + + /* NOTE just using a random number as the request_id */ + int request_id = rand(); + unsigned int flags; + PyObject* command; + char* identifier = NULL; + Py_ssize_t identifier_length = 0; + PyObject* docs; + PyObject* doc; + PyObject* options_obj; + codec_options_t options; + buffer_t buffer = NULL; + int length_location, message_length; + int total_size = 0; + int max_doc_size = 0; + PyObject* result = NULL; + PyObject* iterator = NULL; + + /*flags, command, identifier, docs, opts*/ + if (!(PyArg_ParseTuple(args, "IOet#OO", + &flags, + &command, + "utf-8", + &identifier, + &identifier_length, + &docs, + &options_obj) && + convert_codec_options(state->_cbson, options_obj, &options))) { + return NULL; + } + buffer = pymongo_buffer_new(); + if (!buffer) { + goto fail; + } + + // save space for message length + length_location = pymongo_buffer_save_space(buffer, 4); + if (length_location == -1) { + goto fail; + } + if (!buffer_write_int32(buffer, (int32_t)request_id) || + !buffer_write_bytes(buffer, + "\x00\x00\x00\x00" /* responseTo */ + "\xdd\x07\x00\x00" /* 2013 */, 8)) { + goto fail; + } + + if (!buffer_write_int32(buffer, (int32_t)flags) || + !buffer_write_bytes(buffer, "\x00", 1) /* Payload type 0 */) { + goto fail; + } + total_size = write_dict(state->_cbson, buffer, command, 0, + &options, 1); + if (!total_size) { + goto fail; + } + + if (identifier_length) { + int payload_one_length_location, payload_length; + /* Payload type 1 */ + if (!buffer_write_bytes(buffer, "\x01", 1)) { + goto fail; + } + /* save space for payload 0 length */ + payload_one_length_location = pymongo_buffer_save_space(buffer, 4); + /* C string identifier */ + if (!buffer_write_bytes_ssize_t(buffer, identifier, identifier_length + 1)) { + goto fail; + } + iterator = PyObject_GetIter(docs); + if (iterator == NULL) { + goto fail; + } + while ((doc = PyIter_Next(iterator)) != NULL) { + int encoded_doc_size = write_dict( + state->_cbson, buffer, doc, 0, &options, 1); + if (!encoded_doc_size) { + Py_CLEAR(doc); + goto fail; + } + if (encoded_doc_size > max_doc_size) { + max_doc_size = encoded_doc_size; + } + Py_CLEAR(doc); + } + + payload_length = pymongo_buffer_get_position(buffer) - payload_one_length_location; + buffer_write_int32_at_position( + buffer, payload_one_length_location, (int32_t)payload_length); + total_size += payload_length; + } + + message_length = pymongo_buffer_get_position(buffer) - length_location; + buffer_write_int32_at_position( + buffer, length_location, (int32_t)message_length); + + /* objectify buffer */ + result = Py_BuildValue("iy#ii", request_id, + pymongo_buffer_get_buffer(buffer), + (Py_ssize_t)pymongo_buffer_get_position(buffer), + total_size, + max_doc_size); +fail: + Py_XDECREF(iterator); + if (buffer) { + pymongo_buffer_free(buffer); + } + PyMem_Free(identifier); + destroy_codec_options(&options); + return result; +} + + +static void +_set_document_too_large(int size, long max) { + PyObject* DocumentTooLarge = _error("DocumentTooLarge"); + if (DocumentTooLarge) { + PyObject* error = PyUnicode_FromFormat(DOC_TOO_LARGE_FMT, size, max); + if (error) { + PyErr_SetObject(DocumentTooLarge, error); + Py_DECREF(error); + } + Py_DECREF(DocumentTooLarge); + } +} + +#define _INSERT 0 +#define _UPDATE 1 +#define _DELETE 2 + +/* OP_MSG ----------------------------------------------- */ + +static int +_batched_op_msg( + unsigned char op, unsigned char ack, + PyObject* 
command, PyObject* docs, PyObject* ctx, + PyObject* to_publish, codec_options_t options, + buffer_t buffer, struct module_state *state) { + + long max_bson_size; + long max_write_batch_size; + long max_message_size; + int idx = 0; + int size_location; + int position; + int length; + PyObject* max_bson_size_obj = NULL; + PyObject* max_write_batch_size_obj = NULL; + PyObject* max_message_size_obj = NULL; + PyObject* doc = NULL; + PyObject* iterator = NULL; + char* flags = ack ? "\x00\x00\x00\x00" : "\x02\x00\x00\x00"; + + max_bson_size_obj = PyObject_GetAttr(ctx, state->_max_bson_size_str); + max_bson_size = PyLong_AsLong(max_bson_size_obj); + Py_XDECREF(max_bson_size_obj); + if (max_bson_size == -1) { + return 0; + } + + max_write_batch_size_obj = PyObject_GetAttr(ctx, state->_max_write_batch_size_str); + max_write_batch_size = PyLong_AsLong(max_write_batch_size_obj); + Py_XDECREF(max_write_batch_size_obj); + if (max_write_batch_size == -1) { + return 0; + } + + max_message_size_obj = PyObject_GetAttr(ctx, state->_max_message_size_str); + max_message_size = PyLong_AsLong(max_message_size_obj); + Py_XDECREF(max_message_size_obj); + if (max_message_size == -1) { + return 0; + } + + if (!buffer_write_bytes(buffer, flags, 4)) { + return 0; + } + /* Type 0 Section */ + if (!buffer_write_bytes(buffer, "\x00", 1)) { + return 0; + } + if (!write_dict(state->_cbson, buffer, command, 0, + &options, 0)) { + return 0; + } + + /* Type 1 Section */ + if (!buffer_write_bytes(buffer, "\x01", 1)) { + return 0; + } + /* Save space for size */ + size_location = pymongo_buffer_save_space(buffer, 4); + if (size_location == -1) { + return 0; + } + + switch (op) { + case _INSERT: + { + if (!buffer_write_bytes(buffer, "documents\x00", 10)) + goto fail; + break; + } + case _UPDATE: + { + if (!buffer_write_bytes(buffer, "updates\x00", 8)) + goto fail; + break; + } + case _DELETE: + { + if (!buffer_write_bytes(buffer, "deletes\x00", 8)) + goto fail; + break; + } + default: + { + PyObject* InvalidOperation = _error("InvalidOperation"); + if (InvalidOperation) { + PyErr_SetString(InvalidOperation, "Unknown command"); + Py_DECREF(InvalidOperation); + } + return 0; + } + } + + iterator = PyObject_GetIter(docs); + if (iterator == NULL) { + PyObject* InvalidOperation = _error("InvalidOperation"); + if (InvalidOperation) { + PyErr_SetString(InvalidOperation, "input is not iterable"); + Py_DECREF(InvalidOperation); + } + return 0; + } + while ((doc = PyIter_Next(iterator)) != NULL) { + int cur_doc_begin = pymongo_buffer_get_position(buffer); + int cur_size; + int doc_too_large = 0; + int unacked_doc_too_large = 0; + if (!write_dict(state->_cbson, buffer, doc, 0, &options, 1)) { + goto fail; + } + cur_size = pymongo_buffer_get_position(buffer) - cur_doc_begin; + + /* Does the first document exceed max_message_size? */ + doc_too_large = (idx == 0 && (pymongo_buffer_get_position(buffer) > max_message_size)); + /* When OP_MSG is used unacknowledged we have to check + * document size client side or applications won't be notified. + * Otherwise we let the server deal with documents that are too large + * since ordered=False causes those documents to be skipped instead of + * halting the bulk write operation. 
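+     * For example (illustrative): an unacknowledged (w:0) insert of a
+     * document larger than max_bson_size must raise DocumentTooLarge on
+     * the client, since the server will never send a reply that could
+     * carry the error back.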
+ * */ + unacked_doc_too_large = (!ack && cur_size > max_bson_size); + if (doc_too_large || unacked_doc_too_large) { + if (op == _INSERT) { + _set_document_too_large(cur_size, max_bson_size); + } else { + PyObject* DocumentTooLarge = _error("DocumentTooLarge"); + if (DocumentTooLarge) { + /* + * There's nothing intelligent we can say + * about size for update and delete. + */ + PyErr_Format( + DocumentTooLarge, + "%s command document too large", + (op == _UPDATE) ? "update": "delete"); + Py_DECREF(DocumentTooLarge); + } + } + goto fail; + } + /* We have enough data, return this batch. */ + if (pymongo_buffer_get_position(buffer) > max_message_size) { + /* + * Roll the existing buffer back to the beginning + * of the last document encoded. + */ + pymongo_buffer_update_position(buffer, cur_doc_begin); + Py_CLEAR(doc); + break; + } + if (PyList_Append(to_publish, doc) < 0) { + goto fail; + } + Py_CLEAR(doc); + idx += 1; + /* We have enough documents, return this batch. */ + if (idx == max_write_batch_size) { + break; + } + } + Py_CLEAR(iterator); + + if (PyErr_Occurred()) { + goto fail; + } + + position = pymongo_buffer_get_position(buffer); + length = position - size_location; + buffer_write_int32_at_position(buffer, size_location, (int32_t)length); + return 1; + +fail: + Py_XDECREF(doc); + Py_XDECREF(iterator); + return 0; +} + +static PyObject* +_cbson_encode_batched_op_msg(PyObject* self, PyObject* args) { + unsigned char op; + unsigned char ack; + PyObject* command; + PyObject* docs; + PyObject* ctx = NULL; + PyObject* to_publish = NULL; + PyObject* result = NULL; + PyObject* options_obj; + codec_options_t options; + buffer_t buffer; + struct module_state *state = GETSTATE(self); + + if (!(PyArg_ParseTuple(args, "bOObOO", + &op, &command, &docs, &ack, + &options_obj, &ctx) && + convert_codec_options(state->_cbson, options_obj, &options))) { + return NULL; + } + if (!(buffer = pymongo_buffer_new())) { + destroy_codec_options(&options); + return NULL; + } + if (!(to_publish = PyList_New(0))) { + goto fail; + } + + if (!_batched_op_msg( + op, + ack, + command, + docs, + ctx, + to_publish, + options, + buffer, + state)) { + goto fail; + } + + result = Py_BuildValue("y#O", + pymongo_buffer_get_buffer(buffer), + (Py_ssize_t)pymongo_buffer_get_position(buffer), + to_publish); +fail: + destroy_codec_options(&options); + pymongo_buffer_free(buffer); + Py_XDECREF(to_publish); + return result; +} + +static PyObject* +_cbson_batched_op_msg(PyObject* self, PyObject* args) { + unsigned char op; + unsigned char ack; + int request_id; + int position; + PyObject* command; + PyObject* docs; + PyObject* ctx = NULL; + PyObject* to_publish = NULL; + PyObject* result = NULL; + PyObject* options_obj; + codec_options_t options; + buffer_t buffer; + struct module_state *state = GETSTATE(self); + + if (!(PyArg_ParseTuple(args, "bOObOO", + &op, &command, &docs, &ack, + &options_obj, &ctx) && + convert_codec_options(state->_cbson, options_obj, &options))) { + return NULL; + } + if (!(buffer = pymongo_buffer_new())) { + destroy_codec_options(&options); + return NULL; + } + /* Save space for message length and request id */ + if ((pymongo_buffer_save_space(buffer, 8)) == -1) { + goto fail; + } + if (!buffer_write_bytes(buffer, + "\x00\x00\x00\x00" /* responseTo */ + "\xdd\x07\x00\x00", /* opcode */ + 8)) { + goto fail; + } + if (!(to_publish = PyList_New(0))) { + goto fail; + } + + if (!_batched_op_msg( + op, + ack, + command, + docs, + ctx, + to_publish, + options, + buffer, + state)) { + goto fail; + } + + 
request_id = rand(); + position = pymongo_buffer_get_position(buffer); + buffer_write_int32_at_position(buffer, 0, (int32_t)position); + buffer_write_int32_at_position(buffer, 4, (int32_t)request_id); + result = Py_BuildValue("iy#O", request_id, + pymongo_buffer_get_buffer(buffer), + (Py_ssize_t)pymongo_buffer_get_position(buffer), + to_publish); +fail: + destroy_codec_options(&options); + pymongo_buffer_free(buffer); + Py_XDECREF(to_publish); + return result; +} + +/* End OP_MSG -------------------------------------------- */ + +static int +_batched_write_command( + char* ns, Py_ssize_t ns_len, unsigned char op, + PyObject* command, PyObject* docs, PyObject* ctx, + PyObject* to_publish, codec_options_t options, + buffer_t buffer, struct module_state *state) { + + long max_bson_size; + long max_cmd_size; + long max_write_batch_size; + long max_split_size; + int idx = 0; + int cmd_len_loc; + int lst_len_loc; + int position; + int length; + PyObject* max_bson_size_obj = NULL; + PyObject* max_write_batch_size_obj = NULL; + PyObject* max_split_size_obj = NULL; + PyObject* doc = NULL; + PyObject* iterator = NULL; + + max_bson_size_obj = PyObject_GetAttr(ctx, state->_max_bson_size_str); + max_bson_size = PyLong_AsLong(max_bson_size_obj); + Py_XDECREF(max_bson_size_obj); + if (max_bson_size == -1) { + return 0; + } + /* + * Max BSON object size + 16k - 2 bytes for ending NUL bytes + * XXX: This should come from the server - SERVER-10643 + */ + max_cmd_size = max_bson_size + 16382; + + max_write_batch_size_obj = PyObject_GetAttr(ctx, state->_max_write_batch_size_str); + max_write_batch_size = PyLong_AsLong(max_write_batch_size_obj); + Py_XDECREF(max_write_batch_size_obj); + if (max_write_batch_size == -1) { + return 0; + } + + // max_split_size is the size at which to perform a batch split. + // Normally this value is equal to max_bson_size (16MiB). However, + // when auto encryption is enabled max_split_size is reduced to 2MiB.
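+    // Illustrative numbers (assumed): with ~1 MiB documents, a 2 MiB
+    // max_split_size forces a split after roughly every second document,
+    // versus roughly every sixteenth at the normal 16 MiB threshold.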
+ max_split_size_obj = PyObject_GetAttr(ctx, state->_max_split_size_str); + max_split_size = PyLong_AsLong(max_split_size_obj); + Py_XDECREF(max_split_size_obj); + if (max_split_size == -1) { + return 0; + } + + if (!buffer_write_bytes(buffer, + "\x00\x00\x00\x00", /* flags */ + 4) || + !buffer_write_bytes_ssize_t(buffer, ns, ns_len + 1) || /* namespace */ + !buffer_write_bytes(buffer, + "\x00\x00\x00\x00" /* skip */ + "\xFF\xFF\xFF\xFF", /* limit (-1) */ + 8)) { + return 0; + } + + /* Position of command document length */ + cmd_len_loc = pymongo_buffer_get_position(buffer); + if (!write_dict(state->_cbson, buffer, command, 0, + &options, 0)) { + return 0; + } + + /* Write type byte for array */ + *(pymongo_buffer_get_buffer(buffer) + (pymongo_buffer_get_position(buffer) - 1)) = 0x4; + + switch (op) { + case _INSERT: + { + if (!buffer_write_bytes(buffer, "documents\x00", 10)) + goto fail; + break; + } + case _UPDATE: + { + if (!buffer_write_bytes(buffer, "updates\x00", 8)) + goto fail; + break; + } + case _DELETE: + { + if (!buffer_write_bytes(buffer, "deletes\x00", 8)) + goto fail; + break; + } + default: + { + PyObject* InvalidOperation = _error("InvalidOperation"); + if (InvalidOperation) { + PyErr_SetString(InvalidOperation, "Unknown command"); + Py_DECREF(InvalidOperation); + } + return 0; + } + } + + /* Save space for list document */ + lst_len_loc = pymongo_buffer_save_space(buffer, 4); + if (lst_len_loc == -1) { + return 0; + } + + iterator = PyObject_GetIter(docs); + if (iterator == NULL) { + PyObject* InvalidOperation = _error("InvalidOperation"); + if (InvalidOperation) { + PyErr_SetString(InvalidOperation, "input is not iterable"); + Py_DECREF(InvalidOperation); + } + return 0; + } + while ((doc = PyIter_Next(iterator)) != NULL) { + int sub_doc_begin = pymongo_buffer_get_position(buffer); + int cur_doc_begin; + int cur_size; + int enough_data = 0; + char key[BUF_SIZE]; + int res = LL2STR(key, (long long)idx); + if (res == -1) { + return 0; + } + if (!buffer_write_bytes(buffer, "\x03", 1) || + !buffer_write_bytes(buffer, key, (int)strlen(key) + 1)) { + goto fail; + } + cur_doc_begin = pymongo_buffer_get_position(buffer); + if (!write_dict(state->_cbson, buffer, doc, 0, &options, 1)) { + goto fail; + } + + /* We have enough data, return this batch. + * max_cmd_size accounts for the two trailing null bytes. + */ + cur_size = pymongo_buffer_get_position(buffer) - cur_doc_begin; + /* This single document is too large for the command. */ + if (cur_size > max_cmd_size) { + if (op == _INSERT) { + _set_document_too_large(cur_size, max_bson_size); + } else { + PyObject* DocumentTooLarge = _error("DocumentTooLarge"); + if (DocumentTooLarge) { + /* + * There's nothing intelligent we can say + * about size for update and delete. + */ + PyErr_Format( + DocumentTooLarge, + "%s command document too large", + (op == _UPDATE) ? "update": "delete"); + Py_DECREF(DocumentTooLarge); + } + } + goto fail; + } + enough_data = (idx >= 1 && + (pymongo_buffer_get_position(buffer) > max_split_size)); + if (enough_data) { + /* + * Roll the existing buffer back to the beginning + * of the last document encoded. + */ + pymongo_buffer_update_position(buffer, sub_doc_begin); + Py_CLEAR(doc); + break; + } + if (PyList_Append(to_publish, doc) < 0) { + goto fail; + } + Py_CLEAR(doc); + idx += 1; + /* We have enough documents, return this batch. 
*/ + if (idx == max_write_batch_size) { + break; + } + } + Py_CLEAR(iterator); + + if (PyErr_Occurred()) { + goto fail; + } + + if (!buffer_write_bytes(buffer, "\x00\x00", 2)) { + goto fail; + } + + position = pymongo_buffer_get_position(buffer); + length = position - lst_len_loc - 1; + buffer_write_int32_at_position(buffer, lst_len_loc, (int32_t)length); + length = position - cmd_len_loc; + buffer_write_int32_at_position(buffer, cmd_len_loc, (int32_t)length); + return 1; + +fail: + Py_XDECREF(doc); + Py_XDECREF(iterator); + return 0; +} + +static PyObject* +_cbson_encode_batched_write_command(PyObject* self, PyObject* args) { + char *ns = NULL; + unsigned char op; + Py_ssize_t ns_len; + PyObject* command; + PyObject* docs; + PyObject* ctx = NULL; + PyObject* to_publish = NULL; + PyObject* result = NULL; + PyObject* options_obj; + codec_options_t options; + buffer_t buffer; + struct module_state *state = GETSTATE(self); + + if (!(PyArg_ParseTuple(args, "et#bOOOO", "utf-8", + &ns, &ns_len, &op, &command, &docs, + &options_obj, &ctx) && + convert_codec_options(state->_cbson, options_obj, &options))) { + return NULL; + } + if (!(buffer = pymongo_buffer_new())) { + PyMem_Free(ns); + destroy_codec_options(&options); + return NULL; + } + if (!(to_publish = PyList_New(0))) { + goto fail; + } + + if (!_batched_write_command( + ns, + ns_len, + op, + command, + docs, + ctx, + to_publish, + options, + buffer, + state)) { + goto fail; + } + + result = Py_BuildValue("y#O", + pymongo_buffer_get_buffer(buffer), + (Py_ssize_t)pymongo_buffer_get_position(buffer), + to_publish); +fail: + PyMem_Free(ns); + destroy_codec_options(&options); + pymongo_buffer_free(buffer); + Py_XDECREF(to_publish); + return result; +} + +static PyMethodDef _CMessageMethods[] = { + {"_query_message", _cbson_query_message, METH_VARARGS, + "create a query message to be sent to MongoDB"}, + {"_get_more_message", _cbson_get_more_message, METH_VARARGS, + "create a get more message to be sent to MongoDB"}, + {"_op_msg", _cbson_op_msg, METH_VARARGS, + "create an OP_MSG message to be sent to MongoDB"}, + {"_encode_batched_write_command", _cbson_encode_batched_write_command, METH_VARARGS, + "Encode the next batched insert, update, or delete command"}, + {"_batched_op_msg", _cbson_batched_op_msg, METH_VARARGS, + "Create the next batched insert, update, or delete using OP_MSG"}, + {"_encode_batched_op_msg", _cbson_encode_batched_op_msg, METH_VARARGS, + "Encode the next batched insert, update, or delete using OP_MSG"}, + {NULL, NULL, 0, NULL} +}; + +#define INITERROR return NULL +static int _cmessage_traverse(PyObject *m, visitproc visit, void *arg) { + Py_VISIT(GETSTATE(m)->_cbson); + Py_VISIT(GETSTATE(m)->_max_bson_size_str); + Py_VISIT(GETSTATE(m)->_max_message_size_str); + Py_VISIT(GETSTATE(m)->_max_split_size_str); + Py_VISIT(GETSTATE(m)->_max_write_batch_size_str); + return 0; +} + +static int _cmessage_clear(PyObject *m) { + Py_CLEAR(GETSTATE(m)->_cbson); + Py_CLEAR(GETSTATE(m)->_max_bson_size_str); + Py_CLEAR(GETSTATE(m)->_max_message_size_str); + Py_CLEAR(GETSTATE(m)->_max_split_size_str); + Py_CLEAR(GETSTATE(m)->_max_write_batch_size_str); + return 0; +} + +static struct PyModuleDef moduledef = { + PyModuleDef_HEAD_INIT, + "_cmessage", + NULL, + sizeof(struct module_state), + _CMessageMethods, + NULL, + _cmessage_traverse, + _cmessage_clear, + NULL +}; + +PyMODINIT_FUNC +PyInit__cmessage(void) +{ + PyObject *_cbson = NULL; + PyObject *c_api_object = NULL; + PyObject *m = NULL; + struct module_state* state = NULL; + + /* Store a 
reference to the _cbson module since it's needed to call some + * of its functions + */ + _cbson = PyImport_ImportModule("bson._cbson"); + if (_cbson == NULL) { + goto fail; + } + + /* Import C API of _cbson + * The header file accesses _cbson_API to call the functions + */ + c_api_object = PyObject_GetAttrString(_cbson, "_C_API"); + if (c_api_object == NULL) { + goto fail; + } + _cbson_API = (void **)PyCapsule_GetPointer(c_api_object, "_cbson._C_API"); + if (_cbson_API == NULL) { + goto fail; + } + + /* Returns a new reference. */ + m = PyModule_Create(&moduledef); + if (m == NULL) { + goto fail; + } + + state = GETSTATE(m); + state->_cbson = _cbson; + if (!((state->_max_bson_size_str = PyUnicode_FromString("max_bson_size")) && + (state->_max_message_size_str = PyUnicode_FromString("max_message_size")) && + (state->_max_write_batch_size_str = PyUnicode_FromString("max_write_batch_size")) && + (state->_max_split_size_str = PyUnicode_FromString("max_split_size")))) { + goto fail; + } + + Py_DECREF(c_api_object); + + return m; + +fail: + Py_XDECREF(m); + Py_XDECREF(c_api_object); + Py_XDECREF(_cbson); + INITERROR; +} diff --git a/backend/test/lib/python3.8/site-packages/pymongo/_csot.py b/backend/test/lib/python3.8/site-packages/pymongo/_csot.py new file mode 100644 index 0000000000000000000000000000000000000000..9ad477a2495bd78cebd5f3adc19319f16118c075 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/_csot.py @@ -0,0 +1,151 @@ +# Copyright 2022-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""Internal helpers for CSOT.""" + +from __future__ import annotations + +import functools +import time +from collections import deque +from contextvars import ContextVar, Token +from typing import Any, Callable, Deque, MutableMapping, Optional, Tuple, TypeVar, cast + +from pymongo.write_concern import WriteConcern + +TIMEOUT: ContextVar[Optional[float]] = ContextVar("TIMEOUT", default=None) +RTT: ContextVar[float] = ContextVar("RTT", default=0.0) +DEADLINE: ContextVar[float] = ContextVar("DEADLINE", default=float("inf")) + + +def get_timeout() -> Optional[float]: + return TIMEOUT.get(None) + + +def get_rtt() -> float: + return RTT.get() + + +def get_deadline() -> float: + return DEADLINE.get() + + +def set_rtt(rtt: float) -> None: + RTT.set(rtt) + + +def remaining() -> Optional[float]: + if not get_timeout(): + return None + return DEADLINE.get() - time.monotonic() + + +def clamp_remaining(max_timeout: float) -> float: + """Return the remaining timeout clamped to a max value.""" + timeout = remaining() + if timeout is None: + return max_timeout + return min(timeout, max_timeout) + + +class _TimeoutContext: + """Internal timeout context manager. 
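+
+    Nesting note (follows from ``__enter__`` below): an inner timeout can
+    only shorten the effective deadline, never extend it, because the new
+    deadline is ``min(previous deadline, now + timeout)``.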
+ + Use :func:`pymongo.timeout` instead:: + + with pymongo.timeout(0.5): + client.test.test.insert_one({}) + """ + + __slots__ = ("_timeout", "_tokens") + + def __init__(self, timeout: Optional[float]): + self._timeout = timeout + self._tokens: Optional[Tuple[Token, Token, Token]] = None + + def __enter__(self) -> _TimeoutContext: + timeout_token = TIMEOUT.set(self._timeout) + prev_deadline = DEADLINE.get() + next_deadline = time.monotonic() + self._timeout if self._timeout else float("inf") + deadline_token = DEADLINE.set(min(prev_deadline, next_deadline)) + rtt_token = RTT.set(0.0) + self._tokens = (timeout_token, deadline_token, rtt_token) + return self + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: + if self._tokens: + timeout_token, deadline_token, rtt_token = self._tokens + TIMEOUT.reset(timeout_token) + DEADLINE.reset(deadline_token) + RTT.reset(rtt_token) + + +# See https://mypy.readthedocs.io/en/stable/generics.html?#decorator-factories +F = TypeVar("F", bound=Callable[..., Any]) + + +def apply(func: F) -> F: + """Apply the client's timeoutMS to this operation.""" + + @functools.wraps(func) + def csot_wrapper(self: Any, *args: Any, **kwargs: Any) -> F: + if get_timeout() is None: + timeout = self._timeout + if timeout is not None: + with _TimeoutContext(timeout): + return func(self, *args, **kwargs) + return func(self, *args, **kwargs) + + return cast(F, csot_wrapper) + + +def apply_write_concern(cmd: MutableMapping, write_concern: Optional[WriteConcern]) -> None: + """Apply the given write concern to a command.""" + if not write_concern or write_concern.is_server_default: + return + wc = write_concern.document + if get_timeout() is not None: + wc.pop("wtimeout", None) + if wc: + cmd["writeConcern"] = wc + + +_MAX_RTT_SAMPLES: int = 10 +_MIN_RTT_SAMPLES: int = 2 + + +class MovingMinimum: + """Tracks a minimum RTT within the last 10 RTT samples.""" + + samples: Deque[float] + + def __init__(self) -> None: + self.samples = deque(maxlen=_MAX_RTT_SAMPLES) + + def add_sample(self, sample: float) -> None: + if sample < 0: + # Likely system time change while waiting for hello response + # and not using time.monotonic. Ignore it, the next one will + # probably be valid. + return + self.samples.append(sample) + + def get(self) -> float: + """Get the min, or 0.0 if there aren't enough samples yet.""" + if len(self.samples) >= _MIN_RTT_SAMPLES: + return min(self.samples) + return 0.0 + + def reset(self) -> None: + self.samples.clear() diff --git a/backend/test/lib/python3.8/site-packages/pymongo/_version.py b/backend/test/lib/python3.8/site-packages/pymongo/_version.py new file mode 100644 index 0000000000000000000000000000000000000000..d4a37e3cd60488a537b52b82eec0378c9d4ddc63 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/_version.py @@ -0,0 +1,28 @@ +# Copyright 2022-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
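+# Note on get_version_string() below (example values are hypothetical): a
+# trailing str element is appended verbatim, so (4, 5, 0) renders "4.5.0"
+# while a pre-release tuple like (4, 5, 0, ".dev0") would render "4.5.0.dev0".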
+ +"""Current version of PyMongo.""" +from typing import Tuple, Union + +version_tuple: Tuple[Union[int, str], ...] = (4, 5, 0) + + +def get_version_string() -> str: + if isinstance(version_tuple[-1], str): + return ".".join(map(str, version_tuple[:-1])) + version_tuple[-1] + return ".".join(map(str, version_tuple)) + + +__version__: str = get_version_string() +version = __version__ diff --git a/backend/test/lib/python3.8/site-packages/pymongo/aggregation.py b/backend/test/lib/python3.8/site-packages/pymongo/aggregation.py new file mode 100644 index 0000000000000000000000000000000000000000..ef6af1092e1c1055a9735cfe509c2103814ea371 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/aggregation.py @@ -0,0 +1,257 @@ +# Copyright 2019-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""Perform aggregation operations on a collection or database.""" +from __future__ import annotations + +from collections.abc import Callable, Mapping, MutableMapping +from typing import TYPE_CHECKING, Any, Optional, Union + +from bson.son import SON +from pymongo import common +from pymongo.collation import validate_collation_or_none +from pymongo.errors import ConfigurationError +from pymongo.read_preferences import ReadPreference, _AggWritePref + +if TYPE_CHECKING: + from pymongo.client_session import ClientSession + from pymongo.collection import Collection + from pymongo.command_cursor import CommandCursor + from pymongo.database import Database + from pymongo.pool import Connection + from pymongo.read_preferences import _ServerMode + from pymongo.server import Server + from pymongo.typings import _DocumentType, _Pipeline + + +class _AggregationCommand: + """The internal abstract base class for aggregation cursors. + + Should not be called directly by application developers. Use + :meth:`pymongo.collection.Collection.aggregate`, or + :meth:`pymongo.database.Database.aggregate` instead. + """ + + def __init__( + self, + target: Union[Database, Collection], + cursor_class: type[CommandCursor], + pipeline: _Pipeline, + options: MutableMapping[str, Any], + explicit_session: bool, + let: Optional[Mapping[str, Any]] = None, + user_fields: Optional[MutableMapping[str, Any]] = None, + result_processor: Optional[Callable[[Mapping[str, Any], Connection], None]] = None, + comment: Any = None, + ) -> None: + if "explain" in options: + raise ConfigurationError( + "The explain option is not supported. Use Database.command instead." 
+ ) + + self._target = target + + pipeline = common.validate_list("pipeline", pipeline) + self._pipeline = pipeline + self._performs_write = False + if pipeline and ("$out" in pipeline[-1] or "$merge" in pipeline[-1]): + self._performs_write = True + + common.validate_is_mapping("options", options) + if let is not None: + common.validate_is_mapping("let", let) + options["let"] = let + if comment is not None: + options["comment"] = comment + + self._options = options + + # This is the batchSize that will be used for setting the initial + # batchSize for the cursor, as well as the subsequent getMores. + self._batch_size = common.validate_non_negative_integer_or_none( + "batchSize", self._options.pop("batchSize", None) + ) + + # If the cursor option is already specified, avoid overriding it. + self._options.setdefault("cursor", {}) + # If the pipeline performs a write, we ignore the initial batchSize + # since the server doesn't return results in this case. + if self._batch_size is not None and not self._performs_write: + self._options["cursor"]["batchSize"] = self._batch_size + + self._cursor_class = cursor_class + self._explicit_session = explicit_session + self._user_fields = user_fields + self._result_processor = result_processor + + self._collation = validate_collation_or_none(options.pop("collation", None)) + + self._max_await_time_ms = options.pop("maxAwaitTimeMS", None) + self._write_preference: Optional[_AggWritePref] = None + + @property + def _aggregation_target(self) -> Union[str, int]: + """The argument to pass to the aggregate command.""" + raise NotImplementedError + + @property + def _cursor_namespace(self) -> str: + """The namespace in which the aggregate command is run.""" + raise NotImplementedError + + def _cursor_collection(self, cursor_doc: Mapping[str, Any]) -> Collection: + """The Collection used for the aggregate command cursor.""" + raise NotImplementedError + + @property + def _database(self) -> Database: + """The database against which the aggregation command is run.""" + raise NotImplementedError + + def get_read_preference( + self, session: Optional[ClientSession] + ) -> Union[_AggWritePref, _ServerMode]: + + if self._write_preference: + return self._write_preference + pref = self._target._read_preference_for(session) + if self._performs_write and pref != ReadPreference.PRIMARY: + self._write_preference = pref = _AggWritePref(pref) # type: ignore[assignment] + return pref + + def get_cursor( + self, + session: Optional[ClientSession], + server: Server, + conn: Connection, + read_preference: _ServerMode, + ) -> CommandCursor[_DocumentType]: + # Serialize command. + cmd = SON([("aggregate", self._aggregation_target), ("pipeline", self._pipeline)]) + cmd.update(self._options) + + # Apply this target's read concern if: + # readConcern has not been specified as a kwarg and either + # - server version is >= 4.2 or + # - server version is >= 3.2 and pipeline doesn't use $out + if ("readConcern" not in cmd) and ( + not self._performs_write or (conn.max_wire_version >= 8) + ): + read_concern = self._target.read_concern + else: + read_concern = None + + # Apply this target's write concern if: + # writeConcern has not been specified as a kwarg and pipeline doesn't + # perform a write operation + if "writeConcern" not in cmd and self._performs_write: + write_concern = self._target._write_concern_for(session) + else: + write_concern = None + + # Run command. 
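+        # Worked example of the branches above (illustrative): a pipeline
+        # ending in $merge on a pre-4.2 server (max_wire_version < 8) runs
+        # with read_concern=None but still gets the session's write concern;
+        # the same pipeline on 4.2+ attaches both.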
+ result = conn.command( + self._database.name, + cmd, + read_preference, + self._target.codec_options, + parse_write_concern_error=True, + read_concern=read_concern, + write_concern=write_concern, + collation=self._collation, + session=session, + client=self._database.client, + user_fields=self._user_fields, + ) + + if self._result_processor: + self._result_processor(result, conn) + + # Extract cursor from result or mock/fake one if necessary. + if "cursor" in result: + cursor = result["cursor"] + else: + # Unacknowledged $out/$merge write. Fake a cursor. + cursor = { + "id": 0, + "firstBatch": result.get("result", []), + "ns": self._cursor_namespace, + } + + # Create and return cursor instance. + cmd_cursor = self._cursor_class( + self._cursor_collection(cursor), + cursor, + conn.address, + batch_size=self._batch_size or 0, + max_await_time_ms=self._max_await_time_ms, + session=session, + explicit_session=self._explicit_session, + comment=self._options.get("comment"), + ) + cmd_cursor._maybe_pin_connection(conn) + return cmd_cursor + + +class _CollectionAggregationCommand(_AggregationCommand): + _target: Collection + + @property + def _aggregation_target(self) -> str: + return self._target.name + + @property + def _cursor_namespace(self) -> str: + return self._target.full_name + + def _cursor_collection(self, cursor: Mapping[str, Any]) -> Collection: + """The Collection used for the aggregate command cursor.""" + return self._target + + @property + def _database(self) -> Database: + return self._target.database + + +class _CollectionRawAggregationCommand(_CollectionAggregationCommand): + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + # For raw-batches, we set the initial batchSize for the cursor to 0. + if not self._performs_write: + self._options["cursor"]["batchSize"] = 0 + + +class _DatabaseAggregationCommand(_AggregationCommand): + _target: Database + + @property + def _aggregation_target(self) -> int: + return 1 + + @property + def _cursor_namespace(self) -> str: + return f"{self._target.name}.$cmd.aggregate" + + @property + def _database(self) -> Database: + return self._target + + def _cursor_collection(self, cursor: Mapping[str, Any]) -> Collection: + """The Collection used for the aggregate command cursor.""" + # Collection level aggregate may not always return the "ns" field + # according to our MockupDB tests. Let's handle that case for db level + # aggregate too by defaulting to the <db>.$cmd.aggregate namespace. + _, collname = cursor.get("ns", self._cursor_namespace).split(".", 1) + return self._database[collname] diff --git a/backend/test/lib/python3.8/site-packages/pymongo/auth.py b/backend/test/lib/python3.8/site-packages/pymongo/auth.py new file mode 100644 index 0000000000000000000000000000000000000000..f8e35f9c19e681d3ef8abeaa3fc01c0fd66512af --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/auth.py @@ -0,0 +1,616 @@ +# Copyright 2013-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
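+# Rough flow of this module (a summary, not normative): URI and keyword
+# options are folded into a MongoCredential by _build_credentials_tuple(),
+# and authenticate() at the bottom dispatches on credential.mechanism via
+# _AUTH_MAP, e.g. "SCRAM-SHA-256" -> _authenticate_scram(...,
+# mechanism="SCRAM-SHA-256").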
+ +"""Authentication helpers.""" +from __future__ import annotations + +import functools +import hashlib +import hmac +import os +import socket +import typing +from base64 import standard_b64decode, standard_b64encode +from collections import namedtuple +from typing import TYPE_CHECKING, Any, Callable, Mapping, MutableMapping, Optional +from urllib.parse import quote + +from bson.binary import Binary +from bson.son import SON +from pymongo.auth_aws import _authenticate_aws +from pymongo.auth_oidc import _authenticate_oidc, _get_authenticator, _OIDCProperties +from pymongo.errors import ConfigurationError, OperationFailure +from pymongo.saslprep import saslprep + +if TYPE_CHECKING: + from pymongo.hello import Hello + from pymongo.pool import Connection + +HAVE_KERBEROS = True +_USE_PRINCIPAL = False +try: + import winkerberos as kerberos + + if tuple(map(int, kerberos.__version__.split(".")[:2])) >= (0, 5): + _USE_PRINCIPAL = True +except ImportError: + try: + import kerberos + except ImportError: + HAVE_KERBEROS = False + + +MECHANISMS = frozenset( + [ + "GSSAPI", + "MONGODB-CR", + "MONGODB-OIDC", + "MONGODB-X509", + "MONGODB-AWS", + "PLAIN", + "SCRAM-SHA-1", + "SCRAM-SHA-256", + "DEFAULT", + ] +) +"""The authentication mechanisms supported by PyMongo.""" + + +class _Cache: + __slots__ = ("data",) + + _hash_val = hash("_Cache") + + def __init__(self) -> None: + self.data = None + + def __eq__(self, other: object) -> bool: + # Two instances must always compare equal. + if isinstance(other, _Cache): + return True + return NotImplemented + + def __ne__(self, other: object) -> bool: + if isinstance(other, _Cache): + return False + return NotImplemented + + def __hash__(self) -> int: + return self._hash_val + + +MongoCredential = namedtuple( + "MongoCredential", + ["mechanism", "source", "username", "password", "mechanism_properties", "cache"], +) +"""A hashable namedtuple of values used for authentication.""" + + +GSSAPIProperties = namedtuple( + "GSSAPIProperties", ["service_name", "canonicalize_host_name", "service_realm"] +) +"""Mechanism properties for GSSAPI authentication.""" + + +_AWSProperties = namedtuple("_AWSProperties", ["aws_session_token"]) +"""Mechanism properties for MONGODB-AWS authentication.""" + + +def _build_credentials_tuple( + mech: str, + source: Optional[str], + user: str, + passwd: str, + extra: Mapping[str, Any], + database: Optional[str], +) -> MongoCredential: + """Build and return a mechanism specific credentials tuple.""" + if mech not in ("MONGODB-X509", "MONGODB-AWS", "MONGODB-OIDC") and user is None: + raise ConfigurationError(f"{mech} requires a username.") + if mech == "GSSAPI": + if source is not None and source != "$external": + raise ValueError("authentication source must be $external or None for GSSAPI") + properties = extra.get("authmechanismproperties", {}) + service_name = properties.get("SERVICE_NAME", "mongodb") + canonicalize = properties.get("CANONICALIZE_HOST_NAME", False) + service_realm = properties.get("SERVICE_REALM") + props = GSSAPIProperties( + service_name=service_name, + canonicalize_host_name=canonicalize, + service_realm=service_realm, + ) + # Source is always $external. 
+ return MongoCredential(mech, "$external", user, passwd, props, None) + elif mech == "MONGODB-X509": + if passwd is not None: + raise ConfigurationError("Passwords are not supported by MONGODB-X509") + if source is not None and source != "$external": + raise ValueError("authentication source must be $external or None for MONGODB-X509") + # Source is always $external, user can be None. + return MongoCredential(mech, "$external", user, None, None, None) + elif mech == "MONGODB-AWS": + if user is not None and passwd is None: + raise ConfigurationError("username without a password is not supported by MONGODB-AWS") + if source is not None and source != "$external": + raise ConfigurationError( + "authentication source must be $external or None for MONGODB-AWS" + ) + + properties = extra.get("authmechanismproperties", {}) + aws_session_token = properties.get("AWS_SESSION_TOKEN") + aws_props = _AWSProperties(aws_session_token=aws_session_token) + # user can be None for temporary link-local EC2 credentials. + return MongoCredential(mech, "$external", user, passwd, aws_props, None) + elif mech == "MONGODB-OIDC": + properties = extra.get("authmechanismproperties", {}) + request_token_callback = properties.get("request_token_callback") + refresh_token_callback = properties.get("refresh_token_callback", None) + provider_name = properties.get("PROVIDER_NAME", "") + default_allowed = [ + "*.mongodb.net", + "*.mongodb-dev.net", + "*.mongodbgov.net", + "localhost", + "127.0.0.1", + "::1", + ] + allowed_hosts = properties.get("allowed_hosts", default_allowed) + if not request_token_callback and provider_name != "aws": + raise ConfigurationError( + "authentication with MONGODB-OIDC requires providing an request_token_callback or a provider_name of 'aws'" + ) + oidc_props = _OIDCProperties( + request_token_callback=request_token_callback, + refresh_token_callback=refresh_token_callback, + provider_name=provider_name, + allowed_hosts=allowed_hosts, + ) + return MongoCredential(mech, "$external", user, passwd, oidc_props, None) + + elif mech == "PLAIN": + source_database = source or database or "$external" + return MongoCredential(mech, source_database, user, passwd, None, None) + else: + source_database = source or database or "admin" + if passwd is None: + raise ConfigurationError("A password is required.") + return MongoCredential(mech, source_database, user, passwd, None, _Cache()) + + +def _xor(fir: bytes, sec: bytes) -> bytes: + """XOR two byte strings together.""" + return b"".join([bytes([x ^ y]) for x, y in zip(fir, sec)]) + + +def _parse_scram_response(response: bytes) -> dict: + """Split a scram response into key, value pairs.""" + return dict( + typing.cast(typing.Tuple[str, str], item.split(b"=", 1)) for item in response.split(b",") + ) + + +def _authenticate_scram_start( + credentials: MongoCredential, mechanism: str +) -> tuple[bytes, bytes, MutableMapping[str, Any]]: + username = credentials.username + user = username.encode("utf-8").replace(b"=", b"=3D").replace(b",", b"=2C") + nonce = standard_b64encode(os.urandom(32)) + first_bare = b"n=" + user + b",r=" + nonce + + cmd = SON( + [ + ("saslStart", 1), + ("mechanism", mechanism), + ("payload", Binary(b"n,," + first_bare)), + ("autoAuthorize", 1), + ("options", {"skipEmptyExchange": True}), + ] + ) + return nonce, first_bare, cmd + + +def _authenticate_scram(credentials: MongoCredential, conn: Connection, mechanism: str) -> None: + """Authenticate using SCRAM.""" + username = credentials.username + if mechanism == "SCRAM-SHA-256": + digest = 
"sha256" + digestmod = hashlib.sha256 + data = saslprep(credentials.password).encode("utf-8") + else: + digest = "sha1" + digestmod = hashlib.sha1 + data = _password_digest(username, credentials.password).encode("utf-8") + source = credentials.source + cache = credentials.cache + + # Make local + _hmac = hmac.HMAC + + ctx = conn.auth_ctx + if ctx and ctx.speculate_succeeded(): + assert isinstance(ctx, _ScramContext) + assert ctx.scram_data is not None + nonce, first_bare = ctx.scram_data + res = ctx.speculative_authenticate + else: + nonce, first_bare, cmd = _authenticate_scram_start(credentials, mechanism) + res = conn.command(source, cmd) + + assert res is not None + server_first = res["payload"] + parsed = _parse_scram_response(server_first) + iterations = int(parsed[b"i"]) + if iterations < 4096: + raise OperationFailure("Server returned an invalid iteration count.") + salt = parsed[b"s"] + rnonce = parsed[b"r"] + if not rnonce.startswith(nonce): + raise OperationFailure("Server returned an invalid nonce.") + + without_proof = b"c=biws,r=" + rnonce + if cache.data: + client_key, server_key, csalt, citerations = cache.data + else: + client_key, server_key, csalt, citerations = None, None, None, None + + # Salt and / or iterations could change for a number of different + # reasons. Either changing invalidates the cache. + if not client_key or salt != csalt or iterations != citerations: + salted_pass = hashlib.pbkdf2_hmac(digest, data, standard_b64decode(salt), iterations) + client_key = _hmac(salted_pass, b"Client Key", digestmod).digest() + server_key = _hmac(salted_pass, b"Server Key", digestmod).digest() + cache.data = (client_key, server_key, salt, iterations) + stored_key = digestmod(client_key).digest() + auth_msg = b",".join((first_bare, server_first, without_proof)) + client_sig = _hmac(stored_key, auth_msg, digestmod).digest() + client_proof = b"p=" + standard_b64encode(_xor(client_key, client_sig)) + client_final = b",".join((without_proof, client_proof)) + + server_sig = standard_b64encode(_hmac(server_key, auth_msg, digestmod).digest()) + + cmd = SON( + [ + ("saslContinue", 1), + ("conversationId", res["conversationId"]), + ("payload", Binary(client_final)), + ] + ) + res = conn.command(source, cmd) + + parsed = _parse_scram_response(res["payload"]) + if not hmac.compare_digest(parsed[b"v"], server_sig): + raise OperationFailure("Server returned an invalid signature.") + + # A third empty challenge may be required if the server does not support + # skipEmptyExchange: SERVER-44857. 
+ if not res["done"]: + cmd = SON( + [ + ("saslContinue", 1), + ("conversationId", res["conversationId"]), + ("payload", Binary(b"")), + ] + ) + res = conn.command(source, cmd) + if not res["done"]: + raise OperationFailure("SASL conversation failed to complete.") + + +def _password_digest(username: str, password: str) -> str: + """Get a password digest to use for authentication.""" + if not isinstance(password, str): + raise TypeError("password must be an instance of str") + if len(password) == 0: + raise ValueError("password can't be empty") + if not isinstance(username, str): + raise TypeError("username must be an instance of str") + + md5hash = hashlib.md5() + data = f"{username}:mongo:{password}" + md5hash.update(data.encode("utf-8")) + return md5hash.hexdigest() + + +def _auth_key(nonce: str, username: str, password: str) -> str: + """Get an auth key to use for authentication.""" + digest = _password_digest(username, password) + md5hash = hashlib.md5() + data = f"{nonce}{username}{digest}" + md5hash.update(data.encode("utf-8")) + return md5hash.hexdigest() + + +def _canonicalize_hostname(hostname: str) -> str: + """Canonicalize hostname following MIT-krb5 behavior.""" + # https://github.com/krb5/krb5/blob/d406afa363554097ac48646a29249c04f498c88e/src/util/k5test.py#L505-L520 + af, socktype, proto, canonname, sockaddr = socket.getaddrinfo( + hostname, None, 0, 0, socket.IPPROTO_TCP, socket.AI_CANONNAME + )[0] + + try: + name = socket.getnameinfo(sockaddr, socket.NI_NAMEREQD) + except socket.gaierror: + return canonname.lower() + + return name[0].lower() + + +def _authenticate_gssapi(credentials: MongoCredential, conn: Connection) -> None: + """Authenticate using GSSAPI.""" + if not HAVE_KERBEROS: + raise ConfigurationError( + 'The "kerberos" module must be installed to use GSSAPI authentication.' + ) + + try: + username = credentials.username + password = credentials.password + props = credentials.mechanism_properties + # Starting here and continuing through the while loop below - establish + # the security context. See RFC 4752, Section 3.1, first paragraph. + host = conn.address[0] + if props.canonicalize_host_name: + host = _canonicalize_hostname(host) + service = props.service_name + "@" + host + if props.service_realm is not None: + service = service + "@" + props.service_realm + + if password is not None: + if _USE_PRINCIPAL: + # Note that, though we use unquote_plus for unquoting URI + # options, we use quote here. Microsoft's UrlUnescape (used + # by WinKerberos) doesn't support +. + principal = ":".join((quote(username), quote(password))) + result, ctx = kerberos.authGSSClientInit( + service, principal, gssflags=kerberos.GSS_C_MUTUAL_FLAG + ) + else: + if "@" in username: + user, domain = username.split("@", 1) + else: + user, domain = username, None + result, ctx = kerberos.authGSSClientInit( + service, + gssflags=kerberos.GSS_C_MUTUAL_FLAG, + user=user, + domain=domain, + password=password, + ) + else: + result, ctx = kerberos.authGSSClientInit(service, gssflags=kerberos.GSS_C_MUTUAL_FLAG) + + if result != kerberos.AUTH_GSS_COMPLETE: + raise OperationFailure("Kerberos context failed to initialize.") + + try: + # pykerberos uses a weird mix of exceptions and return values + # to indicate errors. + # 0 == continue, 1 == complete, -1 == error + # Only authGSSClientStep can return 0. 
+ if kerberos.authGSSClientStep(ctx, "") != 0: + raise OperationFailure("Unknown kerberos failure in step function.") + + # Start a SASL conversation with mongod/s + # Note: pykerberos deals with base64 encoded byte strings. + # Since mongo accepts base64 strings as the payload we don't + # have to use bson.binary.Binary. + payload = kerberos.authGSSClientResponse(ctx) + cmd = SON( + [ + ("saslStart", 1), + ("mechanism", "GSSAPI"), + ("payload", payload), + ("autoAuthorize", 1), + ] + ) + response = conn.command("$external", cmd) + + # Limit how many times we loop to catch protocol / library issues + for _ in range(10): + result = kerberos.authGSSClientStep(ctx, str(response["payload"])) + if result == -1: + raise OperationFailure("Unknown kerberos failure in step function.") + + payload = kerberos.authGSSClientResponse(ctx) or "" + + cmd = SON( + [ + ("saslContinue", 1), + ("conversationId", response["conversationId"]), + ("payload", payload), + ] + ) + response = conn.command("$external", cmd) + + if result == kerberos.AUTH_GSS_COMPLETE: + break + else: + raise OperationFailure("Kerberos authentication failed to complete.") + + # Once the security context is established actually authenticate. + # See RFC 4752, Section 3.1, last two paragraphs. + if kerberos.authGSSClientUnwrap(ctx, str(response["payload"])) != 1: + raise OperationFailure("Unknown kerberos failure during GSS_Unwrap step.") + + if kerberos.authGSSClientWrap(ctx, kerberos.authGSSClientResponse(ctx), username) != 1: + raise OperationFailure("Unknown kerberos failure during GSS_Wrap step.") + + payload = kerberos.authGSSClientResponse(ctx) + cmd = SON( + [ + ("saslContinue", 1), + ("conversationId", response["conversationId"]), + ("payload", payload), + ] + ) + conn.command("$external", cmd) + + finally: + kerberos.authGSSClientClean(ctx) + + except kerberos.KrbError as exc: + raise OperationFailure(str(exc)) + + +def _authenticate_plain(credentials: MongoCredential, conn: Connection) -> None: + """Authenticate using SASL PLAIN (RFC 4616)""" + source = credentials.source + username = credentials.username + password = credentials.password + payload = (f"\x00{username}\x00{password}").encode() + cmd = SON( + [ + ("saslStart", 1), + ("mechanism", "PLAIN"), + ("payload", Binary(payload)), + ("autoAuthorize", 1), + ] + ) + conn.command(source, cmd) + + +def _authenticate_x509(credentials: MongoCredential, conn: Connection) -> None: + """Authenticate using MONGODB-X509.""" + ctx = conn.auth_ctx + if ctx and ctx.speculate_succeeded(): + # MONGODB-X509 is done after the speculative auth step. + return + + cmd = _X509Context(credentials, conn.address).speculate_command() + conn.command("$external", cmd) + + +def _authenticate_mongo_cr(credentials: MongoCredential, conn: Connection) -> None: + """Authenticate using MONGODB-CR.""" + source = credentials.source + username = credentials.username + password = credentials.password + # Get a nonce + response = conn.command(source, {"getnonce": 1}) + nonce = response["nonce"] + key = _auth_key(nonce, username, password) + + # Actually authenticate + query = SON([("authenticate", 1), ("user", username), ("nonce", nonce), ("key", key)]) + conn.command(source, query) + + +def _authenticate_default(credentials: MongoCredential, conn: Connection) -> None: + if conn.max_wire_version >= 7: + if conn.negotiated_mechs: + mechs = conn.negotiated_mechs + else: + source = credentials.source + cmd = conn.hello_cmd() + cmd["saslSupportedMechs"] = source + "." 
+ credentials.username + mechs = conn.command(source, cmd, publish_events=False).get("saslSupportedMechs", []) + if "SCRAM-SHA-256" in mechs: + return _authenticate_scram(credentials, conn, "SCRAM-SHA-256") + else: + return _authenticate_scram(credentials, conn, "SCRAM-SHA-1") + else: + return _authenticate_scram(credentials, conn, "SCRAM-SHA-1") + + +_AUTH_MAP: Mapping[str, Callable] = { + "GSSAPI": _authenticate_gssapi, + "MONGODB-CR": _authenticate_mongo_cr, + "MONGODB-X509": _authenticate_x509, + "MONGODB-AWS": _authenticate_aws, + "PLAIN": _authenticate_plain, + "SCRAM-SHA-1": functools.partial(_authenticate_scram, mechanism="SCRAM-SHA-1"), + "SCRAM-SHA-256": functools.partial(_authenticate_scram, mechanism="SCRAM-SHA-256"), + "DEFAULT": _authenticate_default, +} + + +class _AuthContext: + def __init__(self, credentials: MongoCredential, address: tuple[str, int]) -> None: + self.credentials = credentials + self.speculative_authenticate: Optional[Mapping[str, Any]] = None + self.address = address + + @staticmethod + def from_credentials( + creds: MongoCredential, address: tuple[str, int] + ) -> Optional[_AuthContext]: + spec_cls = _SPECULATIVE_AUTH_MAP.get(creds.mechanism) + if spec_cls: + return spec_cls(creds, address) + return None + + def speculate_command(self) -> Optional[MutableMapping[str, Any]]: + raise NotImplementedError + + def parse_response(self, hello: Hello) -> None: + self.speculative_authenticate = hello.speculative_authenticate + + def speculate_succeeded(self) -> bool: + return bool(self.speculative_authenticate) + + +class _ScramContext(_AuthContext): + def __init__( + self, credentials: MongoCredential, address: tuple[str, int], mechanism: str + ) -> None: + super().__init__(credentials, address) + self.scram_data: Optional[tuple[bytes, bytes]] = None + self.mechanism = mechanism + + def speculate_command(self) -> Optional[MutableMapping[str, Any]]: + nonce, first_bare, cmd = _authenticate_scram_start(self.credentials, self.mechanism) + # The 'db' field is included only on the speculative command. + cmd["db"] = self.credentials.source + # Save for later use. 
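+        # Keeping (nonce, first_bare) lets _authenticate_scram() resume this
+        # conversation from the hello response (the ctx.scram_data branch
+        # there) instead of sending a fresh saslStart.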
+ self.scram_data = (nonce, first_bare) + return cmd + + +class _X509Context(_AuthContext): + def speculate_command(self) -> MutableMapping[str, Any]: + cmd = SON([("authenticate", 1), ("mechanism", "MONGODB-X509")]) + if self.credentials.username is not None: + cmd["user"] = self.credentials.username + return cmd + + +class _OIDCContext(_AuthContext): + def speculate_command(self) -> Optional[MutableMapping[str, Any]]: + authenticator = _get_authenticator(self.credentials, self.address) + cmd = authenticator.auth_start_cmd(False) + if cmd is None: + return None + cmd["db"] = self.credentials.source + return cmd + + +_SPECULATIVE_AUTH_MAP: Mapping[str, Callable] = { + "MONGODB-X509": _X509Context, + "SCRAM-SHA-1": functools.partial(_ScramContext, mechanism="SCRAM-SHA-1"), + "SCRAM-SHA-256": functools.partial(_ScramContext, mechanism="SCRAM-SHA-256"), + "MONGODB-OIDC": _OIDCContext, + "DEFAULT": functools.partial(_ScramContext, mechanism="SCRAM-SHA-256"), +} + + +def authenticate( + credentials: MongoCredential, conn: Connection, reauthenticate: bool = False +) -> None: + """Authenticate connection.""" + mechanism = credentials.mechanism + auth_func = _AUTH_MAP[mechanism] + if mechanism == "MONGODB-OIDC": + _authenticate_oidc(credentials, conn, reauthenticate) + else: + auth_func(credentials, conn) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/auth_aws.py b/backend/test/lib/python3.8/site-packages/pymongo/auth_aws.py new file mode 100644 index 0000000000000000000000000000000000000000..a327016d730056a2cfb68e1834ac980644e54a14 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/auth_aws.py @@ -0,0 +1,117 @@ +# Copyright 2020-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""MONGODB-AWS Authentication helpers.""" +from __future__ import annotations + +try: + import pymongo_auth_aws + from pymongo_auth_aws import AwsCredential, AwsSaslContext, PyMongoAuthAwsError + + _HAVE_MONGODB_AWS = True +except ImportError: + + class AwsSaslContext: # type: ignore + def __init__(self, credentials: MongoCredential): + pass + + _HAVE_MONGODB_AWS = False + +try: + from pymongo_auth_aws.auth import set_cached_credentials, set_use_cached_credentials + + # Enable credential caching. 
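+    # With caching enabled, temporary credentials fetched once (for example
+    # from EC2/ECS instance metadata) are reused across connections until
+    # they expire; _authenticate_aws() below clears the cache on any
+    # authentication failure.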
+ set_use_cached_credentials(True) +except ImportError: + + def set_cached_credentials(creds: Optional[AwsCredential]) -> None: + pass + + +from typing import TYPE_CHECKING, Any, Mapping, Optional, Type + +import bson +from bson.binary import Binary +from bson.son import SON +from pymongo.errors import ConfigurationError, OperationFailure + +if TYPE_CHECKING: + from bson.typings import _ReadableBuffer + from pymongo.auth import MongoCredential + from pymongo.pool import Connection + + +class _AwsSaslContext(AwsSaslContext): # type: ignore + # Dependency injection: + def binary_type(self) -> Type[Binary]: + """Return the bson.binary.Binary type.""" + return Binary + + def bson_encode(self, doc: Mapping[str, Any]) -> bytes: + """Encode a dictionary to BSON.""" + return bson.encode(doc) + + def bson_decode(self, data: _ReadableBuffer) -> Mapping[str, Any]: + """Decode BSON to a dictionary.""" + return bson.decode(data) + + +def _authenticate_aws(credentials: MongoCredential, conn: Connection) -> None: + """Authenticate using MONGODB-AWS.""" + if not _HAVE_MONGODB_AWS: + raise ConfigurationError( + "MONGODB-AWS authentication requires pymongo-auth-aws: " + "install with: python -m pip install 'pymongo[aws]'" + ) + + if conn.max_wire_version < 9: + raise ConfigurationError("MONGODB-AWS authentication requires MongoDB version 4.4 or later") + + try: + ctx = _AwsSaslContext( + AwsCredential( + credentials.username, + credentials.password, + credentials.mechanism_properties.aws_session_token, + ) + ) + client_payload = ctx.step(None) + client_first = SON( + [("saslStart", 1), ("mechanism", "MONGODB-AWS"), ("payload", client_payload)] + ) + server_first = conn.command("$external", client_first) + res = server_first + # Limit how many times we loop to catch protocol / library issues + for _ in range(10): + client_payload = ctx.step(res["payload"]) + cmd = SON( + [ + ("saslContinue", 1), + ("conversationId", server_first["conversationId"]), + ("payload", client_payload), + ] + ) + res = conn.command("$external", cmd) + if res["done"]: + # SASL complete. + break + except PyMongoAuthAwsError as exc: + # Clear the cached credentials if we hit a failure in auth. + set_cached_credentials(None) + # Convert to OperationFailure and include pymongo-auth-aws version. + raise OperationFailure(f"{exc} (pymongo-auth-aws version {pymongo_auth_aws.__version__})") + except Exception: + # Clear the cached credentials if we hit a failure in auth. + set_cached_credentials(None) + raise diff --git a/backend/test/lib/python3.8/site-packages/pymongo/auth_oidc.py b/backend/test/lib/python3.8/site-packages/pymongo/auth_oidc.py new file mode 100644 index 0000000000000000000000000000000000000000..0ca74fc49ddbf9f805db885dc3cc928f1f7e3041 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/auth_oidc.py @@ -0,0 +1,325 @@ +# Copyright 2023-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""MONGODB-OIDC Authentication helpers.""" +from __future__ import annotations + +import os +import threading +from dataclasses import dataclass, field +from datetime import datetime, timedelta, timezone +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + List, + Mapping, + MutableMapping, + Optional, + Tuple, +) + +import bson +from bson.binary import Binary +from bson.son import SON +from pymongo.errors import ConfigurationError, OperationFailure +from pymongo.helpers import _REAUTHENTICATION_REQUIRED_CODE + +if TYPE_CHECKING: + from pymongo.auth import MongoCredential + from pymongo.pool import Connection + + +@dataclass +class _OIDCProperties: + request_token_callback: Optional[Callable[..., Dict]] + refresh_token_callback: Optional[Callable[..., Dict]] + provider_name: Optional[str] + allowed_hosts: List[str] + + +"""Mechanism properties for MONGODB-OIDC authentication.""" + +TOKEN_BUFFER_MINUTES = 5 +CALLBACK_TIMEOUT_SECONDS = 5 * 60 +CACHE_TIMEOUT_MINUTES = 60 * 5 +CALLBACK_VERSION = 0 + +_CACHE: Dict[str, "_OIDCAuthenticator"] = {} + + +def _get_authenticator( + credentials: MongoCredential, address: Tuple[str, int] +) -> _OIDCAuthenticator: + # Clear out old items in the cache. + now_utc = datetime.now(timezone.utc) + to_remove = [] + for key, value in _CACHE.items(): + if value.cache_exp_utc is not None and value.cache_exp_utc < now_utc: + to_remove.append(key) + for key in to_remove: + del _CACHE[key] + + # Extract values. + principal_name = credentials.username + properties = credentials.mechanism_properties + request_cb = properties.request_token_callback + refresh_cb = properties.refresh_token_callback + + # Validate that the address is allowed. + if not properties.provider_name: + found = False + allowed_hosts = properties.allowed_hosts + for patt in allowed_hosts: + if patt == address[0]: + found = True + elif patt.startswith("*.") and address[0].endswith(patt[1:]): + found = True + if not found: + raise ConfigurationError( + f"Refusing to connect to {address[0]}, which is not in authOIDCAllowedHosts: {allowed_hosts}" + ) + + # Get or create the cache item. 
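+    # The key mixes the principal, the server address, and the identities of
+    # both callbacks, so different callbacks or hosts never share token state.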
+    cache_key = f"{principal_name}{address[0]}{address[1]}{id(request_cb)}{id(refresh_cb)}"
+    _CACHE.setdefault(cache_key, _OIDCAuthenticator(username=principal_name, properties=properties))
+
+    return _CACHE[cache_key]
+
+
+def _get_cache_exp() -> datetime:
+    return datetime.now(timezone.utc) + timedelta(minutes=CACHE_TIMEOUT_MINUTES)
+
+
+@dataclass
+class _OIDCAuthenticator:
+    username: str
+    properties: _OIDCProperties
+    idp_info: Optional[Dict] = field(default=None)
+    idp_resp: Optional[Dict] = field(default=None)
+    reauth_gen_id: int = field(default=0)
+    idp_info_gen_id: int = field(default=0)
+    token_gen_id: int = field(default=0)
+    token_exp_utc: Optional[datetime] = field(default=None)
+    cache_exp_utc: datetime = field(default_factory=_get_cache_exp)
+    lock: threading.Lock = field(default_factory=threading.Lock)
+
+    def get_current_token(self, use_callbacks: bool = True) -> Optional[str]:
+        properties = self.properties
+
+        request_cb = properties.request_token_callback
+        refresh_cb = properties.refresh_token_callback
+        if not use_callbacks:
+            request_cb = None
+            refresh_cb = None
+
+        current_valid_token = False
+        if self.token_exp_utc is not None:
+            now_utc = datetime.now(timezone.utc)
+            exp_utc = self.token_exp_utc
+            buffer_seconds = TOKEN_BUFFER_MINUTES * 60
+            if (exp_utc - now_utc).total_seconds() >= buffer_seconds:
+                current_valid_token = True
+
+        timeout = CALLBACK_TIMEOUT_SECONDS
+        if not use_callbacks and not current_valid_token:
+            return None
+
+        if not current_valid_token and request_cb is not None:
+            prev_token = self.idp_resp["access_token"] if self.idp_resp else None
+            with self.lock:
+                # See if the token was changed while we were waiting for the
+                # lock.
+                new_token = self.idp_resp["access_token"] if self.idp_resp else None
+                if new_token != prev_token:
+                    return new_token
+
+                refresh_token = self.idp_resp and self.idp_resp.get("refresh_token")
+                refresh_token = refresh_token or ""
+                context = {
+                    "timeout_seconds": timeout,
+                    "version": CALLBACK_VERSION,
+                    "refresh_token": refresh_token,
+                }
+
+                if self.idp_resp is None or refresh_cb is None:
+                    self.idp_resp = request_cb(self.idp_info, context)
+                elif request_cb is not None:
+                    self.idp_resp = refresh_cb(self.idp_info, context)
+                cache_exp_utc = datetime.now(timezone.utc) + timedelta(
+                    minutes=CACHE_TIMEOUT_MINUTES
+                )
+                self.cache_exp_utc = cache_exp_utc
+                self.token_gen_id += 1
+
+        token_result = self.idp_resp
+
+        # Validate callback return value.
+        if not isinstance(token_result, dict):
+            raise ValueError("OIDC callback returned invalid result")
+
+        if "access_token" not in token_result:
+            raise ValueError("OIDC callback did not return an access_token")
+
+        expected = ["access_token", "expires_in_seconds", "refresh_token"]
+        for key in token_result:
+            if key not in expected:
+                raise ValueError(f'Unexpected field in callback result "{key}"')
+
+        token = token_result["access_token"]
+
+        if "expires_in_seconds" in token_result:
+            expires_in = int(token_result["expires_in_seconds"])
+            buffer_seconds = TOKEN_BUFFER_MINUTES * 60
+            if expires_in >= buffer_seconds:
+                now_utc = datetime.now(timezone.utc)
+                exp_utc = now_utc + timedelta(seconds=expires_in)
+                self.token_exp_utc = exp_utc
+
+        return token
+
+    def auth_start_cmd(self, use_callbacks: bool = True) -> Optional[SON[str, Any]]:
+        properties = self.properties
+
+        # Handle aws provider credentials.
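+        # With PROVIDER_NAME=aws no callbacks are involved: the JWT is read
+        # from the file named by AWS_WEB_IDENTITY_TOKEN_FILE (set, for
+        # example, by EKS IAM roles for service accounts) and sent directly
+        # in the saslStart payload.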
+ if properties.provider_name == "aws": + aws_identity_file = os.environ["AWS_WEB_IDENTITY_TOKEN_FILE"] + with open(aws_identity_file) as fid: + token: Optional[str] = fid.read().strip() + payload = {"jwt": token} + cmd = SON( + [ + ("saslStart", 1), + ("mechanism", "MONGODB-OIDC"), + ("payload", Binary(bson.encode(payload))), + ] + ) + return cmd + + principal_name = self.username + + if self.idp_info is not None: + self.cache_exp_utc = datetime.now(timezone.utc) + timedelta( + minutes=CACHE_TIMEOUT_MINUTES + ) + + if self.idp_info is None: + self.cache_exp_utc = _get_cache_exp() + + if self.idp_info is None: + # Send the SASL start with the optional principal name. + payload = {} + + if principal_name: + payload["n"] = principal_name + + cmd = SON( + [ + ("saslStart", 1), + ("mechanism", "MONGODB-OIDC"), + ("payload", Binary(bson.encode(payload))), + ("autoAuthorize", 1), + ] + ) + return cmd + + token = self.get_current_token(use_callbacks) + if not token: + return None + bin_payload = Binary(bson.encode({"jwt": token})) + return SON( + [ + ("saslStart", 1), + ("mechanism", "MONGODB-OIDC"), + ("payload", bin_payload), + ] + ) + + def clear(self) -> None: + self.idp_info = None + self.idp_resp = None + self.token_exp_utc = None + + def run_command( + self, conn: Connection, cmd: MutableMapping[str, Any] + ) -> Optional[Mapping[str, Any]]: + try: + return conn.command("$external", cmd, no_reauth=True) # type: ignore[call-arg] + except OperationFailure as exc: + self.clear() + if exc.code == _REAUTHENTICATION_REQUIRED_CODE: + if "jwt" in bson.decode(cmd["payload"]): + if self.idp_info_gen_id > self.reauth_gen_id: + raise + return self.authenticate(conn, reauthenticate=True) + raise + + def authenticate( + self, conn: Connection, reauthenticate: bool = False + ) -> Optional[Mapping[str, Any]]: + if reauthenticate: + prev_id = getattr(conn, "oidc_token_gen_id", None) + # Check if we've already changed tokens. 
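+            # Only invalidate the token if this connection still holds the
+            # generation that just failed; if another thread has already
+            # refreshed it, the newer token is reused instead of forcing
+            # another callback round trip.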
+ if prev_id == self.token_gen_id: + self.reauth_gen_id = self.idp_info_gen_id + self.token_exp_utc = None + if not self.properties.refresh_token_callback: + self.clear() + + ctx = conn.auth_ctx + cmd = None + + if ctx and ctx.speculate_succeeded(): + resp = ctx.speculative_authenticate + else: + cmd = self.auth_start_cmd() + assert cmd is not None + resp = self.run_command(conn, cmd) + + assert resp is not None + if resp["done"]: + conn.oidc_token_gen_id = self.token_gen_id + return None + + server_resp: Dict = bson.decode(resp["payload"]) + if "issuer" in server_resp: + self.idp_info = server_resp + self.idp_info_gen_id += 1 + + conversation_id = resp["conversationId"] + token = self.get_current_token() + conn.oidc_token_gen_id = self.token_gen_id + bin_payload = Binary(bson.encode({"jwt": token})) + cmd = SON( + [ + ("saslContinue", 1), + ("conversationId", conversation_id), + ("payload", bin_payload), + ] + ) + resp = self.run_command(conn, cmd) + assert resp is not None + if not resp["done"]: + self.clear() + raise OperationFailure("SASL conversation failed to complete.") + return resp + + +def _authenticate_oidc( + credentials: MongoCredential, conn: Connection, reauthenticate: bool +) -> Optional[Mapping[str, Any]]: + """Authenticate using MONGODB-OIDC.""" + authenticator = _get_authenticator(credentials, conn.address) + return authenticator.authenticate(conn, reauthenticate=reauthenticate) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/bulk.py b/backend/test/lib/python3.8/site-packages/pymongo/bulk.py new file mode 100644 index 0000000000000000000000000000000000000000..3401398d72b1d21a33abb5c128f7f91025b7a1bc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/bulk.py @@ -0,0 +1,580 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""The bulk write operations interface. + +.. 
versionadded:: 2.7 +""" +from __future__ import annotations + +import copy +from collections.abc import MutableMapping +from itertools import islice +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterator, + List, + Mapping, + NoReturn, + Optional, + Tuple, + Type, + Union, +) + +from bson.objectid import ObjectId +from bson.raw_bson import RawBSONDocument +from bson.son import SON +from pymongo import _csot, common +from pymongo.client_session import ClientSession, _validate_session_write_concern +from pymongo.common import ( + validate_is_document_type, + validate_ok_for_replace, + validate_ok_for_update, +) +from pymongo.errors import ( + BulkWriteError, + ConfigurationError, + InvalidOperation, + OperationFailure, +) +from pymongo.helpers import _RETRYABLE_ERROR_CODES, _get_wce_doc +from pymongo.message import ( + _DELETE, + _INSERT, + _UPDATE, + _BulkWriteContext, + _EncryptedBulkWriteContext, + _randint, +) +from pymongo.read_preferences import ReadPreference +from pymongo.write_concern import WriteConcern + +if TYPE_CHECKING: + from pymongo.collection import Collection + from pymongo.pool import Connection + from pymongo.typings import _DocumentOut, _DocumentType, _Pipeline + +_DELETE_ALL: int = 0 +_DELETE_ONE: int = 1 + +# For backwards compatibility. See MongoDB src/mongo/base/error_codes.err +_BAD_VALUE: int = 2 +_UNKNOWN_ERROR: int = 8 +_WRITE_CONCERN_ERROR: int = 64 + +_COMMANDS: Tuple[str, str, str] = ("insert", "update", "delete") + + +class _Run: + """Represents a batch of write operations.""" + + def __init__(self, op_type: int) -> None: + """Initialize a new Run object.""" + self.op_type: int = op_type + self.index_map: List[int] = [] + self.ops: List[Any] = [] + self.idx_offset: int = 0 + + def index(self, idx: int) -> int: + """Get the original index of an operation in this run. + + :Parameters: + - `idx`: The Run index that maps to the original index. + """ + return self.index_map[idx] + + def add(self, original_index: int, operation: Any) -> None: + """Add an operation to this Run instance. + + :Parameters: + - `original_index`: The original index of this operation + within a larger bulk operation. + - `operation`: The operation document. + """ + self.index_map.append(original_index) + self.ops.append(operation) + + +def _merge_command( + run: _Run, + full_result: MutableMapping[str, Any], + offset: int, + result: Mapping[str, Any], +) -> None: + """Merge a write command result into the full bulk result.""" + affected = result.get("n", 0) + + if run.op_type == _INSERT: + full_result["nInserted"] += affected + + elif run.op_type == _DELETE: + full_result["nRemoved"] += affected + + elif run.op_type == _UPDATE: + upserted = result.get("upserted") + if upserted: + n_upserted = len(upserted) + for doc in upserted: + doc["index"] = run.index(doc["index"] + offset) + full_result["upserted"].extend(upserted) + full_result["nUpserted"] += n_upserted + full_result["nMatched"] += affected - n_upserted + else: + full_result["nMatched"] += affected + full_result["nModified"] += result["nModified"] + + write_errors = result.get("writeErrors") + if write_errors: + for doc in write_errors: + # Leave the server response intact for APM. + replacement = doc.copy() + idx = doc["index"] + offset + replacement["index"] = run.index(idx) + # Add the failed operation to the error document. 
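+                # run.ops[idx] is the failing operation itself; attaching it
+                # lets BulkWriteError report exactly which write failed.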
+ replacement["op"] = run.ops[idx] + full_result["writeErrors"].append(replacement) + + wce = _get_wce_doc(result) + if wce: + full_result["writeConcernErrors"].append(wce) + + +def _raise_bulk_write_error(full_result: _DocumentOut) -> NoReturn: + """Raise a BulkWriteError from the full bulk api result.""" + if full_result["writeErrors"]: + full_result["writeErrors"].sort(key=lambda error: error["index"]) + raise BulkWriteError(full_result) + + +class _Bulk: + """The private guts of the bulk write API.""" + + def __init__( + self, + collection: Collection[_DocumentType], + ordered: bool, + bypass_document_validation: bool, + comment: Optional[str] = None, + let: Optional[Any] = None, + ) -> None: + """Initialize a _Bulk instance.""" + self.collection = collection.with_options( + codec_options=collection.codec_options._replace( + unicode_decode_error_handler="replace", document_class=dict + ) + ) + self.let = let + if self.let is not None: + common.validate_is_document_type("let", self.let) + self.comment: Optional[str] = comment + self.ordered = ordered + self.ops: List[Tuple[int, Mapping[str, Any]]] = [] + self.executed = False + self.bypass_doc_val = bypass_document_validation + self.uses_collation = False + self.uses_array_filters = False + self.uses_hint_update = False + self.uses_hint_delete = False + self.is_retryable = True + self.retrying = False + self.started_retryable_write = False + # Extra state so that we know where to pick up on a retry attempt. + self.current_run = None + self.next_run = None + + @property + def bulk_ctx_class(self) -> Type[_BulkWriteContext]: + encrypter = self.collection.database.client._encrypter + if encrypter and not encrypter._bypass_auto_encryption: + return _EncryptedBulkWriteContext + else: + return _BulkWriteContext + + def add_insert(self, document: _DocumentOut) -> None: + """Add an insert document to the list of ops.""" + validate_is_document_type("document", document) + # Generate ObjectId client side. + if not (isinstance(document, RawBSONDocument) or "_id" in document): + document["_id"] = ObjectId() + self.ops.append((_INSERT, document)) + + def add_update( + self, + selector: Mapping[str, Any], + update: Union[Mapping[str, Any], _Pipeline], + multi: bool = False, + upsert: bool = False, + collation: Optional[Mapping[str, Any]] = None, + array_filters: Optional[List[Mapping[str, Any]]] = None, + hint: Union[str, SON[str, Any], None] = None, + ) -> None: + """Create an update document and add it to the list of ops.""" + validate_ok_for_update(update) + cmd: Dict[str, Any] = dict( + [("q", selector), ("u", update), ("multi", multi), ("upsert", upsert)] + ) + if collation is not None: + self.uses_collation = True + cmd["collation"] = collation + if array_filters is not None: + self.uses_array_filters = True + cmd["arrayFilters"] = array_filters + if hint is not None: + self.uses_hint_update = True + cmd["hint"] = hint + if multi: + # A bulk_write containing an update_many is not retryable. 
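+            # Retryable writes cover only operations that touch at most one
+            # document, so a single multi-document op disqualifies the batch.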
+ self.is_retryable = False + self.ops.append((_UPDATE, cmd)) + + def add_replace( + self, + selector: Mapping[str, Any], + replacement: Mapping[str, Any], + upsert: bool = False, + collation: Optional[Mapping[str, Any]] = None, + hint: Union[str, SON[str, Any], None] = None, + ) -> None: + """Create a replace document and add it to the list of ops.""" + validate_ok_for_replace(replacement) + cmd = SON([("q", selector), ("u", replacement), ("multi", False), ("upsert", upsert)]) + if collation is not None: + self.uses_collation = True + cmd["collation"] = collation + if hint is not None: + self.uses_hint_update = True + cmd["hint"] = hint + self.ops.append((_UPDATE, cmd)) + + def add_delete( + self, + selector: Mapping[str, Any], + limit: int, + collation: Optional[Mapping[str, Any]] = None, + hint: Union[str, SON[str, Any], None] = None, + ) -> None: + """Create a delete document and add it to the list of ops.""" + cmd = SON([("q", selector), ("limit", limit)]) + if collation is not None: + self.uses_collation = True + cmd["collation"] = collation + if hint is not None: + self.uses_hint_delete = True + cmd["hint"] = hint + if limit == _DELETE_ALL: + # A bulk_write containing a delete_many is not retryable. + self.is_retryable = False + self.ops.append((_DELETE, cmd)) + + def gen_ordered(self) -> Iterator[Optional[_Run]]: + """Generate batches of operations, batched by type of + operation, in the order **provided**. + """ + run = None + for idx, (op_type, operation) in enumerate(self.ops): + if run is None: + run = _Run(op_type) + elif run.op_type != op_type: + yield run + run = _Run(op_type) + run.add(idx, operation) + yield run + + def gen_unordered(self) -> Iterator[_Run]: + """Generate batches of operations, batched by type of + operation, in arbitrary order. + """ + operations = [_Run(_INSERT), _Run(_UPDATE), _Run(_DELETE)] + for idx, (op_type, operation) in enumerate(self.ops): + operations[op_type].add(idx, operation) + + for run in operations: + if run.ops: + yield run + + def _execute_command( + self, + generator: Iterator[Any], + write_concern: WriteConcern, + session: Optional[ClientSession], + conn: Connection, + op_id: int, + retryable: bool, + full_result: MutableMapping[str, Any], + final_write_concern: Optional[WriteConcern] = None, + ) -> None: + db_name = self.collection.database.name + client = self.collection.database.client + listeners = client._event_listeners + + if not self.current_run: + self.current_run = next(generator) + self.next_run = None + run = self.current_run + + # Connection.command validates the session, but we use + # Connection.write_command + conn.validate_session(client, session) + last_run = False + + while run: + if not self.retrying: + self.next_run = next(generator, None) + if self.next_run is None: + last_run = True + + cmd_name = _COMMANDS[run.op_type] + bwc = self.bulk_ctx_class( + db_name, + cmd_name, + conn, + op_id, + listeners, + session, + run.op_type, + self.collection.codec_options, + ) + + while run.idx_offset < len(run.ops): + # If this is the last possible operation, use the + # final write concern. 
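+                # execute_command_no_results depends on this: it drives an
+                # ordered w:0 bulk with an acknowledged write concern so that
+                # errors can halt execution, and applies the caller's
+                # unacknowledged write concern only to the final operation.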
+ if last_run and (len(run.ops) - run.idx_offset) == 1: + write_concern = final_write_concern or write_concern + + cmd = SON([(cmd_name, self.collection.name), ("ordered", self.ordered)]) + if self.comment: + cmd["comment"] = self.comment + _csot.apply_write_concern(cmd, write_concern) + if self.bypass_doc_val: + cmd["bypassDocumentValidation"] = True + if self.let is not None and run.op_type in (_DELETE, _UPDATE): + cmd["let"] = self.let + if session: + # Start a new retryable write unless one was already + # started for this command. + if retryable and not self.started_retryable_write: + session._start_retryable_write() + self.started_retryable_write = True + session._apply_to(cmd, retryable, ReadPreference.PRIMARY, conn) + conn.send_cluster_time(cmd, session, client) + conn.add_server_api(cmd) + # CSOT: apply timeout before encoding the command. + conn.apply_timeout(client, cmd) + ops = islice(run.ops, run.idx_offset, None) + + # Run as many ops as possible in one command. + if write_concern.acknowledged: + result, to_send = bwc.execute(cmd, ops, client) + + # Retryable writeConcernErrors halt the execution of this run. + wce = result.get("writeConcernError", {}) + if wce.get("code", 0) in _RETRYABLE_ERROR_CODES: + # Synthesize the full bulk result without modifying the + # current one because this write operation may be retried. + full = copy.deepcopy(full_result) + _merge_command(run, full, run.idx_offset, result) + _raise_bulk_write_error(full) + + _merge_command(run, full_result, run.idx_offset, result) + + # We're no longer in a retry once a command succeeds. + self.retrying = False + self.started_retryable_write = False + + if self.ordered and "writeErrors" in result: + break + else: + to_send = bwc.execute_unack(cmd, ops, client) + + run.idx_offset += len(to_send) + + # We're supposed to continue if errors are + # at the write concern level (e.g. wtimeout) + if self.ordered and full_result["writeErrors"]: + break + # Reset our state + self.current_run = run = self.next_run + + def execute_command( + self, + generator: Iterator[Any], + write_concern: WriteConcern, + session: Optional[ClientSession], + ) -> Dict[str, Any]: + """Execute using write commands.""" + # nModified is only reported for write commands, not legacy ops. 
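+        # Aggregated result across all batches; the keys mirror the server's
+        # write-command reply fields.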
+ full_result = { + "writeErrors": [], + "writeConcernErrors": [], + "nInserted": 0, + "nUpserted": 0, + "nMatched": 0, + "nModified": 0, + "nRemoved": 0, + "upserted": [], + } + op_id = _randint() + + def retryable_bulk( + session: Optional[ClientSession], conn: Connection, retryable: bool + ) -> None: + self._execute_command( + generator, + write_concern, + session, + conn, + op_id, + retryable, + full_result, + ) + + client = self.collection.database.client + with client._tmp_session(session) as s: + client._retry_with_session(self.is_retryable, retryable_bulk, s, self) + + if full_result["writeErrors"] or full_result["writeConcernErrors"]: + _raise_bulk_write_error(full_result) + return full_result + + def execute_op_msg_no_results(self, conn: Connection, generator: Iterator[Any]) -> None: + """Execute write commands with OP_MSG and w=0 writeConcern, unordered.""" + db_name = self.collection.database.name + client = self.collection.database.client + listeners = client._event_listeners + op_id = _randint() + + if not self.current_run: + self.current_run = next(generator) + run = self.current_run + + while run: + cmd_name = _COMMANDS[run.op_type] + bwc = self.bulk_ctx_class( + db_name, + cmd_name, + conn, + op_id, + listeners, + None, + run.op_type, + self.collection.codec_options, + ) + + while run.idx_offset < len(run.ops): + cmd = SON( + [ + (cmd_name, self.collection.name), + ("ordered", False), + ("writeConcern", {"w": 0}), + ] + ) + conn.add_server_api(cmd) + ops = islice(run.ops, run.idx_offset, None) + # Run as many ops as possible. + to_send = bwc.execute_unack(cmd, ops, client) + run.idx_offset += len(to_send) + self.current_run = run = next(generator, None) + + def execute_command_no_results( + self, + conn: Connection, + generator: Iterator[Any], + write_concern: WriteConcern, + ) -> None: + """Execute write commands with OP_MSG and w=0 WriteConcern, ordered.""" + full_result = { + "writeErrors": [], + "writeConcernErrors": [], + "nInserted": 0, + "nUpserted": 0, + "nMatched": 0, + "nModified": 0, + "nRemoved": 0, + "upserted": [], + } + # Ordered bulk writes have to be acknowledged so that we stop + # processing at the first error, even when the application + # specified unacknowledged writeConcern. + initial_write_concern = WriteConcern() + op_id = _randint() + try: + self._execute_command( + generator, + initial_write_concern, + None, + conn, + op_id, + False, + full_result, + write_concern, + ) + except OperationFailure: + pass + + def execute_no_results( + self, + conn: Connection, + generator: Iterator[Any], + write_concern: WriteConcern, + ) -> None: + """Execute all operations, returning no results (w=0).""" + if self.uses_collation: + raise ConfigurationError("Collation is unsupported for unacknowledged writes.") + if self.uses_array_filters: + raise ConfigurationError("arrayFilters is unsupported for unacknowledged writes.") + # Guard against unsupported unacknowledged writes. + unack = write_concern and not write_concern.acknowledged + if unack and self.uses_hint_delete and conn.max_wire_version < 9: + raise ConfigurationError( + "Must be connected to MongoDB 4.4+ to use hint on unacknowledged delete commands." + ) + if unack and self.uses_hint_update and conn.max_wire_version < 8: + raise ConfigurationError( + "Must be connected to MongoDB 4.2+ to use hint on unacknowledged update commands." + ) + # Cannot have both unacknowledged writes and bypass document validation. 
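+        # With w:0 the server sends no reply, so a document-validation
+        # failure could never be reported back; reject the combination early.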
+        if self.bypass_doc_val:
+            raise OperationFailure(
+                "Cannot set bypass_document_validation with unacknowledged write concern"
+            )
+
+        if self.ordered:
+            return self.execute_command_no_results(conn, generator, write_concern)
+        return self.execute_op_msg_no_results(conn, generator)
+
+    def execute(self, write_concern: WriteConcern, session: Optional[ClientSession]) -> Any:
+        """Execute operations."""
+        if not self.ops:
+            raise InvalidOperation("No operations to execute")
+        if self.executed:
+            raise InvalidOperation("Bulk operations can only be executed once.")
+        self.executed = True
+        write_concern = write_concern or self.collection.write_concern
+        session = _validate_session_write_concern(session, write_concern)
+
+        if self.ordered:
+            generator = self.gen_ordered()
+        else:
+            generator = self.gen_unordered()
+
+        client = self.collection.database.client
+        if not write_concern.acknowledged:
+            with client._conn_for_writes(session) as connection:
+                self.execute_no_results(connection, generator, write_concern)
+            return None
+        else:
+            return self.execute_command(generator, write_concern, session)
diff --git a/backend/test/lib/python3.8/site-packages/pymongo/change_stream.py b/backend/test/lib/python3.8/site-packages/pymongo/change_stream.py
new file mode 100644
index 0000000000000000000000000000000000000000..ed3031ef523ee806a740e345fb3d140368b85e80
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/pymongo/change_stream.py
@@ -0,0 +1,497 @@
+# Copyright 2017 MongoDB, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you
+# may not use this file except in compliance with the License. You
+# may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing
+# permissions and limitations under the License.
+
+"""Watch changes on a collection, a database, or the entire cluster."""
+from __future__ import annotations
+
+import copy
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    Generic,
+    List,
+    Mapping,
+    Optional,
+    Type,
+    Union,
+)
+
+from bson import _bson_to_dict
+from bson.raw_bson import RawBSONDocument
+from bson.timestamp import Timestamp
+from pymongo import _csot, common
+from pymongo.aggregation import (
+    _AggregationCommand,
+    _CollectionAggregationCommand,
+    _DatabaseAggregationCommand,
+)
+from pymongo.collation import validate_collation_or_none
+from pymongo.command_cursor import CommandCursor
+from pymongo.errors import (
+    ConnectionFailure,
+    CursorNotFound,
+    InvalidOperation,
+    OperationFailure,
+    PyMongoError,
+)
+from pymongo.typings import _CollationIn, _DocumentType, _Pipeline
+
+# The change streams spec treats the following server errors from the
+# getMore command as resumable. All other getMore errors are non-resumable.
+_RESUMABLE_GETMORE_ERRORS = frozenset( + [ + 6, # HostUnreachable + 7, # HostNotFound + 89, # NetworkTimeout + 91, # ShutdownInProgress + 189, # PrimarySteppedDown + 262, # ExceededTimeLimit + 9001, # SocketException + 10107, # NotWritablePrimary + 11600, # InterruptedAtShutdown + 11602, # InterruptedDueToReplStateChange + 13435, # NotPrimaryNoSecondaryOk + 13436, # NotPrimaryOrSecondary + 63, # StaleShardVersion + 150, # StaleEpoch + 13388, # StaleConfig + 234, # RetryChangeStream + 133, # FailedToSatisfyReadPreference + ] +) + + +if TYPE_CHECKING: + from pymongo.client_session import ClientSession + from pymongo.collection import Collection + from pymongo.database import Database + from pymongo.mongo_client import MongoClient + from pymongo.pool import Connection + + +def _resumable(exc: PyMongoError) -> bool: + """Return True if given a resumable change stream error.""" + if isinstance(exc, (ConnectionFailure, CursorNotFound)): + return True + if isinstance(exc, OperationFailure): + if exc._max_wire_version is None: + return False + return ( + exc._max_wire_version >= 9 and exc.has_error_label("ResumableChangeStreamError") + ) or (exc._max_wire_version < 9 and exc.code in _RESUMABLE_GETMORE_ERRORS) + return False + + +class ChangeStream(Generic[_DocumentType]): + """The internal abstract base class for change stream cursors. + + Should not be called directly by application developers. Use + :meth:`pymongo.collection.Collection.watch`, + :meth:`pymongo.database.Database.watch`, or + :meth:`pymongo.mongo_client.MongoClient.watch` instead. + + .. versionadded:: 3.6 + .. seealso:: The MongoDB documentation on `changeStreams <https://mongodb.com/docs/manual/changeStreams/>`_. + """ + + def __init__( + self, + target: Union[ + MongoClient[_DocumentType], Database[_DocumentType], Collection[_DocumentType] + ], + pipeline: Optional[_Pipeline], + full_document: Optional[str], + resume_after: Optional[Mapping[str, Any]], + max_await_time_ms: Optional[int], + batch_size: Optional[int], + collation: Optional[_CollationIn], + start_at_operation_time: Optional[Timestamp], + session: Optional[ClientSession], + start_after: Optional[Mapping[str, Any]], + comment: Optional[Any] = None, + full_document_before_change: Optional[str] = None, + show_expanded_events: Optional[bool] = None, + ) -> None: + if pipeline is None: + pipeline = [] + pipeline = common.validate_list("pipeline", pipeline) + common.validate_string_or_none("full_document", full_document) + validate_collation_or_none(collation) + common.validate_non_negative_integer_or_none("batchSize", batch_size) + + self._decode_custom = False + self._orig_codec_options = target.codec_options + if target.codec_options.type_registry._decoder_map: + self._decode_custom = True + # Keep the type registry so that we support encoding custom types + # in the pipeline. 
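+            # Fetch changes as RawBSONDocuments; try_next re-decodes each one
+            # with the original codec options so custom type decoders see the
+            # complete document.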
+ self._target = target.with_options( # type: ignore + codec_options=target.codec_options.with_options(document_class=RawBSONDocument) + ) + else: + self._target = target + + self._pipeline = copy.deepcopy(pipeline) + self._full_document = full_document + self._full_document_before_change = full_document_before_change + self._uses_start_after = start_after is not None + self._uses_resume_after = resume_after is not None + self._resume_token = copy.deepcopy(start_after or resume_after) + self._max_await_time_ms = max_await_time_ms + self._batch_size = batch_size + self._collation = collation + self._start_at_operation_time = start_at_operation_time + self._session = session + self._comment = comment + self._closed = False + self._timeout = self._target._timeout + self._show_expanded_events = show_expanded_events + # Initialize cursor. + self._cursor = self._create_cursor() + + @property + def _aggregation_command_class(self) -> Type[_AggregationCommand]: + """The aggregation command class to be used.""" + raise NotImplementedError + + @property + def _client(self) -> MongoClient: + """The client against which the aggregation commands for + this ChangeStream will be run. + """ + raise NotImplementedError + + def _change_stream_options(self) -> Dict[str, Any]: + """Return the options dict for the $changeStream pipeline stage.""" + options: Dict[str, Any] = {} + if self._full_document is not None: + options["fullDocument"] = self._full_document + + if self._full_document_before_change is not None: + options["fullDocumentBeforeChange"] = self._full_document_before_change + + resume_token = self.resume_token + if resume_token is not None: + if self._uses_start_after: + options["startAfter"] = resume_token + else: + options["resumeAfter"] = resume_token + + if self._start_at_operation_time is not None: + options["startAtOperationTime"] = self._start_at_operation_time + + if self._show_expanded_events: + options["showExpandedEvents"] = self._show_expanded_events + + return options + + def _command_options(self) -> Dict[str, Any]: + """Return the options dict for the aggregation command.""" + options = {} + if self._max_await_time_ms is not None: + options["maxAwaitTimeMS"] = self._max_await_time_ms + if self._batch_size is not None: + options["batchSize"] = self._batch_size + return options + + def _aggregation_pipeline(self) -> List[Dict[str, Any]]: + """Return the full aggregation pipeline for this ChangeStream.""" + options = self._change_stream_options() + full_pipeline: list = [{"$changeStream": options}] + full_pipeline.extend(self._pipeline) + return full_pipeline + + def _process_result(self, result: Mapping[str, Any], conn: Connection) -> None: + """Callback that caches the postBatchResumeToken or + startAtOperationTime from a changeStream aggregate command response + containing an empty batch of change documents. + + This is implemented as a callback because we need access to the wire + version in order to determine whether to cache this value. + """ + if not result["cursor"]["firstBatch"]: + if "postBatchResumeToken" in result["cursor"]: + self._resume_token = result["cursor"]["postBatchResumeToken"] + elif ( + self._start_at_operation_time is None + and self._uses_resume_after is False + and self._uses_start_after is False + and conn.max_wire_version >= 7 + ): + self._start_at_operation_time = result.get("operationTime") + # PYTHON-2181: informative error on missing operationTime. 
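+                # Servers new enough to speak wire version 7 always include
+                # operationTime, so its absence signals a malformed reply;
+                # fail fast instead of building a stream that cannot resume.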
+ if self._start_at_operation_time is None: + raise OperationFailure( + "Expected field 'operationTime' missing from command " + "response : {!r}".format(result) + ) + + def _run_aggregation_cmd( + self, session: Optional[ClientSession], explicit_session: bool + ) -> CommandCursor: + """Run the full aggregation pipeline for this ChangeStream and return + the corresponding CommandCursor. + """ + cmd = self._aggregation_command_class( + self._target, + CommandCursor, + self._aggregation_pipeline(), + self._command_options(), + explicit_session, + result_processor=self._process_result, + comment=self._comment, + ) + return self._client._retryable_read( + cmd.get_cursor, self._target._read_preference_for(session), session + ) + + def _create_cursor(self) -> CommandCursor: + with self._client._tmp_session(self._session, close=False) as s: + return self._run_aggregation_cmd(session=s, explicit_session=self._session is not None) + + def _resume(self) -> None: + """Reestablish this change stream after a resumable error.""" + try: + self._cursor.close() + except PyMongoError: + pass + self._cursor = self._create_cursor() + + def close(self) -> None: + """Close this ChangeStream.""" + self._closed = True + self._cursor.close() + + def __iter__(self) -> "ChangeStream[_DocumentType]": + return self + + @property + def resume_token(self) -> Optional[Mapping[str, Any]]: + """The cached resume token that will be used to resume after the most + recently returned change. + + .. versionadded:: 3.9 + """ + return copy.deepcopy(self._resume_token) + + @_csot.apply + def next(self) -> _DocumentType: + """Advance the cursor. + + This method blocks until the next change document is returned or an + unrecoverable error is raised. This method is used when iterating over + all changes in the cursor. For example:: + + try: + resume_token = None + pipeline = [{'$match': {'operationType': 'insert'}}] + with db.collection.watch(pipeline) as stream: + for insert_change in stream: + print(insert_change) + resume_token = stream.resume_token + except pymongo.errors.PyMongoError: + # The ChangeStream encountered an unrecoverable error or the + # resume attempt failed to recreate the cursor. + if resume_token is None: + # There is no usable resume token because there was a + # failure during ChangeStream initialization. + logging.error('...') + else: + # Use the interrupted ChangeStream's resume token to create + # a new ChangeStream. The new stream will continue from the + # last seen insert change without missing any events. + with db.collection.watch( + pipeline, resume_after=resume_token) as stream: + for insert_change in stream: + print(insert_change) + + Raises :exc:`StopIteration` if this ChangeStream is closed. + """ + while self.alive: + doc = self.try_next() + if doc is not None: + return doc + + raise StopIteration + + __next__ = next + + @property + def alive(self) -> bool: + """Does this cursor have the potential to return more data? + + .. note:: Even if :attr:`alive` is ``True``, :meth:`next` can raise + :exc:`StopIteration` and :meth:`try_next` can return ``None``. + + .. versionadded:: 3.8 + """ + return not self._closed + + @_csot.apply + def try_next(self) -> Optional[_DocumentType]: + """Advance the cursor without blocking indefinitely. + + This method returns the next change document without waiting + indefinitely for the next change. 
For example:: + + with db.collection.watch() as stream: + while stream.alive: + change = stream.try_next() + # Note that the ChangeStream's resume token may be updated + # even when no changes are returned. + print("Current resume token: %r" % (stream.resume_token,)) + if change is not None: + print("Change document: %r" % (change,)) + continue + # We end up here when there are no recent changes. + # Sleep for a while before trying again to avoid flooding + # the server with getMore requests when no changes are + # available. + time.sleep(10) + + If no change document is cached locally then this method runs a single + getMore command. If the getMore yields any documents, the next + document is returned, otherwise, if the getMore returns no documents + (because there have been no changes) then ``None`` is returned. + + :Returns: + The next change document or ``None`` when no document is available + after running a single getMore or when the cursor is closed. + + .. versionadded:: 3.8 + """ + if not self._closed and not self._cursor.alive: + self._resume() + + # Attempt to get the next change with at most one getMore and at most + # one resume attempt. + try: + try: + change = self._cursor._try_next(True) + except PyMongoError as exc: + if not _resumable(exc): + raise + self._resume() + change = self._cursor._try_next(False) + except PyMongoError as exc: + # Close the stream after a fatal error. + if not _resumable(exc) and not exc.timeout: + self.close() + raise + except Exception: + self.close() + raise + + # Check if the cursor was invalidated. + if not self._cursor.alive: + self._closed = True + + # If no changes are available. + if change is None: + # We have either iterated over all documents in the cursor, + # OR the most-recently returned batch is empty. In either case, + # update the cached resume token with the postBatchResumeToken if + # one was returned. We also clear the startAtOperationTime. + if self._cursor._post_batch_resume_token is not None: + self._resume_token = self._cursor._post_batch_resume_token + self._start_at_operation_time = None + return change + + # Else, changes are available. + try: + resume_token = change["_id"] + except KeyError: + self.close() + raise InvalidOperation( + "Cannot provide resume functionality when the resume token is missing." + ) + + # If this is the last change document from the current batch, cache the + # postBatchResumeToken. + if not self._cursor._has_next() and self._cursor._post_batch_resume_token: + resume_token = self._cursor._post_batch_resume_token + + # Hereafter, don't use startAfter; instead use resumeAfter. + self._uses_start_after = False + self._uses_resume_after = True + + # Cache the resume token and clear startAtOperationTime. + self._resume_token = resume_token + self._start_at_operation_time = None + + if self._decode_custom: + return _bson_to_dict(change.raw, self._orig_codec_options) + return change + + def __enter__(self) -> "ChangeStream": + return self + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: + self.close() + + +class CollectionChangeStream(ChangeStream, Generic[_DocumentType]): + """A change stream that watches changes on a single collection. + + Should not be called directly by application developers. Use + helper method :meth:`pymongo.collection.Collection.watch` instead. + + .. 
versionadded:: 3.7 + """ + + _target: Collection[_DocumentType] + + @property + def _aggregation_command_class(self) -> Type[_CollectionAggregationCommand]: + return _CollectionAggregationCommand + + @property + def _client(self) -> MongoClient: + return self._target.database.client + + +class DatabaseChangeStream(ChangeStream, Generic[_DocumentType]): + """A change stream that watches changes on all collections in a database. + + Should not be called directly by application developers. Use + helper method :meth:`pymongo.database.Database.watch` instead. + + .. versionadded:: 3.7 + """ + + _target: Database[_DocumentType] + + @property + def _aggregation_command_class(self) -> Type[_DatabaseAggregationCommand]: + return _DatabaseAggregationCommand + + @property + def _client(self) -> MongoClient: + return self._target.client + + +class ClusterChangeStream(DatabaseChangeStream, Generic[_DocumentType]): + """A change stream that watches changes on all collections in the cluster. + + Should not be called directly by application developers. Use + helper method :meth:`pymongo.mongo_client.MongoClient.watch` instead. + + .. versionadded:: 3.7 + """ + + def _change_stream_options(self) -> Dict[str, Any]: + options = super()._change_stream_options() + options["allChangesForCluster"] = True + return options diff --git a/backend/test/lib/python3.8/site-packages/pymongo/client_options.py b/backend/test/lib/python3.8/site-packages/pymongo/client_options.py new file mode 100644 index 0000000000000000000000000000000000000000..a83216e9ddacbd8171061fe50c890b96034dc738 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/client_options.py @@ -0,0 +1,320 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. 
+ +"""Tools to parse mongo client options.""" +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, List, Mapping, Optional, Sequence, Tuple, cast + +from bson.codec_options import _parse_codec_options +from pymongo import common +from pymongo.auth import MongoCredential, _build_credentials_tuple +from pymongo.common import validate_boolean +from pymongo.compression_support import CompressionSettings +from pymongo.errors import ConfigurationError +from pymongo.monitoring import _EventListener, _EventListeners +from pymongo.pool import PoolOptions +from pymongo.read_concern import ReadConcern +from pymongo.read_preferences import ( + _ServerMode, + make_read_preference, + read_pref_mode_from_name, +) +from pymongo.server_selectors import any_server_selector +from pymongo.ssl_support import get_ssl_context +from pymongo.write_concern import WriteConcern + +if TYPE_CHECKING: + from bson.codec_options import CodecOptions + from pymongo.encryption import AutoEncryptionOpts + from pymongo.pyopenssl_context import SSLContext + from pymongo.topology_description import _ServerSelector + + +def _parse_credentials( + username: str, password: str, database: Optional[str], options: Mapping[str, Any] +) -> Optional[MongoCredential]: + """Parse authentication credentials.""" + mechanism = options.get("authmechanism", "DEFAULT" if username else None) + source = options.get("authsource") + if username or mechanism: + return _build_credentials_tuple(mechanism, source, username, password, options, database) + return None + + +def _parse_read_preference(options: Mapping[str, Any]) -> _ServerMode: + """Parse read preference options.""" + if "read_preference" in options: + return options["read_preference"] + + name = options.get("readpreference", "primary") + mode = read_pref_mode_from_name(name) + tags = options.get("readpreferencetags") + max_staleness = options.get("maxstalenessseconds", -1) + return make_read_preference(mode, tags, max_staleness) + + +def _parse_write_concern(options: Mapping[str, Any]) -> WriteConcern: + """Parse write concern options.""" + concern = options.get("w") + wtimeout = options.get("wtimeoutms") + j = options.get("journal") + fsync = options.get("fsync") + return WriteConcern(concern, wtimeout, j, fsync) + + +def _parse_read_concern(options: Mapping[str, Any]) -> ReadConcern: + """Parse read concern options.""" + concern = options.get("readconcernlevel") + return ReadConcern(concern) + + +def _parse_ssl_options(options: Mapping[str, Any]) -> Tuple[Optional[SSLContext], bool]: + """Parse ssl options.""" + use_tls = options.get("tls") + if use_tls is not None: + validate_boolean("tls", use_tls) + + certfile = options.get("tlscertificatekeyfile") + passphrase = options.get("tlscertificatekeyfilepassword") + ca_certs = options.get("tlscafile") + crlfile = options.get("tlscrlfile") + allow_invalid_certificates = options.get("tlsallowinvalidcertificates", False) + allow_invalid_hostnames = options.get("tlsallowinvalidhostnames", False) + disable_ocsp_endpoint_check = options.get("tlsdisableocspendpointcheck", False) + + enabled_tls_opts = [] + for opt in ( + "tlscertificatekeyfile", + "tlscertificatekeyfilepassword", + "tlscafile", + "tlscrlfile", + ): + # Any non-null value of these options implies tls=True. + if opt in options and options[opt]: + enabled_tls_opts.append(opt) + for opt in ( + "tlsallowinvalidcertificates", + "tlsallowinvalidhostnames", + "tlsdisableocspendpointcheck", + ): + # A value of False for these options implies tls=True. 
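+        # (Only an explicit False matters here: passing, for example,
+        # tlsAllowInvalidCertificates=false expresses an expectation of a
+        # verified TLS connection, so it too implies tls=true.)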
+ if opt in options and not options[opt]: + enabled_tls_opts.append(opt) + + if enabled_tls_opts: + if use_tls is None: + # Implicitly enable TLS when one of the tls* options is set. + use_tls = True + elif not use_tls: + # Error since tls is explicitly disabled but a tls option is set. + raise ConfigurationError( + "TLS has not been enabled but the " + "following tls parameters have been set: " + "%s. Please set `tls=True` or remove." % ", ".join(enabled_tls_opts) + ) + + if use_tls: + ctx = get_ssl_context( + certfile, + passphrase, + ca_certs, + crlfile, + allow_invalid_certificates, + allow_invalid_hostnames, + disable_ocsp_endpoint_check, + ) + return ctx, allow_invalid_hostnames + return None, allow_invalid_hostnames + + +def _parse_pool_options( + username: str, password: str, database: Optional[str], options: Mapping[str, Any] +) -> PoolOptions: + """Parse connection pool options.""" + credentials = _parse_credentials(username, password, database, options) + max_pool_size = options.get("maxpoolsize", common.MAX_POOL_SIZE) + min_pool_size = options.get("minpoolsize", common.MIN_POOL_SIZE) + max_idle_time_seconds = options.get("maxidletimems", common.MAX_IDLE_TIME_SEC) + if max_pool_size is not None and min_pool_size > max_pool_size: + raise ValueError("minPoolSize must be smaller or equal to maxPoolSize") + connect_timeout = options.get("connecttimeoutms", common.CONNECT_TIMEOUT) + socket_timeout = options.get("sockettimeoutms") + wait_queue_timeout = options.get("waitqueuetimeoutms", common.WAIT_QUEUE_TIMEOUT) + event_listeners = cast(Optional[Sequence[_EventListener]], options.get("event_listeners")) + appname = options.get("appname") + driver = options.get("driver") + server_api = options.get("server_api") + compression_settings = CompressionSettings( + options.get("compressors", []), options.get("zlibcompressionlevel", -1) + ) + ssl_context, tls_allow_invalid_hostnames = _parse_ssl_options(options) + load_balanced = options.get("loadbalanced") + max_connecting = options.get("maxconnecting", common.MAX_CONNECTING) + return PoolOptions( + max_pool_size, + min_pool_size, + max_idle_time_seconds, + connect_timeout, + socket_timeout, + wait_queue_timeout, + ssl_context, + tls_allow_invalid_hostnames, + _EventListeners(event_listeners), + appname, + driver, + compression_settings, + max_connecting=max_connecting, + server_api=server_api, + load_balanced=load_balanced, + credentials=credentials, + ) + + +class ClientOptions: + """Read only configuration options for a MongoClient. + + Should not be instantiated directly by application developers. Access + a client's options via :attr:`pymongo.mongo_client.MongoClient.options` + instead. + """ + + def __init__( + self, username: str, password: str, database: Optional[str], options: Mapping[str, Any] + ): + self.__options = options + self.__codec_options = _parse_codec_options(options) + self.__direct_connection = options.get("directconnection") + self.__local_threshold_ms = options.get("localthresholdms", common.LOCAL_THRESHOLD_MS) + # self.__server_selection_timeout is in seconds. Must use full name for + # common.SERVER_SELECTION_TIMEOUT because it is set directly by tests. 
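+        # (Lookups use lower-cased keys throughout: by this point the options
+        # have passed through the URI/keyword-argument validation layer,
+        # which normalizes key casing.)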
+ self.__server_selection_timeout = options.get( + "serverselectiontimeoutms", common.SERVER_SELECTION_TIMEOUT + ) + self.__pool_options = _parse_pool_options(username, password, database, options) + self.__read_preference = _parse_read_preference(options) + self.__replica_set_name = options.get("replicaset") + self.__write_concern = _parse_write_concern(options) + self.__read_concern = _parse_read_concern(options) + self.__connect = options.get("connect") + self.__heartbeat_frequency = options.get("heartbeatfrequencyms", common.HEARTBEAT_FREQUENCY) + self.__retry_writes = options.get("retrywrites", common.RETRY_WRITES) + self.__retry_reads = options.get("retryreads", common.RETRY_READS) + self.__server_selector = options.get("server_selector", any_server_selector) + self.__auto_encryption_opts = options.get("auto_encryption_opts") + self.__load_balanced = options.get("loadbalanced") + self.__timeout = options.get("timeoutms") + + @property + def _options(self) -> Mapping[str, Any]: + """The original options used to create this ClientOptions.""" + return self.__options + + @property + def connect(self) -> Optional[bool]: + """Whether to begin discovering a MongoDB topology automatically.""" + return self.__connect + + @property + def codec_options(self) -> CodecOptions: + """A :class:`~bson.codec_options.CodecOptions` instance.""" + return self.__codec_options + + @property + def direct_connection(self) -> Optional[bool]: + """Whether to connect to the deployment in 'Single' topology.""" + return self.__direct_connection + + @property + def local_threshold_ms(self) -> int: + """The local threshold for this instance.""" + return self.__local_threshold_ms + + @property + def server_selection_timeout(self) -> int: + """The server selection timeout for this instance in seconds.""" + return self.__server_selection_timeout + + @property + def server_selector(self) -> _ServerSelector: + return self.__server_selector + + @property + def heartbeat_frequency(self) -> int: + """The monitoring frequency in seconds.""" + return self.__heartbeat_frequency + + @property + def pool_options(self) -> PoolOptions: + """A :class:`~pymongo.pool.PoolOptions` instance.""" + return self.__pool_options + + @property + def read_preference(self) -> _ServerMode: + """A read preference instance.""" + return self.__read_preference + + @property + def replica_set_name(self) -> Optional[str]: + """Replica set name or None.""" + return self.__replica_set_name + + @property + def write_concern(self) -> WriteConcern: + """A :class:`~pymongo.write_concern.WriteConcern` instance.""" + return self.__write_concern + + @property + def read_concern(self) -> ReadConcern: + """A :class:`~pymongo.read_concern.ReadConcern` instance.""" + return self.__read_concern + + @property + def timeout(self) -> Optional[float]: + """The configured timeoutMS converted to seconds, or None. + + .. 
versionadded: 4.2 + """ + return self.__timeout + + @property + def retry_writes(self) -> bool: + """If this instance should retry supported write operations.""" + return self.__retry_writes + + @property + def retry_reads(self) -> bool: + """If this instance should retry supported read operations.""" + return self.__retry_reads + + @property + def auto_encryption_opts(self) -> Optional[AutoEncryptionOpts]: + """A :class:`~pymongo.encryption.AutoEncryptionOpts` or None.""" + return self.__auto_encryption_opts + + @property + def load_balanced(self) -> Optional[bool]: + """True if the client was configured to connect to a load balancer.""" + return self.__load_balanced + + @property + def event_listeners(self) -> List[_EventListeners]: + """The event listeners registered for this client. + + See :mod:`~pymongo.monitoring` for details. + + .. versionadded:: 4.0 + """ + assert self.__pool_options._event_listeners is not None + return self.__pool_options._event_listeners.event_listeners() diff --git a/backend/test/lib/python3.8/site-packages/pymongo/client_session.py b/backend/test/lib/python3.8/site-packages/pymongo/client_session.py new file mode 100644 index 0000000000000000000000000000000000000000..24fb979992f3ebc36704d21546886946230b5bac --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/client_session.py @@ -0,0 +1,1149 @@ +# Copyright 2017 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Logical sessions for ordering sequential operations. + +.. versionadded:: 3.6 + +Causally Consistent Reads +========================= + +.. code-block:: python + + with client.start_session(causal_consistency=True) as session: + collection = client.db.collection + collection.update_one({"_id": 1}, {"$set": {"x": 10}}, session=session) + secondary_c = collection.with_options(read_preference=ReadPreference.SECONDARY) + + # A secondary read waits for replication of the write. + secondary_c.find_one({"_id": 1}, session=session) + +If `causal_consistency` is True (the default), read operations that use +the session are causally after previous read and write operations. Using a +causally consistent session, an application can read its own writes and is +guaranteed monotonic reads, even when reading from replica set secondaries. + +.. seealso:: The MongoDB documentation on `causal-consistency <https://dochub.mongodb.org/core/causal-consistency>`_. + +.. _transactions-ref: + +Transactions +============ + +.. versionadded:: 3.7 + +MongoDB 4.0 adds support for transactions on replica set primaries. A +transaction is associated with a :class:`ClientSession`. To start a transaction +on a session, use :meth:`ClientSession.start_transaction` in a with-statement. +Then, execute an operation within the transaction by passing the session to the +operation: + +.. 
code-block:: python
+
+    orders = client.db.orders
+    inventory = client.db.inventory
+    with client.start_session() as session:
+        with session.start_transaction():
+            orders.insert_one({"sku": "abc123", "qty": 100}, session=session)
+            inventory.update_one(
+                {"sku": "abc123", "qty": {"$gte": 100}},
+                {"$inc": {"qty": -100}},
+                session=session,
+            )
+
+Upon normal completion of the ``with session.start_transaction()`` block, the
+transaction automatically calls :meth:`ClientSession.commit_transaction`.
+If the block exits with an exception, the transaction automatically calls
+:meth:`ClientSession.abort_transaction`.
+
+In general, multi-document transactions only support read/write (CRUD)
+operations on existing collections. However, MongoDB 4.4 adds support for
+creating collections and indexes with some limitations, including an
+insert operation that would result in the creation of a new collection.
+For a complete description of all the supported and unsupported operations
+see the `MongoDB server's documentation for transactions
+<http://dochub.mongodb.org/core/transactions>`_.
+
+A session may only have a single active transaction at a time, but multiple
+transactions on the same session can be executed in sequence.
+
+Sharded Transactions
+^^^^^^^^^^^^^^^^^^^^
+
+.. versionadded:: 3.9
+
+PyMongo 3.9 adds support for transactions on sharded clusters running MongoDB
+>=4.2. Sharded transactions have the same API as replica set transactions.
+When running a transaction against a sharded cluster, the session is
+pinned to the mongos server selected for the first operation in the
+transaction. All subsequent operations that are part of the same transaction
+are routed to the same mongos server. When the transaction is completed, by
+running either commitTransaction or abortTransaction, the session is unpinned.
+
+.. seealso:: The MongoDB documentation on `transactions <https://dochub.mongodb.org/core/transactions>`_.
+
+.. _snapshot-reads-ref:
+
+Snapshot Reads
+==============
+
+.. versionadded:: 3.12
+
+MongoDB 5.0 adds support for snapshot reads. Snapshot reads are requested by
+passing the ``snapshot`` option to
+:meth:`~pymongo.mongo_client.MongoClient.start_session`.
+If ``snapshot`` is True, all read operations that use this session read data
+from the same snapshot timestamp. The server chooses the latest
+majority-committed snapshot timestamp when executing the first read operation
+using the session. Subsequent reads on this session read from the same
+snapshot timestamp. Snapshot reads are also supported when reading from
+replica set secondaries.
+
+.. code-block:: python
+
+    # Each read using this session reads data from the same point in time.
+    with client.start_session(snapshot=True) as session:
+        order = orders.find_one({"sku": "abc123"}, session=session)
+        inventory = inventory.find_one({"sku": "abc123"}, session=session)
+
+Snapshot Reads Limitations
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Snapshot reads sessions are incompatible with ``causal_consistency=True``.
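+For example, requesting both options raises an error when the session is
+created. A minimal sketch, assuming ``client`` is a connected
+:class:`~pymongo.mongo_client.MongoClient`:
+
+.. code-block:: python
+
+    from pymongo.errors import ConfigurationError
+
+    try:
+        session = client.start_session(snapshot=True, causal_consistency=True)
+    except ConfigurationError as exc:
+        print(exc)  # snapshot reads do not support causal_consistency=True
+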
+Only the following read operations are supported in a snapshot reads session: + +- :meth:`~pymongo.collection.Collection.find` +- :meth:`~pymongo.collection.Collection.find_one` +- :meth:`~pymongo.collection.Collection.aggregate` +- :meth:`~pymongo.collection.Collection.count_documents` +- :meth:`~pymongo.collection.Collection.distinct` (on unsharded collections) + +Classes +======= +""" + +from __future__ import annotations + +import collections +import time +import uuid +from collections.abc import Mapping as _Mapping +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ContextManager, + Dict, + List, + Mapping, + MutableMapping, + NoReturn, + Optional, + Type, + TypeVar, +) + +from bson.binary import Binary +from bson.int64 import Int64 +from bson.son import SON +from bson.timestamp import Timestamp +from pymongo import _csot +from pymongo.cursor import _ConnectionManager +from pymongo.errors import ( + ConfigurationError, + ConnectionFailure, + InvalidOperation, + OperationFailure, + PyMongoError, + WTimeoutError, +) +from pymongo.helpers import _RETRYABLE_ERROR_CODES +from pymongo.read_concern import ReadConcern +from pymongo.read_preferences import ReadPreference, _ServerMode +from pymongo.server_type import SERVER_TYPE +from pymongo.write_concern import WriteConcern + +if TYPE_CHECKING: + from types import TracebackType + + from pymongo.pool import Connection + from pymongo.server import Server + from pymongo.typings import ClusterTime, _Address + + +class SessionOptions: + """Options for a new :class:`ClientSession`. + + :Parameters: + - `causal_consistency` (optional): If True, read operations are causally + ordered within the session. Defaults to True when the ``snapshot`` + option is ``False``. + - `default_transaction_options` (optional): The default + TransactionOptions to use for transactions started on this session. + - `snapshot` (optional): If True, then all reads performed using this + session will read from the same snapshot. This option is incompatible + with ``causal_consistency=True``. Defaults to ``False``. + + .. versionchanged:: 3.12 + Added the ``snapshot`` parameter. + """ + + def __init__( + self, + causal_consistency: Optional[bool] = None, + default_transaction_options: Optional["TransactionOptions"] = None, + snapshot: Optional[bool] = False, + ) -> None: + if snapshot: + if causal_consistency: + raise ConfigurationError("snapshot reads do not support causal_consistency=True") + causal_consistency = False + elif causal_consistency is None: + causal_consistency = True + self._causal_consistency = causal_consistency + if default_transaction_options is not None: + if not isinstance(default_transaction_options, TransactionOptions): + raise TypeError( + "default_transaction_options must be an instance of " + "pymongo.client_session.TransactionOptions, not: {!r}".format( + default_transaction_options + ) + ) + self._default_transaction_options = default_transaction_options + self._snapshot = snapshot + + @property + def causal_consistency(self) -> bool: + """Whether causal consistency is configured.""" + return self._causal_consistency + + @property + def default_transaction_options(self) -> Optional["TransactionOptions"]: + """The default TransactionOptions to use for transactions started on + this session. + + .. versionadded:: 3.7 + """ + return self._default_transaction_options + + @property + def snapshot(self) -> Optional[bool]: + """Whether snapshot reads are configured. + + .. 
versionadded:: 3.12
+        """
+        return self._snapshot
+
+
+class TransactionOptions:
+    """Options for :meth:`ClientSession.start_transaction`.
+
+    :Parameters:
+      - `read_concern` (optional): The
+        :class:`~pymongo.read_concern.ReadConcern` to use for this transaction.
+        If ``None`` (the default) the :attr:`read_concern` of
+        the :class:`MongoClient` is used.
+      - `write_concern` (optional): The
+        :class:`~pymongo.write_concern.WriteConcern` to use for this
+        transaction. If ``None`` (the default) the :attr:`write_concern` of
+        the :class:`MongoClient` is used.
+      - `read_preference` (optional): The read preference to use. If
+        ``None`` (the default) the :attr:`read_preference` of this
+        :class:`MongoClient` is used. See :mod:`~pymongo.read_preferences`
+        for options. Transactions which read must use
+        :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`.
+      - `max_commit_time_ms` (optional): The maximum amount of time to allow a
+        single commitTransaction command to run. This option is an alias for
+        the maxTimeMS option on the commitTransaction command. If ``None`` (the
+        default) maxTimeMS is not used.
+
+    .. versionchanged:: 3.9
+       Added the ``max_commit_time_ms`` option.
+
+    .. versionadded:: 3.7
+    """
+
+    def __init__(
+        self,
+        read_concern: Optional[ReadConcern] = None,
+        write_concern: Optional[WriteConcern] = None,
+        read_preference: Optional[_ServerMode] = None,
+        max_commit_time_ms: Optional[int] = None,
+    ) -> None:
+        self._read_concern = read_concern
+        self._write_concern = write_concern
+        self._read_preference = read_preference
+        self._max_commit_time_ms = max_commit_time_ms
+        if read_concern is not None:
+            if not isinstance(read_concern, ReadConcern):
+                raise TypeError(
+                    "read_concern must be an instance of "
+                    "pymongo.read_concern.ReadConcern, not: {!r}".format(read_concern)
+                )
+        if write_concern is not None:
+            if not isinstance(write_concern, WriteConcern):
+                raise TypeError(
+                    "write_concern must be an instance of "
+                    "pymongo.write_concern.WriteConcern, not: {!r}".format(write_concern)
+                )
+            if not write_concern.acknowledged:
+                raise ConfigurationError(
+                    "transactions do not support unacknowledged write concern"
+                    ": {!r}".format(write_concern)
+                )
+        if read_preference is not None:
+            if not isinstance(read_preference, _ServerMode):
+                raise TypeError(
+                    "{!r} is not valid for read_preference. See "
+                    "pymongo.read_preferences for valid "
+                    "options.".format(read_preference)
+                )
+        if max_commit_time_ms is not None:
+            if not isinstance(max_commit_time_ms, int):
+                raise TypeError("max_commit_time_ms must be an integer or None")
+
+    @property
+    def read_concern(self) -> Optional[ReadConcern]:
+        """This transaction's :class:`~pymongo.read_concern.ReadConcern`."""
+        return self._read_concern
+
+    @property
+    def write_concern(self) -> Optional[WriteConcern]:
+        """This transaction's :class:`~pymongo.write_concern.WriteConcern`."""
+        return self._write_concern
+
+    @property
+    def read_preference(self) -> Optional[_ServerMode]:
+        """This transaction's :class:`~pymongo.read_preferences.ReadPreference`."""
+        return self._read_preference
+
+    @property
+    def max_commit_time_ms(self) -> Optional[int]:
+        """The maxTimeMS to use when running a commitTransaction command.
+
+        .. versionadded:: 3.9
+        """
+        return self._max_commit_time_ms
+
+
+def _validate_session_write_concern(
+    session: Optional[ClientSession], write_concern: Optional[WriteConcern]
+) -> Optional[ClientSession]:
+    """Validate that an explicit session is not used with an unack'ed write.
+ + Returns the session to use for the next operation. + """ + if session: + if write_concern is not None and not write_concern.acknowledged: + # For unacknowledged writes without an explicit session, + # drivers SHOULD NOT use an implicit session. If a driver + # creates an implicit session for unacknowledged writes + # without an explicit session, the driver MUST NOT send the + # session ID. + if session._implicit: + return None + else: + raise ConfigurationError( + "Explicit sessions are incompatible with " + "unacknowledged write concern: {!r}".format(write_concern) + ) + return session + + +class _TransactionContext: + """Internal transaction context manager for start_transaction.""" + + def __init__(self, session: ClientSession): + self.__session = session + + def __enter__(self) -> _TransactionContext: + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + if self.__session.in_transaction: + if exc_val is None: + self.__session.commit_transaction() + else: + self.__session.abort_transaction() + + +class _TxnState: + NONE = 1 + STARTING = 2 + IN_PROGRESS = 3 + COMMITTED = 4 + COMMITTED_EMPTY = 5 + ABORTED = 6 + + +class _Transaction: + """Internal class to hold transaction information in a ClientSession.""" + + def __init__(self, opts: Optional[TransactionOptions], client: MongoClient): + self.opts = opts + self.state = _TxnState.NONE + self.sharded = False + self.pinned_address: Optional[_Address] = None + self.conn_mgr: Optional[_ConnectionManager] = None + self.recovery_token = None + self.attempt = 0 + self.client = client + + def active(self) -> bool: + return self.state in (_TxnState.STARTING, _TxnState.IN_PROGRESS) + + def starting(self) -> bool: + return self.state == _TxnState.STARTING + + @property + def pinned_conn(self) -> Optional[Connection]: + if self.active() and self.conn_mgr: + return self.conn_mgr.conn + return None + + def pin(self, server: Server, conn: Connection) -> None: + self.sharded = True + self.pinned_address = server.description.address + if server.description.server_type == SERVER_TYPE.LoadBalancer: + conn.pin_txn() + self.conn_mgr = _ConnectionManager(conn, False) + + def unpin(self) -> None: + self.pinned_address = None + if self.conn_mgr: + self.conn_mgr.close() + self.conn_mgr = None + + def reset(self) -> None: + self.unpin() + self.state = _TxnState.NONE + self.sharded = False + self.recovery_token = None + self.attempt = 0 + + def __del__(self) -> None: + if self.conn_mgr: + # Reuse the cursor closing machinery to return the socket to the + # pool soon. + self.client._close_cursor_soon(0, None, self.conn_mgr) + self.conn_mgr = None + + +def _reraise_with_unknown_commit(exc: Any) -> NoReturn: + """Re-raise an exception with the UnknownTransactionCommitResult label.""" + exc._add_error_label("UnknownTransactionCommitResult") + raise + + +def _max_time_expired_error(exc: PyMongoError) -> bool: + """Return true if exc is a MaxTimeMSExpired error.""" + return isinstance(exc, OperationFailure) and exc.code == 50 + + +# From the transactions spec, all the retryable writes errors plus +# WriteConcernFailed. +_UNKNOWN_COMMIT_ERROR_CODES: frozenset = _RETRYABLE_ERROR_CODES | frozenset( + [ + 64, # WriteConcernFailed + 50, # MaxTimeMSExpired + ] +) + +# From the Convenient API for Transactions spec, with_transaction must +# halt retries after 120 seconds. 
+# This limit is non-configurable and was chosen to be twice the 60 second +# default value of MongoDB's `transactionLifetimeLimitSeconds` parameter. +_WITH_TRANSACTION_RETRY_TIME_LIMIT = 120 + + +def _within_time_limit(start_time: float) -> bool: + """Are we within the with_transaction retry limit?""" + return time.monotonic() - start_time < _WITH_TRANSACTION_RETRY_TIME_LIMIT + + +_T = TypeVar("_T") + +if TYPE_CHECKING: + from pymongo.mongo_client import MongoClient + + +class ClientSession: + """A session for ordering sequential operations. + + :class:`ClientSession` instances are **not thread-safe or fork-safe**. + They can only be used by one thread or process at a time. A single + :class:`ClientSession` cannot be used to run multiple operations + concurrently. + + Should not be initialized directly by application developers - to create a + :class:`ClientSession`, call + :meth:`~pymongo.mongo_client.MongoClient.start_session`. + """ + + def __init__( + self, + client: MongoClient, + server_session: Any, + options: SessionOptions, + implicit: bool, + ) -> None: + # A MongoClient, a _ServerSession, a SessionOptions, and a set. + self._client: MongoClient = client + self._server_session = server_session + self._options = options + self._cluster_time: Optional[Mapping[str, Any]] = None + self._operation_time: Optional[Timestamp] = None + self._snapshot_time = None + # Is this an implicitly created session? + self._implicit = implicit + self._transaction = _Transaction(None, client) + + def end_session(self) -> None: + """Finish this session. If a transaction has started, abort it. + + It is an error to use the session after the session has ended. + """ + self._end_session(lock=True) + + def _end_session(self, lock: bool) -> None: + if self._server_session is not None: + try: + if self.in_transaction: + self.abort_transaction() + # It's possible we're still pinned here when the transaction + # is in the committed state when the session is discarded. + self._unpin() + finally: + self._client._return_server_session(self._server_session, lock) + self._server_session = None + + def _check_ended(self) -> None: + if self._server_session is None: + raise InvalidOperation("Cannot use ended session") + + def __enter__(self) -> "ClientSession": + return self + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: + self._end_session(lock=True) + + @property + def client(self) -> MongoClient: + """The :class:`~pymongo.mongo_client.MongoClient` this session was + created from. + """ + return self._client + + @property + def options(self) -> SessionOptions: + """The :class:`SessionOptions` this session was created with.""" + return self._options + + @property + def session_id(self) -> Mapping[str, Any]: + """A BSON document, the opaque server session identifier.""" + self._check_ended() + return self._server_session.session_id + + @property + def cluster_time(self) -> Optional[ClusterTime]: + """The cluster time returned by the last operation executed + in this session. + """ + return self._cluster_time + + @property + def operation_time(self) -> Optional[Timestamp]: + """The operation time returned by the last operation executed + in this session. 
+ """ + return self._operation_time + + def _inherit_option(self, name: str, val: _T) -> _T: + """Return the inherited TransactionOption value.""" + if val: + return val + txn_opts = self.options.default_transaction_options + parent_val = txn_opts and getattr(txn_opts, name) + if parent_val: + return parent_val + return getattr(self.client, name) + + def with_transaction( + self, + callback: Callable[["ClientSession"], _T], + read_concern: Optional[ReadConcern] = None, + write_concern: Optional[WriteConcern] = None, + read_preference: Optional[_ServerMode] = None, + max_commit_time_ms: Optional[int] = None, + ) -> _T: + """Execute a callback in a transaction. + + This method starts a transaction on this session, executes ``callback`` + once, and then commits the transaction. For example:: + + def callback(session): + orders = session.client.db.orders + inventory = session.client.db.inventory + orders.insert_one({"sku": "abc123", "qty": 100}, session=session) + inventory.update_one({"sku": "abc123", "qty": {"$gte": 100}}, + {"$inc": {"qty": -100}}, session=session) + + with client.start_session() as session: + session.with_transaction(callback) + + To pass arbitrary arguments to the ``callback``, wrap your callable + with a ``lambda`` like this:: + + def callback(session, custom_arg, custom_kwarg=None): + # Transaction operations... + + with client.start_session() as session: + session.with_transaction( + lambda s: callback(s, "custom_arg", custom_kwarg=1)) + + In the event of an exception, ``with_transaction`` may retry the commit + or the entire transaction, therefore ``callback`` may be invoked + multiple times by a single call to ``with_transaction``. Developers + should be mindful of this possibility when writing a ``callback`` that + modifies application state or has any other side-effects. + Note that even when the ``callback`` is invoked multiple times, + ``with_transaction`` ensures that the transaction will be committed + at-most-once on the server. + + The ``callback`` should not attempt to start new transactions, but + should simply run operations meant to be contained within a + transaction. The ``callback`` should also not commit the transaction; + this is handled automatically by ``with_transaction``. If the + ``callback`` does commit or abort the transaction without error, + however, ``with_transaction`` will return without taking further + action. + + :class:`ClientSession` instances are **not thread-safe or fork-safe**. + Consequently, the ``callback`` must not attempt to execute multiple + operations concurrently. + + When ``callback`` raises an exception, ``with_transaction`` + automatically aborts the current transaction. When ``callback`` or + :meth:`~ClientSession.commit_transaction` raises an exception that + includes the ``"TransientTransactionError"`` error label, + ``with_transaction`` starts a new transaction and re-executes + the ``callback``. + + When :meth:`~ClientSession.commit_transaction` raises an exception with + the ``"UnknownTransactionCommitResult"`` error label, + ``with_transaction`` retries the commit until the result of the + transaction is known. + + This method will cease retrying after 120 seconds has elapsed. This + timeout is not configurable and any exception raised by the + ``callback`` or by :meth:`ClientSession.commit_transaction` after the + timeout is reached will be re-raised. Applications that desire a + different timeout duration should not use this method. 
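+
+        Transaction options may also be supplied directly; a minimal sketch,
+        reusing ``client`` and ``callback`` from the examples above::
+
+            from pymongo.read_concern import ReadConcern
+            from pymongo.write_concern import WriteConcern
+
+            with client.start_session() as session:
+                session.with_transaction(
+                    callback,
+                    read_concern=ReadConcern("majority"),
+                    write_concern=WriteConcern("majority"),
+                )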
+ + :Parameters: + - `callback`: The callable ``callback`` to run inside a transaction. + The callable must accept a single argument, this session. Note, + under certain error conditions the callback may be run multiple + times. + - `read_concern` (optional): The + :class:`~pymongo.read_concern.ReadConcern` to use for this + transaction. + - `write_concern` (optional): The + :class:`~pymongo.write_concern.WriteConcern` to use for this + transaction. + - `read_preference` (optional): The read preference to use for this + transaction. If ``None`` (the default) the :attr:`read_preference` + of this :class:`Database` is used. See + :mod:`~pymongo.read_preferences` for options. + + :Returns: + The return value of the ``callback``. + + .. versionadded:: 3.9 + """ + start_time = time.monotonic() + while True: + self.start_transaction(read_concern, write_concern, read_preference, max_commit_time_ms) + try: + ret = callback(self) + except Exception as exc: + if self.in_transaction: + self.abort_transaction() + if ( + isinstance(exc, PyMongoError) + and exc.has_error_label("TransientTransactionError") + and _within_time_limit(start_time) + ): + # Retry the entire transaction. + continue + raise + + if not self.in_transaction: + # Assume callback intentionally ended the transaction. + return ret + + while True: + try: + self.commit_transaction() + except PyMongoError as exc: + if ( + exc.has_error_label("UnknownTransactionCommitResult") + and _within_time_limit(start_time) + and not _max_time_expired_error(exc) + ): + # Retry the commit. + continue + + if exc.has_error_label("TransientTransactionError") and _within_time_limit( + start_time + ): + # Retry the entire transaction. + break + raise + + # Commit succeeded. + return ret + + def start_transaction( + self, + read_concern: Optional[ReadConcern] = None, + write_concern: Optional[WriteConcern] = None, + read_preference: Optional[_ServerMode] = None, + max_commit_time_ms: Optional[int] = None, + ) -> ContextManager: + """Start a multi-statement transaction. + + Takes the same arguments as :class:`TransactionOptions`. + + .. versionchanged:: 3.9 + Added the ``max_commit_time_ms`` option. + + .. versionadded:: 3.7 + """ + self._check_ended() + + if self.options.snapshot: + raise InvalidOperation("Transactions are not supported in snapshot sessions") + + if self.in_transaction: + raise InvalidOperation("Transaction already in progress") + + read_concern = self._inherit_option("read_concern", read_concern) + write_concern = self._inherit_option("write_concern", write_concern) + read_preference = self._inherit_option("read_preference", read_preference) + if max_commit_time_ms is None: + opts = self.options.default_transaction_options + if opts: + max_commit_time_ms = opts.max_commit_time_ms + + self._transaction.opts = TransactionOptions( + read_concern, write_concern, read_preference, max_commit_time_ms + ) + self._transaction.reset() + self._transaction.state = _TxnState.STARTING + self._start_retryable_write() + return _TransactionContext(self) + + def commit_transaction(self) -> None: + """Commit a multi-statement transaction. + + .. versionadded:: 3.7 + """ + self._check_ended() + state = self._transaction.state + if state is _TxnState.NONE: + raise InvalidOperation("No transaction started") + elif state in (_TxnState.STARTING, _TxnState.COMMITTED_EMPTY): + # Server transaction was never started, no need to send a command. 
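+            # (e.g. commit_transaction() was called before any operation ran
+            # in the transaction, or an empty commit is being retried.)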
+ self._transaction.state = _TxnState.COMMITTED_EMPTY + return + elif state is _TxnState.ABORTED: + raise InvalidOperation("Cannot call commitTransaction after calling abortTransaction") + elif state is _TxnState.COMMITTED: + # We're explicitly retrying the commit, move the state back to + # "in progress" so that in_transaction returns true. + self._transaction.state = _TxnState.IN_PROGRESS + + try: + self._finish_transaction_with_retry("commitTransaction") + except ConnectionFailure as exc: + # We do not know if the commit was successfully applied on the + # server or if it satisfied the provided write concern, set the + # unknown commit error label. + exc._remove_error_label("TransientTransactionError") + _reraise_with_unknown_commit(exc) + except WTimeoutError as exc: + # We do not know if the commit has satisfied the provided write + # concern, add the unknown commit error label. + _reraise_with_unknown_commit(exc) + except OperationFailure as exc: + if exc.code not in _UNKNOWN_COMMIT_ERROR_CODES: + # The server reports errorLabels in the case. + raise + # We do not know if the commit was successfully applied on the + # server or if it satisfied the provided write concern, set the + # unknown commit error label. + _reraise_with_unknown_commit(exc) + finally: + self._transaction.state = _TxnState.COMMITTED + + def abort_transaction(self) -> None: + """Abort a multi-statement transaction. + + .. versionadded:: 3.7 + """ + self._check_ended() + + state = self._transaction.state + if state is _TxnState.NONE: + raise InvalidOperation("No transaction started") + elif state is _TxnState.STARTING: + # Server transaction was never started, no need to send a command. + self._transaction.state = _TxnState.ABORTED + return + elif state is _TxnState.ABORTED: + raise InvalidOperation("Cannot call abortTransaction twice") + elif state in (_TxnState.COMMITTED, _TxnState.COMMITTED_EMPTY): + raise InvalidOperation("Cannot call abortTransaction after calling commitTransaction") + + try: + self._finish_transaction_with_retry("abortTransaction") + except (OperationFailure, ConnectionFailure): + # The transactions spec says to ignore abortTransaction errors. + pass + finally: + self._transaction.state = _TxnState.ABORTED + self._unpin() + + def _finish_transaction_with_retry(self, command_name: str) -> Dict[str, Any]: + """Run commit or abort with one retry after any retryable error. + + :Parameters: + - `command_name`: Either "commitTransaction" or "abortTransaction". + """ + + def func( + session: Optional[ClientSession], conn: Connection, retryable: bool + ) -> Dict[str, Any]: + return self._finish_transaction(conn, command_name) + + return self._client._retry_internal(True, func, self, None) + + def _finish_transaction(self, conn: Connection, command_name: str) -> Dict[str, Any]: + self._transaction.attempt += 1 + opts = self._transaction.opts + assert opts + wc = opts.write_concern + cmd = SON([(command_name, 1)]) + if command_name == "commitTransaction": + if opts.max_commit_time_ms and _csot.get_timeout() is None: + cmd["maxTimeMS"] = opts.max_commit_time_ms + + # Transaction spec says that after the initial commit attempt, + # subsequent commitTransaction commands should be upgraded to use + # w:"majority" and set a default value of 10 seconds for wtimeout. 
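+            # Illustrative: a first-attempt WriteConcern(w=1) is retried as
+            # WriteConcern(w="majority", wtimeout=10000).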
+ if self._transaction.attempt > 1: + assert wc + wc_doc = wc.document + wc_doc["w"] = "majority" + wc_doc.setdefault("wtimeout", 10000) + wc = WriteConcern(**wc_doc) + + if self._transaction.recovery_token: + cmd["recoveryToken"] = self._transaction.recovery_token + + return self._client.admin._command( + conn, cmd, session=self, write_concern=wc, parse_write_concern_error=True + ) + + def _advance_cluster_time(self, cluster_time: Optional[Mapping[str, Any]]) -> None: + """Internal cluster time helper.""" + if self._cluster_time is None: + self._cluster_time = cluster_time + elif cluster_time is not None: + if cluster_time["clusterTime"] > self._cluster_time["clusterTime"]: + self._cluster_time = cluster_time + + def advance_cluster_time(self, cluster_time: Mapping[str, Any]) -> None: + """Update the cluster time for this session. + + :Parameters: + - `cluster_time`: The + :data:`~pymongo.client_session.ClientSession.cluster_time` from + another `ClientSession` instance. + """ + if not isinstance(cluster_time, _Mapping): + raise TypeError("cluster_time must be a subclass of collections.Mapping") + if not isinstance(cluster_time.get("clusterTime"), Timestamp): + raise ValueError("Invalid cluster_time") + self._advance_cluster_time(cluster_time) + + def _advance_operation_time(self, operation_time: Optional[Timestamp]) -> None: + """Internal operation time helper.""" + if self._operation_time is None: + self._operation_time = operation_time + elif operation_time is not None: + if operation_time > self._operation_time: + self._operation_time = operation_time + + def advance_operation_time(self, operation_time: Timestamp) -> None: + """Update the operation time for this session. + + :Parameters: + - `operation_time`: The + :data:`~pymongo.client_session.ClientSession.operation_time` from + another `ClientSession` instance. + """ + if not isinstance(operation_time, Timestamp): + raise TypeError("operation_time must be an instance of bson.timestamp.Timestamp") + self._advance_operation_time(operation_time) + + def _process_response(self, reply: Mapping[str, Any]) -> None: + """Process a response to a command that was run with this session.""" + self._advance_cluster_time(reply.get("$clusterTime")) + self._advance_operation_time(reply.get("operationTime")) + if self._options.snapshot and self._snapshot_time is None: + if "cursor" in reply: + ct = reply["cursor"].get("atClusterTime") + else: + ct = reply.get("atClusterTime") + self._snapshot_time = ct + if self.in_transaction and self._transaction.sharded: + recovery_token = reply.get("recoveryToken") + if recovery_token: + self._transaction.recovery_token = recovery_token + + @property + def has_ended(self) -> bool: + """True if this session is finished.""" + return self._server_session is None + + @property + def in_transaction(self) -> bool: + """True if this session has an active multi-statement transaction. + + .. 
versionadded:: 3.10 + """ + return self._transaction.active() + + @property + def _starting_transaction(self) -> bool: + """True if this session is starting a multi-statement transaction.""" + return self._transaction.starting() + + @property + def _pinned_address(self) -> Optional[_Address]: + """The mongos address this transaction was created on.""" + if self._transaction.active(): + return self._transaction.pinned_address + return None + + @property + def _pinned_connection(self) -> Optional[Connection]: + """The connection this transaction was started on.""" + return self._transaction.pinned_conn + + def _pin(self, server: Server, conn: Connection) -> None: + """Pin this session to the given Server or to the given connection.""" + self._transaction.pin(server, conn) + + def _unpin(self) -> None: + """Unpin this session from any pinned Server.""" + self._transaction.unpin() + + def _txn_read_preference(self) -> Optional[_ServerMode]: + """Return read preference of this transaction or None.""" + if self.in_transaction: + assert self._transaction.opts + return self._transaction.opts.read_preference + return None + + def _materialize(self) -> None: + if isinstance(self._server_session, _EmptyServerSession): + old = self._server_session + self._server_session = self._client._topology.get_server_session() + if old.started_retryable_write: + self._server_session.inc_transaction_id() + + def _apply_to( + self, + command: MutableMapping[str, Any], + is_retryable: bool, + read_preference: _ServerMode, + conn: Connection, + ) -> None: + self._check_ended() + self._materialize() + if self.options.snapshot: + self._update_read_concern(command, conn) + + self._server_session.last_use = time.monotonic() + command["lsid"] = self._server_session.session_id + + if is_retryable: + command["txnNumber"] = self._server_session.transaction_id + return + + if self.in_transaction: + if read_preference != ReadPreference.PRIMARY: + raise InvalidOperation( + "read preference in a transaction must be primary, not: " + "{!r}".format(read_preference) + ) + + if self._transaction.state == _TxnState.STARTING: + # First command begins a new transaction. 
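+                # Illustrative shape of that first command as sent to the
+                # server: {..., "lsid": {...}, "txnNumber": Int64(1),
+                #  "startTransaction": True, "autocommit": False}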
+ self._transaction.state = _TxnState.IN_PROGRESS + command["startTransaction"] = True + + assert self._transaction.opts + if self._transaction.opts.read_concern: + rc = self._transaction.opts.read_concern.document + if rc: + command["readConcern"] = rc + self._update_read_concern(command, conn) + + command["txnNumber"] = self._server_session.transaction_id + command["autocommit"] = False + + def _start_retryable_write(self) -> None: + self._check_ended() + self._server_session.inc_transaction_id() + + def _update_read_concern(self, cmd: MutableMapping[str, Any], conn: Connection) -> None: + if self.options.causal_consistency and self.operation_time is not None: + cmd.setdefault("readConcern", {})["afterClusterTime"] = self.operation_time + if self.options.snapshot: + if conn.max_wire_version < 13: + raise ConfigurationError("Snapshot reads require MongoDB 5.0 or later") + rc = cmd.setdefault("readConcern", {}) + rc["level"] = "snapshot" + if self._snapshot_time is not None: + rc["atClusterTime"] = self._snapshot_time + + def __copy__(self) -> NoReturn: + raise TypeError("A ClientSession cannot be copied, create a new session instead") + + +class _EmptyServerSession: + __slots__ = "dirty", "started_retryable_write" + + def __init__(self) -> None: + self.dirty = False + self.started_retryable_write = False + + def mark_dirty(self) -> None: + self.dirty = True + + def inc_transaction_id(self) -> None: + self.started_retryable_write = True + + +class _ServerSession: + def __init__(self, generation: int): + # Ensure id is type 4, regardless of CodecOptions.uuid_representation. + self.session_id = {"id": Binary(uuid.uuid4().bytes, 4)} + self.last_use = time.monotonic() + self._transaction_id = 0 + self.dirty = False + self.generation = generation + + def mark_dirty(self) -> None: + """Mark this session as dirty. + + A server session is marked dirty when a command fails with a network + error. Dirty sessions are later discarded from the server session pool. + """ + self.dirty = True + + def timed_out(self, session_timeout_minutes: float) -> bool: + idle_seconds = time.monotonic() - self.last_use + + # Timed out if we have less than a minute to live. + return idle_seconds > (session_timeout_minutes - 1) * 60 + + @property + def transaction_id(self) -> Int64: + """Positive 64-bit integer.""" + return Int64(self._transaction_id) + + def inc_transaction_id(self) -> None: + self._transaction_id += 1 + + +class _ServerSessionPool(collections.deque): + """Pool of _ServerSession objects. + + This class is not thread-safe, access it while holding the Topology lock. + """ + + def __init__(self, *args: Any, **kwargs: Any): + super().__init__(*args, **kwargs) + self.generation = 0 + + def reset(self) -> None: + self.generation += 1 + self.clear() + + def pop_all(self) -> List[_ServerSession]: + ids = [] + while self: + ids.append(self.pop().session_id) + return ids + + def get_server_session(self, session_timeout_minutes: float) -> _ServerSession: + # Although the Driver Sessions Spec says we only clear stale sessions + # in return_server_session, PyMongo can't take a lock when returning + # sessions from a __del__ method (like in Cursor.__die), so it can't + # clear stale sessions there. In case many sessions were returned via + # __del__, check for stale sessions here too. + self._clear_stale(session_timeout_minutes) + + # The most recently used sessions are on the left. 
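+        # popleft() therefore hands back the most recently used (freshest)
+        # session; stale sessions age toward the right and are dropped by
+        # _clear_stale().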
+ while self: + s = self.popleft() + if not s.timed_out(session_timeout_minutes): + return s + + return _ServerSession(self.generation) + + def return_server_session( + self, server_session: _ServerSession, session_timeout_minutes: Optional[float] + ) -> None: + if session_timeout_minutes is not None: + self._clear_stale(session_timeout_minutes) + if server_session.timed_out(session_timeout_minutes): + return + self.return_server_session_no_lock(server_session) + + def return_server_session_no_lock(self, server_session: _ServerSession) -> None: + # Discard sessions from an old pool to avoid duplicate sessions in the + # child process after a fork. + if server_session.generation == self.generation and not server_session.dirty: + self.appendleft(server_session) + + def _clear_stale(self, session_timeout_minutes: float) -> None: + # Clear stale sessions. The least recently used are on the right. + while self: + if self[-1].timed_out(session_timeout_minutes): + self.pop() + else: + # The remaining sessions also haven't timed out. + break diff --git a/backend/test/lib/python3.8/site-packages/pymongo/collation.py b/backend/test/lib/python3.8/site-packages/pymongo/collation.py new file mode 100644 index 0000000000000000000000000000000000000000..bada2d9417345ab35b075815a3ca8ff4888d992a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/collation.py @@ -0,0 +1,224 @@ +# Copyright 2016 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for working with `collations`_. + +.. _collations: https://www.mongodb.com/docs/manual/reference/collation/ +""" +from typing import Any, Dict, Mapping, Optional, Union + +from pymongo import common + + +class CollationStrength: + """ + An enum that defines values for `strength` on a + :class:`~pymongo.collation.Collation`. + """ + + PRIMARY = 1 + """Differentiate base (unadorned) characters.""" + + SECONDARY = 2 + """Differentiate character accents.""" + + TERTIARY = 3 + """Differentiate character case.""" + + QUATERNARY = 4 + """Differentiate words with and without punctuation.""" + + IDENTICAL = 5 + """Differentiate unicode code point (characters are exactly identical).""" + + +class CollationAlternate: + """ + An enum that defines values for `alternate` on a + :class:`~pymongo.collation.Collation`. + """ + + NON_IGNORABLE = "non-ignorable" + """Spaces and punctuation are treated as base characters.""" + + SHIFTED = "shifted" + """Spaces and punctuation are *not* considered base characters. + + Spaces and punctuation are distinguished regardless when the + :class:`~pymongo.collation.Collation` strength is at least + :data:`~pymongo.collation.CollationStrength.QUATERNARY`. + + """ + + +class CollationMaxVariable: + """ + An enum that defines values for `max_variable` on a + :class:`~pymongo.collation.Collation`. 
+ """ + + PUNCT = "punct" + """Both punctuation and spaces are ignored.""" + + SPACE = "space" + """Spaces alone are ignored.""" + + +class CollationCaseFirst: + """ + An enum that defines values for `case_first` on a + :class:`~pymongo.collation.Collation`. + """ + + UPPER = "upper" + """Sort uppercase characters first.""" + + LOWER = "lower" + """Sort lowercase characters first.""" + + OFF = "off" + """Default for locale or collation strength.""" + + +class Collation: + """Collation + + :Parameters: + - `locale`: (string) The locale of the collation. This should be a string + that identifies an `ICU locale ID` exactly. For example, ``en_US`` is + valid, but ``en_us`` and ``en-US`` are not. Consult the MongoDB + documentation for a list of supported locales. + - `caseLevel`: (optional) If ``True``, turn on case sensitivity if + `strength` is 1 or 2 (case sensitivity is implied if `strength` is + greater than 2). Defaults to ``False``. + - `caseFirst`: (optional) Specify that either uppercase or lowercase + characters take precedence. Must be one of the following values: + + * :data:`~CollationCaseFirst.UPPER` + * :data:`~CollationCaseFirst.LOWER` + * :data:`~CollationCaseFirst.OFF` (the default) + + - `strength`: (optional) Specify the comparison strength. This is also + known as the ICU comparison level. This must be one of the following + values: + + * :data:`~CollationStrength.PRIMARY` + * :data:`~CollationStrength.SECONDARY` + * :data:`~CollationStrength.TERTIARY` (the default) + * :data:`~CollationStrength.QUATERNARY` + * :data:`~CollationStrength.IDENTICAL` + + Each successive level builds upon the previous. For example, a + `strength` of :data:`~CollationStrength.SECONDARY` differentiates + characters based both on the unadorned base character and its accents. + + - `numericOrdering`: (optional) If ``True``, order numbers numerically + instead of in collation order (defaults to ``False``). + - `alternate`: (optional) Specify whether spaces and punctuation are + considered base characters. This must be one of the following values: + + * :data:`~CollationAlternate.NON_IGNORABLE` (the default) + * :data:`~CollationAlternate.SHIFTED` + + - `maxVariable`: (optional) When `alternate` is + :data:`~CollationAlternate.SHIFTED`, this option specifies what + characters may be ignored. This must be one of the following values: + + * :data:`~CollationMaxVariable.PUNCT` (the default) + * :data:`~CollationMaxVariable.SPACE` + + - `normalization`: (optional) If ``True``, normalizes text into Unicode + NFD. Defaults to ``False``. + - `backwards`: (optional) If ``True``, accents on characters are + considered from the back of the word to the front, as it is done in some + French dictionary ordering traditions. Defaults to ``False``. + - `kwargs`: (optional) Keyword arguments supplying any additional options + to be sent with this Collation object. + + .. 
versionadded: 3.4 + + """ + + __slots__ = ("__document",) + + def __init__( + self, + locale: str, + caseLevel: Optional[bool] = None, + caseFirst: Optional[str] = None, + strength: Optional[int] = None, + numericOrdering: Optional[bool] = None, + alternate: Optional[str] = None, + maxVariable: Optional[str] = None, + normalization: Optional[bool] = None, + backwards: Optional[bool] = None, + **kwargs: Any, + ) -> None: + locale = common.validate_string("locale", locale) + self.__document: Dict[str, Any] = {"locale": locale} + if caseLevel is not None: + self.__document["caseLevel"] = common.validate_boolean("caseLevel", caseLevel) + if caseFirst is not None: + self.__document["caseFirst"] = common.validate_string("caseFirst", caseFirst) + if strength is not None: + self.__document["strength"] = common.validate_integer("strength", strength) + if numericOrdering is not None: + self.__document["numericOrdering"] = common.validate_boolean( + "numericOrdering", numericOrdering + ) + if alternate is not None: + self.__document["alternate"] = common.validate_string("alternate", alternate) + if maxVariable is not None: + self.__document["maxVariable"] = common.validate_string("maxVariable", maxVariable) + if normalization is not None: + self.__document["normalization"] = common.validate_boolean( + "normalization", normalization + ) + if backwards is not None: + self.__document["backwards"] = common.validate_boolean("backwards", backwards) + self.__document.update(kwargs) + + @property + def document(self) -> Dict[str, Any]: + """The document representation of this collation. + + .. note:: + :class:`Collation` is immutable. Mutating the value of + :attr:`document` does not mutate this :class:`Collation`. + """ + return self.__document.copy() + + def __repr__(self) -> str: + document = self.document + return "Collation({})".format(", ".join(f"{key}={document[key]!r}" for key in document)) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Collation): + return self.document == other.document + return NotImplemented + + def __ne__(self, other: Any) -> bool: + return not self == other + + +def validate_collation_or_none( + value: Optional[Union[Mapping[str, Any], Collation]] +) -> Optional[Dict[str, Any]]: + if value is None: + return None + if isinstance(value, Collation): + return value.document + if isinstance(value, dict): + return value + raise TypeError("collation must be a dict, an instance of collation.Collation, or None.") diff --git a/backend/test/lib/python3.8/site-packages/pymongo/collection.py b/backend/test/lib/python3.8/site-packages/pymongo/collection.py new file mode 100644 index 0000000000000000000000000000000000000000..772e43e952debe47709f9838a6b86fc1d93cbc50 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/collection.py @@ -0,0 +1,3513 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Collection level utilities for Mongo.""" +from __future__ import annotations + +from collections import abc +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ContextManager, + Generic, + Iterable, + Iterator, + List, + Mapping, + MutableMapping, + NoReturn, + Optional, + Sequence, + Tuple, + Type, + TypeVar, + Union, + cast, +) + +from bson.codec_options import DEFAULT_CODEC_OPTIONS, CodecOptions +from bson.objectid import ObjectId +from bson.raw_bson import RawBSONDocument +from bson.son import SON +from bson.timestamp import Timestamp +from pymongo import ASCENDING, _csot, common, helpers, message +from pymongo.aggregation import ( + _CollectionAggregationCommand, + _CollectionRawAggregationCommand, +) +from pymongo.bulk import _Bulk +from pymongo.change_stream import CollectionChangeStream +from pymongo.collation import validate_collation_or_none +from pymongo.command_cursor import CommandCursor, RawBatchCommandCursor +from pymongo.common import _ecoc_coll_name, _esc_coll_name +from pymongo.cursor import Cursor, RawBatchCursor +from pymongo.errors import ( + ConfigurationError, + InvalidName, + InvalidOperation, + OperationFailure, +) +from pymongo.helpers import _check_write_command_response +from pymongo.message import _UNICODE_REPLACE_CODEC_OPTIONS +from pymongo.operations import ( + DeleteMany, + DeleteOne, + IndexModel, + InsertOne, + ReplaceOne, + SearchIndexModel, + UpdateMany, + UpdateOne, + _IndexKeyHint, + _IndexList, +) +from pymongo.read_preferences import ReadPreference, _ServerMode +from pymongo.results import ( + BulkWriteResult, + DeleteResult, + InsertManyResult, + InsertOneResult, + UpdateResult, +) +from pymongo.typings import _CollationIn, _DocumentType, _DocumentTypeArg, _Pipeline +from pymongo.write_concern import WriteConcern + +T = TypeVar("T") + +_FIND_AND_MODIFY_DOC_FIELDS = {"value": 1} + + +_WriteOp = Union[ + InsertOne[_DocumentType], + DeleteOne, + DeleteMany, + ReplaceOne[_DocumentType], + UpdateOne, + UpdateMany, +] + + +class ReturnDocument: + """An enum used with + :meth:`~pymongo.collection.Collection.find_one_and_replace` and + :meth:`~pymongo.collection.Collection.find_one_and_update`. + """ + + BEFORE = False + """Return the original document before it was updated/replaced, or + ``None`` if no document matches the query. + """ + AFTER = True + """Return the updated/replaced or inserted document.""" + + +if TYPE_CHECKING: + + import bson + from pymongo.aggregation import _AggregationCommand + from pymongo.client_session import ClientSession + from pymongo.collation import Collation + from pymongo.database import Database + from pymongo.pool import Connection + from pymongo.read_concern import ReadConcern + from pymongo.server import Server + + +class Collection(common.BaseObject, Generic[_DocumentType]): + """A Mongo collection.""" + + def __init__( + self, + database: Database[_DocumentType], + name: str, + create: Optional[bool] = False, + codec_options: Optional[CodecOptions[_DocumentTypeArg]] = None, + read_preference: Optional[_ServerMode] = None, + write_concern: Optional[WriteConcern] = None, + read_concern: Optional[ReadConcern] = None, + session: Optional[ClientSession] = None, + **kwargs: Any, + ) -> None: + """Get / create a Mongo collection. + + Raises :class:`TypeError` if `name` is not an instance of + :class:`str`. Raises :class:`~pymongo.errors.InvalidName` if `name` is + not a valid collection name. Any additional keyword arguments will be used + as options passed to the create command. 
See + :meth:`~pymongo.database.Database.create_collection` for valid + options. + + If `create` is ``True``, `collation` is specified, or any additional + keyword arguments are present, a ``create`` command will be + sent, using ``session`` if specified. Otherwise, a ``create`` command + will not be sent and the collection will be created implicitly on first + use. The optional ``session`` argument is *only* used for the ``create`` + command, it is not associated with the collection afterward. + + :Parameters: + - `database`: the database to get a collection from + - `name`: the name of the collection to get + - `create` (optional): if ``True``, force collection + creation even without options being set + - `codec_options` (optional): An instance of + :class:`~bson.codec_options.CodecOptions`. If ``None`` (the + default) database.codec_options is used. + - `read_preference` (optional): The read preference to use. If + ``None`` (the default) database.read_preference is used. + - `write_concern` (optional): An instance of + :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the + default) database.write_concern is used. + - `read_concern` (optional): An instance of + :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the + default) database.read_concern is used. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. If a collation is provided, + it will be passed to the create collection command. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession` that is used with + the create collection command + - `**kwargs` (optional): additional keyword arguments will + be passed as options for the create collection command + + .. versionchanged:: 4.2 + Added the ``clusteredIndex`` and ``encryptedFields`` parameters. + + .. versionchanged:: 4.0 + Removed the reindex, map_reduce, inline_map_reduce, + parallel_scan, initialize_unordered_bulk_op, + initialize_ordered_bulk_op, group, count, insert, save, + update, remove, find_and_modify, and ensure_index methods. See the + :ref:`pymongo4-migration-guide`. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. versionchanged:: 3.4 + Support the `collation` option. + + .. versionchanged:: 3.2 + Added the read_concern option. + + .. versionchanged:: 3.0 + Added the codec_options, read_preference, and write_concern options. + Removed the uuid_subtype attribute. + :class:`~pymongo.collection.Collection` no longer returns an + instance of :class:`~pymongo.collection.Collection` for attribute + names with leading underscores. You must use dict-style lookups + instead:: + + collection['__my_collection__'] + + Not: + + collection.__my_collection__ + + .. seealso:: The MongoDB documentation on `collections <https://dochub.mongodb.org/core/collections>`_. + """ + super().__init__( + codec_options or database.codec_options, + read_preference or database.read_preference, + write_concern or database.write_concern, + read_concern or database.read_concern, + ) + if not isinstance(name, str): + raise TypeError("name must be an instance of str") + + if not name or ".." in name: + raise InvalidName("collection names cannot be empty") + if "$" in name and not (name.startswith(("oplog.$main", "$cmd"))): + raise InvalidName("collection names must not contain '$': %r" % name) + if name[0] == "." 
or name[-1] == ".": + raise InvalidName("collection names must not start or end with '.': %r" % name) + if "\x00" in name: + raise InvalidName("collection names must not contain the null character") + collation = validate_collation_or_none(kwargs.pop("collation", None)) + + self.__database: Database[_DocumentType] = database + self.__name = name + self.__full_name = f"{self.__database.name}.{self.__name}" + self.__write_response_codec_options = self.codec_options._replace( + unicode_decode_error_handler="replace", document_class=dict + ) + self._timeout = database.client.options.timeout + encrypted_fields = kwargs.pop("encryptedFields", None) + if create or kwargs or collation: + if encrypted_fields: + common.validate_is_mapping("encrypted_fields", encrypted_fields) + opts = {"clusteredIndex": {"key": {"_id": 1}, "unique": True}} + self.__create( + _esc_coll_name(encrypted_fields, name), opts, None, session, qev2_required=True + ) + self.__create(_ecoc_coll_name(encrypted_fields, name), opts, None, session) + self.__create(name, kwargs, collation, session, encrypted_fields=encrypted_fields) + self.create_index([("__safeContent__", ASCENDING)], session) + else: + self.__create(name, kwargs, collation, session) + + def _conn_for_reads( + self, session: ClientSession + ) -> ContextManager[Tuple[Connection, _ServerMode]]: + return self.__database.client._conn_for_reads(self._read_preference_for(session), session) + + def _conn_for_writes(self, session: Optional[ClientSession]) -> ContextManager[Connection]: + return self.__database.client._conn_for_writes(session) + + def _command( + self, + conn: Connection, + command: MutableMapping[str, Any], + read_preference: Optional[_ServerMode] = None, + codec_options: Optional[CodecOptions] = None, + check: bool = True, + allowable_errors: Optional[Sequence[Union[str, int]]] = None, + read_concern: Optional[ReadConcern] = None, + write_concern: Optional[WriteConcern] = None, + collation: Optional[_CollationIn] = None, + session: Optional[ClientSession] = None, + retryable_write: bool = False, + user_fields: Optional[Any] = None, + ) -> Mapping[str, Any]: + """Internal command helper. + + :Parameters: + - `conn` - A Connection instance. + - `command` - The command itself, as a :class:`~bson.son.SON` instance. + - `read_preference` (optional) - The read preference to use. + - `codec_options` (optional) - An instance of + :class:`~bson.codec_options.CodecOptions`. + - `check`: raise OperationFailure if there are errors + - `allowable_errors`: errors to ignore if `check` is True + - `read_concern` (optional) - An instance of + :class:`~pymongo.read_concern.ReadConcern`. + - `write_concern`: An instance of + :class:`~pymongo.write_concern.WriteConcern`. + - `collation` (optional) - An instance of + :class:`~pymongo.collation.Collation`. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `retryable_write` (optional): True if this command is a retryable + write. + - `user_fields` (optional): Response fields that should be decoded + using the TypeDecoders from codec_options, passed to + bson._decode_all_selective. + + :Returns: + The result document. 
+ """ + with self.__database.client._tmp_session(session) as s: + return conn.command( + self.__database.name, + command, + read_preference or self._read_preference_for(session), + codec_options or self.codec_options, + check, + allowable_errors, + read_concern=read_concern, + write_concern=write_concern, + parse_write_concern_error=True, + collation=collation, + session=s, + client=self.__database.client, + retryable_write=retryable_write, + user_fields=user_fields, + ) + + def __create( + self, + name: str, + options: MutableMapping[str, Any], + collation: Optional[_CollationIn], + session: Optional[ClientSession], + encrypted_fields: Optional[Mapping[str, Any]] = None, + qev2_required: bool = False, + ) -> None: + """Sends a create command with the given options.""" + cmd: SON[str, Any] = SON([("create", name)]) + if encrypted_fields: + cmd["encryptedFields"] = encrypted_fields + + if options: + if "size" in options: + options["size"] = float(options["size"]) + cmd.update(options) + with self._conn_for_writes(session) as conn: + if qev2_required and conn.max_wire_version < 21: + raise ConfigurationError( + "Driver support of Queryable Encryption is incompatible with server. " + "Upgrade server to use Queryable Encryption. " + f"Got maxWireVersion {conn.max_wire_version} but need maxWireVersion >= 21 (MongoDB >=7.0)" + ) + + self._command( + conn, + cmd, + read_preference=ReadPreference.PRIMARY, + write_concern=self._write_concern_for(session), + collation=collation, + session=session, + ) + + def __getattr__(self, name: str) -> Collection[_DocumentType]: + """Get a sub-collection of this collection by name. + + Raises InvalidName if an invalid collection name is used. + + :Parameters: + - `name`: the name of the collection to get + """ + if name.startswith("_"): + full_name = f"{self.__name}.{name}" + raise AttributeError( + "Collection has no attribute {!r}. To access the {}" + " collection, use database['{}'].".format(name, full_name, full_name) + ) + return self.__getitem__(name) + + def __getitem__(self, name: str) -> Collection[_DocumentType]: + return Collection( + self.__database, + f"{self.__name}.{name}", + False, + self.codec_options, + self.read_preference, + self.write_concern, + self.read_concern, + ) + + def __repr__(self) -> str: + return f"Collection({self.__database!r}, {self.__name!r})" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Collection): + return self.__database == other.database and self.__name == other.name + return NotImplemented + + def __ne__(self, other: Any) -> bool: + return not self == other + + def __hash__(self) -> int: + return hash((self.__database, self.__name)) + + def __bool__(self) -> NoReturn: + raise NotImplementedError( + "Collection objects do not implement truth " + "value testing or bool(). Please compare " + "with None instead: collection is not None" + ) + + @property + def full_name(self) -> str: + """The full name of this :class:`Collection`. + + The full name is of the form `database_name.collection_name`. + """ + return self.__full_name + + @property + def name(self) -> str: + """The name of this :class:`Collection`.""" + return self.__name + + @property + def database(self) -> Database[_DocumentType]: + """The :class:`~pymongo.database.Database` that this + :class:`Collection` is a part of. 
+ """ + return self.__database + + def with_options( + self, + codec_options: Optional[bson.CodecOptions[_DocumentTypeArg]] = None, + read_preference: Optional[_ServerMode] = None, + write_concern: Optional[WriteConcern] = None, + read_concern: Optional[ReadConcern] = None, + ) -> Collection[_DocumentType]: + """Get a clone of this collection changing the specified settings. + + >>> coll1.read_preference + Primary() + >>> from pymongo import ReadPreference + >>> coll2 = coll1.with_options(read_preference=ReadPreference.SECONDARY) + >>> coll1.read_preference + Primary() + >>> coll2.read_preference + Secondary(tag_sets=None) + + :Parameters: + - `codec_options` (optional): An instance of + :class:`~bson.codec_options.CodecOptions`. If ``None`` (the + default) the :attr:`codec_options` of this :class:`Collection` + is used. + - `read_preference` (optional): The read preference to use. If + ``None`` (the default) the :attr:`read_preference` of this + :class:`Collection` is used. See :mod:`~pymongo.read_preferences` + for options. + - `write_concern` (optional): An instance of + :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the + default) the :attr:`write_concern` of this :class:`Collection` + is used. + - `read_concern` (optional): An instance of + :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the + default) the :attr:`read_concern` of this :class:`Collection` + is used. + """ + return Collection( + self.__database, + self.__name, + False, + codec_options or self.codec_options, + read_preference or self.read_preference, + write_concern or self.write_concern, + read_concern or self.read_concern, + ) + + @_csot.apply + def bulk_write( + self, + requests: Sequence[_WriteOp[_DocumentType]], + ordered: bool = True, + bypass_document_validation: bool = False, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + let: Optional[Mapping] = None, + ) -> BulkWriteResult: + """Send a batch of write operations to the server. + + Requests are passed as a list of write operation instances ( + :class:`~pymongo.operations.InsertOne`, + :class:`~pymongo.operations.UpdateOne`, + :class:`~pymongo.operations.UpdateMany`, + :class:`~pymongo.operations.ReplaceOne`, + :class:`~pymongo.operations.DeleteOne`, or + :class:`~pymongo.operations.DeleteMany`). + + >>> for doc in db.test.find({}): + ... print(doc) + ... + {'x': 1, '_id': ObjectId('54f62e60fba5226811f634ef')} + {'x': 1, '_id': ObjectId('54f62e60fba5226811f634f0')} + >>> # DeleteMany, UpdateOne, and UpdateMany are also available. + ... + >>> from pymongo import InsertOne, DeleteOne, ReplaceOne + >>> requests = [InsertOne({'y': 1}), DeleteOne({'x': 1}), + ... ReplaceOne({'w': 1}, {'z': 1}, upsert=True)] + >>> result = db.test.bulk_write(requests) + >>> result.inserted_count + 1 + >>> result.deleted_count + 1 + >>> result.modified_count + 0 + >>> result.upserted_ids + {2: ObjectId('54f62ee28891e756a6e1abd5')} + >>> for doc in db.test.find({}): + ... print(doc) + ... + {'x': 1, '_id': ObjectId('54f62e60fba5226811f634f0')} + {'y': 1, '_id': ObjectId('54f62ee2fba5226811f634f1')} + {'z': 1, '_id': ObjectId('54f62ee28891e756a6e1abd5')} + + :Parameters: + - `requests`: A list of write operations (see examples above). + - `ordered` (optional): If ``True`` (the default) requests will be + performed on the server serially, in the order provided. If an error + occurs all remaining operations are aborted. 
If ``False`` requests + will be performed on the server in arbitrary order, possibly in + parallel, and all operations will be attempted. + - `bypass_document_validation`: (optional) If ``True``, allows the + write to opt-out of document level validation. Default is + ``False``. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `let` (optional): Map of parameter names and values. Values must be + constant or closed expressions that do not reference document + fields. Parameters can then be accessed as variables in an + aggregate expression context (e.g. "$$var"). + + :Returns: + An instance of :class:`~pymongo.results.BulkWriteResult`. + + .. seealso:: :ref:`writes-and-ids` + + .. note:: `bypass_document_validation` requires server version + **>= 3.2** + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + Added ``let`` parameter. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. versionchanged:: 3.2 + Added bypass_document_validation support + + .. versionadded:: 3.0 + """ + common.validate_list("requests", requests) + + blk = _Bulk(self, ordered, bypass_document_validation, comment=comment, let=let) + for request in requests: + try: + request._add_to_bulk(blk) + except AttributeError: + raise TypeError(f"{request!r} is not a valid request") + + write_concern = self._write_concern_for(session) + bulk_api_result = blk.execute(write_concern, session) + if bulk_api_result is not None: + return BulkWriteResult(bulk_api_result, True) + return BulkWriteResult({}, False) + + def _insert_one( + self, + doc: Mapping[str, Any], + ordered: bool, + write_concern: WriteConcern, + op_id: Optional[int], + bypass_doc_val: bool, + session: Optional[ClientSession], + comment: Optional[Any] = None, + ) -> Any: + """Internal helper for inserting a single document.""" + write_concern = write_concern or self.write_concern + acknowledged = write_concern.acknowledged + command = SON([("insert", self.name), ("ordered", ordered), ("documents", [doc])]) + if comment is not None: + command["comment"] = comment + + def _insert_command( + session: Optional[ClientSession], conn: Connection, retryable_write: bool + ) -> None: + if bypass_doc_val: + command["bypassDocumentValidation"] = True + + result = conn.command( + self.__database.name, + command, + write_concern=write_concern, + codec_options=self.__write_response_codec_options, + session=session, + client=self.__database.client, + retryable_write=retryable_write, + ) + + _check_write_command_response(result) + + self.__database.client._retryable_write(acknowledged, _insert_command, session) + + if not isinstance(doc, RawBSONDocument): + return doc.get("_id") + return None + + def insert_one( + self, + document: Union[_DocumentType, RawBSONDocument], + bypass_document_validation: bool = False, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + ) -> InsertOneResult: + """Insert a single document. + + >>> db.test.count_documents({'x': 1}) + 0 + >>> result = db.test.insert_one({'x': 1}) + >>> result.inserted_id + ObjectId('54f112defba522406c9cc208') + >>> db.test.find_one({'x': 1}) + {'x': 1, '_id': ObjectId('54f112defba522406c9cc208')} + + :Parameters: + - `document`: The document to insert. Must be a mutable mapping + type. If the document does not have an _id field one will be + added automatically. 
+ - `bypass_document_validation`: (optional) If ``True``, allows the + write to opt-out of document level validation. Default is + ``False``. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + + :Returns: + - An instance of :class:`~pymongo.results.InsertOneResult`. + + .. seealso:: :ref:`writes-and-ids` + + .. note:: `bypass_document_validation` requires server version + **>= 3.2** + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. versionchanged:: 3.2 + Added bypass_document_validation support + + .. versionadded:: 3.0 + """ + common.validate_is_document_type("document", document) + if not (isinstance(document, RawBSONDocument) or "_id" in document): + document["_id"] = ObjectId() # type: ignore[index] + + write_concern = self._write_concern_for(session) + return InsertOneResult( + self._insert_one( + document, + ordered=True, + write_concern=write_concern, + op_id=None, + bypass_doc_val=bypass_document_validation, + session=session, + comment=comment, + ), + write_concern.acknowledged, + ) + + @_csot.apply + def insert_many( + self, + documents: Iterable[Union[_DocumentType, RawBSONDocument]], + ordered: bool = True, + bypass_document_validation: bool = False, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + ) -> InsertManyResult: + """Insert an iterable of documents. + + >>> db.test.count_documents({}) + 0 + >>> result = db.test.insert_many([{'x': i} for i in range(2)]) + >>> result.inserted_ids + [ObjectId('54f113fffba522406c9cc20e'), ObjectId('54f113fffba522406c9cc20f')] + >>> db.test.count_documents({}) + 2 + + :Parameters: + - `documents`: A iterable of documents to insert. + - `ordered` (optional): If ``True`` (the default) documents will be + inserted on the server serially, in the order provided. If an error + occurs all remaining inserts are aborted. If ``False``, documents + will be inserted on the server in arbitrary order, possibly in + parallel, and all document inserts will be attempted. + - `bypass_document_validation`: (optional) If ``True``, allows the + write to opt-out of document level validation. Default is + ``False``. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + + :Returns: + An instance of :class:`~pymongo.results.InsertManyResult`. + + .. seealso:: :ref:`writes-and-ids` + + .. note:: `bypass_document_validation` requires server version + **>= 3.2** + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. versionchanged:: 3.2 + Added bypass_document_validation support + + .. 
versionadded:: 3.0 + """ + if ( + not isinstance(documents, abc.Iterable) + or isinstance(documents, abc.Mapping) + or not documents + ): + raise TypeError("documents must be a non-empty list") + inserted_ids: List[ObjectId] = [] + + def gen() -> Iterator[Tuple[int, Mapping[str, Any]]]: + """A generator that validates documents and handles _ids.""" + for document in documents: + common.validate_is_document_type("document", document) + if not isinstance(document, RawBSONDocument): + if "_id" not in document: + document["_id"] = ObjectId() # type: ignore[index] + inserted_ids.append(document["_id"]) + yield (message._INSERT, document) + + write_concern = self._write_concern_for(session) + blk = _Bulk(self, ordered, bypass_document_validation, comment=comment) + blk.ops = list(gen()) + blk.execute(write_concern, session=session) + return InsertManyResult(inserted_ids, write_concern.acknowledged) + + def _update( + self, + conn: Connection, + criteria: Mapping[str, Any], + document: Union[Mapping[str, Any], _Pipeline], + upsert: bool = False, + multi: bool = False, + write_concern: Optional[WriteConcern] = None, + op_id: Optional[int] = None, + ordered: bool = True, + bypass_doc_val: Optional[bool] = False, + collation: Optional[_CollationIn] = None, + array_filters: Optional[Sequence[Mapping[str, Any]]] = None, + hint: Optional[_IndexKeyHint] = None, + session: Optional[ClientSession] = None, + retryable_write: bool = False, + let: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + ) -> Optional[Mapping[str, Any]]: + """Internal update / replace helper.""" + common.validate_boolean("upsert", upsert) + collation = validate_collation_or_none(collation) + write_concern = write_concern or self.write_concern + acknowledged = write_concern.acknowledged + update_doc: SON[str, Any] = SON( + [("q", criteria), ("u", document), ("multi", multi), ("upsert", upsert)] + ) + if collation is not None: + if not acknowledged: + raise ConfigurationError("Collation is unsupported for unacknowledged writes.") + else: + update_doc["collation"] = collation + if array_filters is not None: + if not acknowledged: + raise ConfigurationError("arrayFilters is unsupported for unacknowledged writes.") + else: + update_doc["arrayFilters"] = array_filters + if hint is not None: + if not acknowledged and conn.max_wire_version < 8: + raise ConfigurationError( + "Must be connected to MongoDB 4.2+ to use hint on unacknowledged update commands." + ) + if not isinstance(hint, str): + hint = helpers._index_document(hint) # type: ignore[assignment] + update_doc["hint"] = hint + command = SON([("update", self.name), ("ordered", ordered), ("updates", [update_doc])]) + if let is not None: + common.validate_is_mapping("let", let) + command["let"] = let + + if comment is not None: + command["comment"] = comment + # Update command. + if bypass_doc_val: + command["bypassDocumentValidation"] = True + + # The command result has to be published for APM unmodified + # so we make a shallow copy here before adding updatedExisting. + result = conn.command( + self.__database.name, + command, + write_concern=write_concern, + codec_options=self.__write_response_codec_options, + session=session, + client=self.__database.client, + retryable_write=retryable_write, + ).copy() + _check_write_command_response(result) + # Add the updatedExisting field for compatibility. 
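+        # "n" counts the documents matched on the server; when at least one
+        # document matched and nothing was upserted, an existing document was
+        # found, so report updatedExisting=True for legacy callers.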
+ if result.get("n") and "upserted" not in result: + result["updatedExisting"] = True + else: + result["updatedExisting"] = False + # MongoDB >= 2.6.0 returns the upsert _id in an array + # element. Break it out for backward compatibility. + if "upserted" in result: + result["upserted"] = result["upserted"][0]["_id"] + + if not acknowledged: + return None + return result + + def _update_retryable( + self, + criteria: Mapping[str, Any], + document: Union[Mapping[str, Any], _Pipeline], + upsert: bool = False, + multi: bool = False, + write_concern: Optional[WriteConcern] = None, + op_id: Optional[int] = None, + ordered: bool = True, + bypass_doc_val: Optional[bool] = False, + collation: Optional[_CollationIn] = None, + array_filters: Optional[Sequence[Mapping[str, Any]]] = None, + hint: Optional[_IndexKeyHint] = None, + session: Optional[ClientSession] = None, + let: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + ) -> Optional[Mapping[str, Any]]: + """Internal update / replace helper.""" + + def _update( + session: Optional[ClientSession], conn: Connection, retryable_write: bool + ) -> Optional[Mapping[str, Any]]: + return self._update( + conn, + criteria, + document, + upsert=upsert, + multi=multi, + write_concern=write_concern, + op_id=op_id, + ordered=ordered, + bypass_doc_val=bypass_doc_val, + collation=collation, + array_filters=array_filters, + hint=hint, + session=session, + retryable_write=retryable_write, + let=let, + comment=comment, + ) + + return self.__database.client._retryable_write( + (write_concern or self.write_concern).acknowledged and not multi, _update, session + ) + + def replace_one( + self, + filter: Mapping[str, Any], + replacement: Mapping[str, Any], + upsert: bool = False, + bypass_document_validation: bool = False, + collation: Optional[_CollationIn] = None, + hint: Optional[_IndexKeyHint] = None, + session: Optional[ClientSession] = None, + let: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + ) -> UpdateResult: + """Replace a single document matching the filter. + + >>> for doc in db.test.find({}): + ... print(doc) + ... + {'x': 1, '_id': ObjectId('54f4c5befba5220aa4d6dee7')} + >>> result = db.test.replace_one({'x': 1}, {'y': 1}) + >>> result.matched_count + 1 + >>> result.modified_count + 1 + >>> for doc in db.test.find({}): + ... print(doc) + ... + {'y': 1, '_id': ObjectId('54f4c5befba5220aa4d6dee7')} + + The *upsert* option can be used to insert a new document if a matching + document does not exist. + + >>> result = db.test.replace_one({'x': 1}, {'x': 1}, True) + >>> result.matched_count + 0 + >>> result.modified_count + 0 + >>> result.upserted_id + ObjectId('54f11e5c8891e756a6e1abd4') + >>> db.test.find_one({'x': 1}) + {'x': 1, '_id': ObjectId('54f11e5c8891e756a6e1abd4')} + + :Parameters: + - `filter`: A query that matches the document to replace. + - `replacement`: The new document. + - `upsert` (optional): If ``True``, perform an insert if no documents + match the filter. + - `bypass_document_validation`: (optional) If ``True``, allows the + write to opt-out of document level validation. Default is + ``False``. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `hint` (optional): An index to use to support the query + predicate specified either by its string name, or in the same + format as passed to + :meth:`~pymongo.collection.Collection.create_index` (e.g. + ``[('field', ASCENDING)]``). This option is only supported on + MongoDB 4.2 and above. 
+ - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `let` (optional): Map of parameter names and values. Values must be + constant or closed expressions that do not reference document + fields. Parameters can then be accessed as variables in an + aggregate expression context (e.g. "$$var"). + - `comment` (optional): A user-provided comment to attach to this + command. + :Returns: + - An instance of :class:`~pymongo.results.UpdateResult`. + + .. versionchanged:: 4.1 + Added ``let`` parameter. + Added ``comment`` parameter. + .. versionchanged:: 3.11 + Added ``hint`` parameter. + .. versionchanged:: 3.6 + Added ``session`` parameter. + .. versionchanged:: 3.4 + Added the `collation` option. + .. versionchanged:: 3.2 + Added bypass_document_validation support. + + .. versionadded:: 3.0 + """ + common.validate_is_mapping("filter", filter) + common.validate_ok_for_replace(replacement) + if let is not None: + common.validate_is_mapping("let", let) + write_concern = self._write_concern_for(session) + return UpdateResult( + self._update_retryable( + filter, + replacement, + upsert, + write_concern=write_concern, + bypass_doc_val=bypass_document_validation, + collation=collation, + hint=hint, + session=session, + let=let, + comment=comment, + ), + write_concern.acknowledged, + ) + + def update_one( + self, + filter: Mapping[str, Any], + update: Union[Mapping[str, Any], _Pipeline], + upsert: bool = False, + bypass_document_validation: bool = False, + collation: Optional[_CollationIn] = None, + array_filters: Optional[Sequence[Mapping[str, Any]]] = None, + hint: Optional[_IndexKeyHint] = None, + session: Optional[ClientSession] = None, + let: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + ) -> UpdateResult: + """Update a single document matching the filter. + + >>> for doc in db.test.find(): + ... print(doc) + ... + {'x': 1, '_id': 0} + {'x': 1, '_id': 1} + {'x': 1, '_id': 2} + >>> result = db.test.update_one({'x': 1}, {'$inc': {'x': 3}}) + >>> result.matched_count + 1 + >>> result.modified_count + 1 + >>> for doc in db.test.find(): + ... print(doc) + ... + {'x': 4, '_id': 0} + {'x': 1, '_id': 1} + {'x': 1, '_id': 2} + + If ``upsert=True`` and no documents match the filter, create a + new document based on the filter criteria and update modifications. + + >>> result = db.test.update_one({'x': -10}, {'$inc': {'x': 3}}, upsert=True) + >>> result.matched_count + 0 + >>> result.modified_count + 0 + >>> result.upserted_id + ObjectId('626a678eeaa80587d4bb3fb7') + >>> db.test.find_one(result.upserted_id) + {'_id': ObjectId('626a678eeaa80587d4bb3fb7'), 'x': -7} + + :Parameters: + - `filter`: A query that matches the document to update. + - `update`: The modifications to apply. + - `upsert` (optional): If ``True``, perform an insert if no documents + match the filter. + - `bypass_document_validation`: (optional) If ``True``, allows the + write to opt-out of document level validation. Default is + ``False``. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `array_filters` (optional): A list of filters specifying which + array elements an update should apply. + - `hint` (optional): An index to use to support the query + predicate specified either by its string name, or in the same + format as passed to + :meth:`~pymongo.collection.Collection.create_index` (e.g. + ``[('field', ASCENDING)]``). This option is only supported on + MongoDB 4.2 and above. 
+ - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `let` (optional): Map of parameter names and values. Values must be + constant or closed expressions that do not reference document + fields. Parameters can then be accessed as variables in an + aggregate expression context (e.g. "$$var"). + - `comment` (optional): A user-provided comment to attach to this + command. + + :Returns: + - An instance of :class:`~pymongo.results.UpdateResult`. + + .. versionchanged:: 4.1 + Added ``let`` parameter. + Added ``comment`` parameter. + .. versionchanged:: 3.11 + Added ``hint`` parameter. + .. versionchanged:: 3.9 + Added the ability to accept a pipeline as the ``update``. + .. versionchanged:: 3.6 + Added the ``array_filters`` and ``session`` parameters. + .. versionchanged:: 3.4 + Added the ``collation`` option. + .. versionchanged:: 3.2 + Added ``bypass_document_validation`` support. + + .. versionadded:: 3.0 + """ + common.validate_is_mapping("filter", filter) + common.validate_ok_for_update(update) + common.validate_list_or_none("array_filters", array_filters) + + write_concern = self._write_concern_for(session) + return UpdateResult( + self._update_retryable( + filter, + update, + upsert, + write_concern=write_concern, + bypass_doc_val=bypass_document_validation, + collation=collation, + array_filters=array_filters, + hint=hint, + session=session, + let=let, + comment=comment, + ), + write_concern.acknowledged, + ) + + def update_many( + self, + filter: Mapping[str, Any], + update: Union[Mapping[str, Any], _Pipeline], + upsert: bool = False, + array_filters: Optional[Sequence[Mapping[str, Any]]] = None, + bypass_document_validation: Optional[bool] = None, + collation: Optional[_CollationIn] = None, + hint: Optional[_IndexKeyHint] = None, + session: Optional[ClientSession] = None, + let: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + ) -> UpdateResult: + """Update one or more documents that match the filter. + + >>> for doc in db.test.find(): + ... print(doc) + ... + {'x': 1, '_id': 0} + {'x': 1, '_id': 1} + {'x': 1, '_id': 2} + >>> result = db.test.update_many({'x': 1}, {'$inc': {'x': 3}}) + >>> result.matched_count + 3 + >>> result.modified_count + 3 + >>> for doc in db.test.find(): + ... print(doc) + ... + {'x': 4, '_id': 0} + {'x': 4, '_id': 1} + {'x': 4, '_id': 2} + + :Parameters: + - `filter`: A query that matches the documents to update. + - `update`: The modifications to apply. + - `upsert` (optional): If ``True``, perform an insert if no documents + match the filter. + - `bypass_document_validation` (optional): If ``True``, allows the + write to opt-out of document level validation. Default is + ``False``. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `array_filters` (optional): A list of filters specifying which + array elements an update should apply. + - `hint` (optional): An index to use to support the query + predicate specified either by its string name, or in the same + format as passed to + :meth:`~pymongo.collection.Collection.create_index` (e.g. + ``[('field', ASCENDING)]``). This option is only supported on + MongoDB 4.2 and above. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `let` (optional): Map of parameter names and values. Values must be + constant or closed expressions that do not reference document + fields. Parameters can then be accessed as variables in an + aggregate expression context (e.g. "$$var"). 
+ - `comment` (optional): A user-provided comment to attach to this + command. + + :Returns: + - An instance of :class:`~pymongo.results.UpdateResult`. + + .. versionchanged:: 4.1 + Added ``let`` parameter. + Added ``comment`` parameter. + .. versionchanged:: 3.11 + Added ``hint`` parameter. + .. versionchanged:: 3.9 + Added the ability to accept a pipeline as the `update`. + .. versionchanged:: 3.6 + Added ``array_filters`` and ``session`` parameters. + .. versionchanged:: 3.4 + Added the `collation` option. + .. versionchanged:: 3.2 + Added bypass_document_validation support. + + .. versionadded:: 3.0 + """ + common.validate_is_mapping("filter", filter) + common.validate_ok_for_update(update) + common.validate_list_or_none("array_filters", array_filters) + + write_concern = self._write_concern_for(session) + return UpdateResult( + self._update_retryable( + filter, + update, + upsert, + multi=True, + write_concern=write_concern, + bypass_doc_val=bypass_document_validation, + collation=collation, + array_filters=array_filters, + hint=hint, + session=session, + let=let, + comment=comment, + ), + write_concern.acknowledged, + ) + + def drop( + self, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + encrypted_fields: Optional[Mapping[str, Any]] = None, + ) -> None: + """Alias for :meth:`~pymongo.database.Database.drop_collection`. + + :Parameters: + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `encrypted_fields`: **(BETA)** Document that describes the encrypted fields for + Queryable Encryption. + + The following two calls are equivalent: + + >>> db.foo.drop() + >>> db.drop_collection("foo") + + .. versionchanged:: 4.2 + Added ``encrypted_fields`` parameter. + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + + .. versionchanged:: 3.7 + :meth:`drop` now respects this :class:`Collection`'s :attr:`write_concern`. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + dbo = self.__database.client.get_database( + self.__database.name, + self.codec_options, + self.read_preference, + self.write_concern, + self.read_concern, + ) + dbo.drop_collection( + self.__name, session=session, comment=comment, encrypted_fields=encrypted_fields + ) + + def _delete( + self, + conn: Connection, + criteria: Mapping[str, Any], + multi: bool, + write_concern: Optional[WriteConcern] = None, + op_id: Optional[int] = None, + ordered: bool = True, + collation: Optional[_CollationIn] = None, + hint: Optional[_IndexKeyHint] = None, + session: Optional[ClientSession] = None, + retryable_write: bool = False, + let: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + ) -> Mapping[str, Any]: + """Internal delete helper.""" + common.validate_is_mapping("filter", criteria) + write_concern = write_concern or self.write_concern + acknowledged = write_concern.acknowledged + delete_doc = SON([("q", criteria), ("limit", int(not multi))]) + collation = validate_collation_or_none(collation) + if collation is not None: + if not acknowledged: + raise ConfigurationError("Collation is unsupported for unacknowledged writes.") + else: + delete_doc["collation"] = collation + if hint is not None: + if not acknowledged and conn.max_wire_version < 9: + raise ConfigurationError( + "Must be connected to MongoDB 4.4+ to use hint on unacknowledged delete commands." 
+ ) + if not isinstance(hint, str): + hint = helpers._index_document(hint) # type: ignore[assignment] + delete_doc["hint"] = hint + command = SON([("delete", self.name), ("ordered", ordered), ("deletes", [delete_doc])]) + + if let is not None: + common.validate_is_document_type("let", let) + command["let"] = let + + if comment is not None: + command["comment"] = comment + + # Delete command. + result = conn.command( + self.__database.name, + command, + write_concern=write_concern, + codec_options=self.__write_response_codec_options, + session=session, + client=self.__database.client, + retryable_write=retryable_write, + ) + _check_write_command_response(result) + return result + + def _delete_retryable( + self, + criteria: Mapping[str, Any], + multi: bool, + write_concern: Optional[WriteConcern] = None, + op_id: Optional[int] = None, + ordered: bool = True, + collation: Optional[_CollationIn] = None, + hint: Optional[_IndexKeyHint] = None, + session: Optional[ClientSession] = None, + let: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + ) -> Mapping[str, Any]: + """Internal delete helper.""" + + def _delete( + session: Optional[ClientSession], conn: Connection, retryable_write: bool + ) -> Mapping[str, Any]: + return self._delete( + conn, + criteria, + multi, + write_concern=write_concern, + op_id=op_id, + ordered=ordered, + collation=collation, + hint=hint, + session=session, + retryable_write=retryable_write, + let=let, + comment=comment, + ) + + return self.__database.client._retryable_write( + (write_concern or self.write_concern).acknowledged and not multi, _delete, session + ) + + def delete_one( + self, + filter: Mapping[str, Any], + collation: Optional[_CollationIn] = None, + hint: Optional[_IndexKeyHint] = None, + session: Optional[ClientSession] = None, + let: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + ) -> DeleteResult: + """Delete a single document matching the filter. + + >>> db.test.count_documents({'x': 1}) + 3 + >>> result = db.test.delete_one({'x': 1}) + >>> result.deleted_count + 1 + >>> db.test.count_documents({'x': 1}) + 2 + + :Parameters: + - `filter`: A query that matches the document to delete. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `hint` (optional): An index to use to support the query + predicate specified either by its string name, or in the same + format as passed to + :meth:`~pymongo.collection.Collection.create_index` (e.g. + ``[('field', ASCENDING)]``). This option is only supported on + MongoDB 4.4 and above. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `let` (optional): Map of parameter names and values. Values must be + constant or closed expressions that do not reference document + fields. Parameters can then be accessed as variables in an + aggregate expression context (e.g. "$$var"). + - `comment` (optional): A user-provided comment to attach to this + command. + + :Returns: + - An instance of :class:`~pymongo.results.DeleteResult`. + + .. versionchanged:: 4.1 + Added ``let`` parameter. + Added ``comment`` parameter. + .. versionchanged:: 3.11 + Added ``hint`` parameter. + .. versionchanged:: 3.6 + Added ``session`` parameter. + .. versionchanged:: 3.4 + Added the `collation` option. + .. 
versionadded:: 3.0 + """ + write_concern = self._write_concern_for(session) + return DeleteResult( + self._delete_retryable( + filter, + False, + write_concern=write_concern, + collation=collation, + hint=hint, + session=session, + let=let, + comment=comment, + ), + write_concern.acknowledged, + ) + + def delete_many( + self, + filter: Mapping[str, Any], + collation: Optional[_CollationIn] = None, + hint: Optional[_IndexKeyHint] = None, + session: Optional[ClientSession] = None, + let: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + ) -> DeleteResult: + """Delete one or more documents matching the filter. + + >>> db.test.count_documents({'x': 1}) + 3 + >>> result = db.test.delete_many({'x': 1}) + >>> result.deleted_count + 3 + >>> db.test.count_documents({'x': 1}) + 0 + + :Parameters: + - `filter`: A query that matches the documents to delete. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `hint` (optional): An index to use to support the query + predicate specified either by its string name, or in the same + format as passed to + :meth:`~pymongo.collection.Collection.create_index` (e.g. + ``[('field', ASCENDING)]``). This option is only supported on + MongoDB 4.4 and above. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `let` (optional): Map of parameter names and values. Values must be + constant or closed expressions that do not reference document + fields. Parameters can then be accessed as variables in an + aggregate expression context (e.g. "$$var"). + - `comment` (optional): A user-provided comment to attach to this + command. + + :Returns: + - An instance of :class:`~pymongo.results.DeleteResult`. + + .. versionchanged:: 4.1 + Added ``let`` parameter. + Added ``comment`` parameter. + .. versionchanged:: 3.11 + Added ``hint`` parameter. + .. versionchanged:: 3.6 + Added ``session`` parameter. + .. versionchanged:: 3.4 + Added the `collation` option. + .. versionadded:: 3.0 + """ + write_concern = self._write_concern_for(session) + return DeleteResult( + self._delete_retryable( + filter, + True, + write_concern=write_concern, + collation=collation, + hint=hint, + session=session, + let=let, + comment=comment, + ), + write_concern.acknowledged, + ) + + def find_one( + self, filter: Optional[Any] = None, *args: Any, **kwargs: Any + ) -> Optional[_DocumentType]: + """Get a single document from the database. + + All arguments to :meth:`find` are also valid arguments for + :meth:`find_one`, although any `limit` argument will be + ignored. Returns a single document, or ``None`` if no matching + document is found. + + The :meth:`find_one` method obeys the :attr:`read_preference` of + this :class:`Collection`. + + :Parameters: + + - `filter` (optional): a dictionary specifying + the query to be performed OR any other type to be used as + the value for a query for ``"_id"``. + + - `*args` (optional): any additional positional arguments + are the same as the arguments to :meth:`find`. + + - `**kwargs` (optional): any additional keyword arguments + are the same as the arguments to :meth:`find`. + + >>> collection.find_one(max_time_ms=100) + + """ + if filter is not None and not isinstance(filter, abc.Mapping): + filter = {"_id": filter} + cursor = self.find(filter, *args, **kwargs) + for result in cursor.limit(-1): + return result + return None + + def find(self, *args: Any, **kwargs: Any) -> Cursor[_DocumentType]: + """Query the database. 
+ + The `filter` argument is a query document that all results + must match. For example: + + >>> db.test.find({"hello": "world"}) + + only matches documents that have a key "hello" with value + "world". Matches can have other keys *in addition* to + "hello". The `projection` argument is used to specify a subset + of fields that should be included in the result documents. By + limiting results to a certain subset of fields you can cut + down on network traffic and decoding time. + + Raises :class:`TypeError` if any of the arguments are of + improper type. Returns an instance of + :class:`~pymongo.cursor.Cursor` corresponding to this query. + + The :meth:`find` method obeys the :attr:`read_preference` of + this :class:`Collection`. + + :Parameters: + - `filter` (optional): A query document that selects which documents + to include in the result set. Can be an empty document to include + all documents. + - `projection` (optional): a list of field names that should be + returned in the result set or a dict specifying the fields + to include or exclude. If `projection` is a list "_id" will + always be returned. Use a dict to exclude fields from + the result (e.g. projection={'_id': False}). + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `skip` (optional): the number of documents to omit (from + the start of the result set) when returning the results + - `limit` (optional): the maximum number of results to + return. A limit of 0 (the default) is equivalent to setting no + limit. + - `no_cursor_timeout` (optional): if False (the default), any + returned cursor is closed by the server after 10 minutes of + inactivity. If set to True, the returned cursor will never + time out on the server. Care should be taken to ensure that + cursors with no_cursor_timeout turned on are properly closed. + - `cursor_type` (optional): the type of cursor to return. The valid + options are defined by :class:`~pymongo.cursor.CursorType`: + + - :attr:`~pymongo.cursor.CursorType.NON_TAILABLE` - the result of + this find call will return a standard cursor over the result set. + - :attr:`~pymongo.cursor.CursorType.TAILABLE` - the result of this + find call will be a tailable cursor - tailable cursors are only + for use with capped collections. They are not closed when the + last data is retrieved but are kept open and the cursor location + marks the final document position. If more data is received + iteration of the cursor will continue from the last document + received. For details, see the `tailable cursor documentation + <https://www.mongodb.com/docs/manual/core/tailable-cursors/>`_. + - :attr:`~pymongo.cursor.CursorType.TAILABLE_AWAIT` - the result + of this find call will be a tailable cursor with the await flag + set. The server will wait for a few seconds after returning the + full result set so that it can capture and return additional data + added during the query. + - :attr:`~pymongo.cursor.CursorType.EXHAUST` - the result of this + find call will be an exhaust cursor. MongoDB will stream batched + results to the client without waiting for the client to request + each batch, reducing latency. See notes on compatibility below. + + - `sort` (optional): a list of (key, direction) pairs + specifying the sort order for this query. See + :meth:`~pymongo.cursor.Cursor.sort` for details. + - `allow_partial_results` (optional): if True, mongos will return + partial results if some shards are down instead of returning an + error. 
+ - `oplog_replay` (optional): **DEPRECATED** - if True, set the + oplogReplay query flag. Default: False. + - `batch_size` (optional): Limits the number of documents returned in + a single batch. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `return_key` (optional): If True, return only the index keys in + each document. + - `show_record_id` (optional): If True, adds a field ``$recordId`` in + each document with the storage engine's internal record identifier. + - `snapshot` (optional): **DEPRECATED** - If True, prevents the + cursor from returning a document more than once because of an + intervening write operation. + - `hint` (optional): An index, in the same format as passed to + :meth:`~pymongo.collection.Collection.create_index` (e.g. + ``[('field', ASCENDING)]``). Pass this as an alternative to calling + :meth:`~pymongo.cursor.Cursor.hint` on the cursor to tell Mongo the + proper index to use for the query. + - `max_time_ms` (optional): Specifies a time limit for a query + operation. If the specified time is exceeded, the operation will be + aborted and :exc:`~pymongo.errors.ExecutionTimeout` is raised. Pass + this as an alternative to calling + :meth:`~pymongo.cursor.Cursor.max_time_ms` on the cursor. + - `max_scan` (optional): **DEPRECATED** - The maximum number of + documents to scan. Pass this as an alternative to calling + :meth:`~pymongo.cursor.Cursor.max_scan` on the cursor. + - `min` (optional): A list of field, limit pairs specifying the + inclusive lower bound for all keys of a specific index in order. + Pass this as an alternative to calling + :meth:`~pymongo.cursor.Cursor.min` on the cursor. ``hint`` must + also be passed to ensure the query utilizes the correct index. + - `max` (optional): A list of field, limit pairs specifying the + exclusive upper bound for all keys of a specific index in order. + Pass this as an alternative to calling + :meth:`~pymongo.cursor.Cursor.max` on the cursor. ``hint`` must + also be passed to ensure the query utilizes the correct index. + - `comment` (optional): A string to attach to the query to help + interpret and trace the operation in the server logs and in profile + data. Pass this as an alternative to calling + :meth:`~pymongo.cursor.Cursor.comment` on the cursor. + - `allow_disk_use` (optional): if True, MongoDB may use temporary + disk files to store data exceeding the system memory limit while + processing a blocking sort operation. The option has no effect if + MongoDB can satisfy the specified sort using an index, or if the + blocking sort requires less memory than the 100 MiB limit. This + option is only supported on MongoDB 4.4 and above. + + .. note:: There are a number of caveats to using + :attr:`~pymongo.cursor.CursorType.EXHAUST` as cursor_type: + + - The `limit` option can not be used with an exhaust cursor. + + - Exhaust cursors are not supported by mongos and can not be + used with a sharded cluster. + + - A :class:`~pymongo.cursor.Cursor` instance created with the + :attr:`~pymongo.cursor.CursorType.EXHAUST` cursor_type requires an + exclusive :class:`~socket.socket` connection to MongoDB. If the + :class:`~pymongo.cursor.Cursor` is discarded without being + completely iterated the underlying :class:`~socket.socket` + connection will be closed and discarded without being returned to + the connection pool. + + .. versionchanged:: 4.0 + Removed the ``modifiers`` option. 
+ Empty projections (eg {} or []) are passed to the server as-is, + rather than the previous behavior which substituted in a + projection of ``{"_id": 1}``. This means that an empty projection + will now return the entire document, not just the ``"_id"`` field. + + .. versionchanged:: 3.11 + Added the ``allow_disk_use`` option. + Deprecated the ``oplog_replay`` option. Support for this option is + deprecated in MongoDB 4.4. The query engine now automatically + optimizes queries against the oplog without requiring this + option to be set. + + .. versionchanged:: 3.7 + Deprecated the ``snapshot`` option, which is deprecated in MongoDB + 3.6 and removed in MongoDB 4.0. + Deprecated the ``max_scan`` option. Support for this option is + deprecated in MongoDB 4.0. Use ``max_time_ms`` instead to limit + server-side execution time. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. versionchanged:: 3.5 + Added the options ``return_key``, ``show_record_id``, ``snapshot``, + ``hint``, ``max_time_ms``, ``max_scan``, ``min``, ``max``, and + ``comment``. + Deprecated the ``modifiers`` option. + + .. versionchanged:: 3.4 + Added support for the ``collation`` option. + + .. versionchanged:: 3.0 + Changed the parameter names ``spec``, ``fields``, ``timeout``, and + ``partial`` to ``filter``, ``projection``, ``no_cursor_timeout``, + and ``allow_partial_results`` respectively. + Added the ``cursor_type``, ``oplog_replay``, and ``modifiers`` + options. + Removed the ``network_timeout``, ``read_preference``, ``tag_sets``, + ``secondary_acceptable_latency_ms``, ``max_scan``, ``snapshot``, + ``tailable``, ``await_data``, ``exhaust``, ``as_class``, and + slave_okay parameters. + Removed ``compile_re`` option: PyMongo now always + represents BSON regular expressions as :class:`~bson.regex.Regex` + objects. Use :meth:`~bson.regex.Regex.try_compile` to attempt to + convert from a BSON regular expression to a Python regular + expression object. + Soft deprecated the ``manipulate`` option. + + .. seealso:: The MongoDB documentation on `find <https://dochub.mongodb.org/core/find>`_. + """ + return Cursor(self, *args, **kwargs) + + def find_raw_batches(self, *args: Any, **kwargs: Any) -> RawBatchCursor[_DocumentType]: + """Query the database and retrieve batches of raw BSON. + + Similar to the :meth:`find` method but returns a + :class:`~pymongo.cursor.RawBatchCursor`. + + This example demonstrates how to work with raw batches, but in practice + raw batches should be passed to an external library that can decode + BSON into another data type, rather than used with PyMongo's + :mod:`bson` module. + + >>> import bson + >>> cursor = db.test.find_raw_batches() + >>> for batch in cursor: + ... print(bson.decode_all(batch)) + + .. note:: find_raw_batches does not support auto encryption. + + .. versionchanged:: 3.12 + Instead of ignoring the user-specified read concern, this method + now sends it to the server when connected to MongoDB 3.6+. + + Added session support. + + .. versionadded:: 3.6 + """ + # OP_MSG is required to support encryption. 
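+        # Raw batches are returned as undecoded BSON, so the driver cannot
+        # transparently decrypt fields in the results; refuse early when the
+        # client was configured with automatic encryption.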
+ if self.__database.client._encrypter: + raise InvalidOperation("find_raw_batches does not support auto encryption") + return RawBatchCursor(self, *args, **kwargs) + + def _count_cmd( + self, + session: Optional[ClientSession], + conn: Connection, + read_preference: Optional[_ServerMode], + cmd: SON[str, Any], + collation: Optional[Collation], + ) -> int: + """Internal count command helper.""" + # XXX: "ns missing" checks can be removed when we drop support for + # MongoDB 3.0, see SERVER-17051. + res = self._command( + conn, + cmd, + read_preference=read_preference, + allowable_errors=["ns missing"], + codec_options=self.__write_response_codec_options, + read_concern=self.read_concern, + collation=collation, + session=session, + ) + if res.get("errmsg", "") == "ns missing": + return 0 + return int(res["n"]) + + def _aggregate_one_result( + self, + conn: Connection, + read_preference: Optional[_ServerMode], + cmd: SON[str, Any], + collation: Optional[_CollationIn], + session: Optional[ClientSession], + ) -> Optional[Mapping[str, Any]]: + """Internal helper to run an aggregate that returns a single result.""" + result = self._command( + conn, + cmd, + read_preference, + allowable_errors=[26], # Ignore NamespaceNotFound. + codec_options=self.__write_response_codec_options, + read_concern=self.read_concern, + collation=collation, + session=session, + ) + # cursor will not be present for NamespaceNotFound errors. + if "cursor" not in result: + return None + batch = result["cursor"]["firstBatch"] + return batch[0] if batch else None + + def estimated_document_count(self, comment: Optional[Any] = None, **kwargs: Any) -> int: + """Get an estimate of the number of documents in this collection using + collection metadata. + + The :meth:`estimated_document_count` method is **not** supported in a + transaction. + + All optional parameters should be passed as keyword arguments + to this method. Valid options include: + + - `maxTimeMS` (int): The maximum amount of time to allow this + operation to run, in milliseconds. + + :Parameters: + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): See list of options above. + + .. versionchanged:: 4.2 + This method now always uses the `count`_ command. Due to an oversight in versions + 5.0.0-5.0.8 of MongoDB, the count command was not included in V1 of the + :ref:`versioned-api-ref`. Users of the Stable API with estimated_document_count are + recommended to upgrade their server version to 5.0.9+ or set + :attr:`pymongo.server_api.ServerApi.strict` to ``False`` to avoid encountering errors. + + .. versionadded:: 3.7 + .. _count: https://mongodb.com/docs/manual/reference/command/count/ + """ + if "session" in kwargs: + raise ConfigurationError("estimated_document_count does not support sessions") + if comment is not None: + kwargs["comment"] = comment + + def _cmd( + session: Optional[ClientSession], + server: Server, + conn: Connection, + read_preference: Optional[_ServerMode], + ) -> int: + cmd: SON[str, Any] = SON([("count", self.__name)]) + cmd.update(kwargs) + return self._count_cmd(session, conn, read_preference, cmd, collation=None) + + return self._retryable_non_cursor_read(_cmd, None) + + def count_documents( + self, + filter: Mapping[str, Any], + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> int: + """Count the number of documents in this collection. + + .. 
note:: For a fast count of the total documents in a collection see + :meth:`estimated_document_count`. + + The :meth:`count_documents` method is supported in a transaction. + + All optional parameters should be passed as keyword arguments + to this method. Valid options include: + + - `skip` (int): The number of matching documents to skip before + returning results. + - `limit` (int): The maximum number of documents to count. Must be + a positive integer. If not provided, no limit is imposed. + - `maxTimeMS` (int): The maximum amount of time to allow this + operation to run, in milliseconds. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `hint` (string or list of tuples): The index to use. Specify either + the index name as a string or the index specification as a list of + tuples (e.g. [('a', pymongo.ASCENDING), ('b', pymongo.ASCENDING)]). + + The :meth:`count_documents` method obeys the :attr:`read_preference` of + this :class:`Collection`. + + .. note:: When migrating from :meth:`count` to :meth:`count_documents` + the following query operators must be replaced: + + +-------------+-------------------------------------+ + | Operator | Replacement | + +=============+=====================================+ + | $where | `$expr`_ | + +-------------+-------------------------------------+ + | $near | `$geoWithin`_ with `$center`_ | + +-------------+-------------------------------------+ + | $nearSphere | `$geoWithin`_ with `$centerSphere`_ | + +-------------+-------------------------------------+ + + :Parameters: + - `filter` (required): A query document that selects which documents + to count in the collection. Can be an empty document to count all + documents. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): See list of options above. + + + .. versionadded:: 3.7 + + .. _$expr: https://mongodb.com/docs/manual/reference/operator/query/expr/ + .. _$geoWithin: https://mongodb.com/docs/manual/reference/operator/query/geoWithin/ + .. _$center: https://mongodb.com/docs/manual/reference/operator/query/center/ + .. 
_$centerSphere: https://mongodb.com/docs/manual/reference/operator/query/centerSphere/ + """ + pipeline = [{"$match": filter}] + if "skip" in kwargs: + pipeline.append({"$skip": kwargs.pop("skip")}) + if "limit" in kwargs: + pipeline.append({"$limit": kwargs.pop("limit")}) + if comment is not None: + kwargs["comment"] = comment + pipeline.append({"$group": {"_id": 1, "n": {"$sum": 1}}}) + cmd = SON([("aggregate", self.__name), ("pipeline", pipeline), ("cursor", {})]) + if "hint" in kwargs and not isinstance(kwargs["hint"], str): + kwargs["hint"] = helpers._index_document(kwargs["hint"]) + collation = validate_collation_or_none(kwargs.pop("collation", None)) + cmd.update(kwargs) + + def _cmd( + session: Optional[ClientSession], + server: Server, + conn: Connection, + read_preference: Optional[_ServerMode], + ) -> int: + result = self._aggregate_one_result(conn, read_preference, cmd, collation, session) + if not result: + return 0 + return result["n"] + + return self._retryable_non_cursor_read(_cmd, session) + + def _retryable_non_cursor_read( + self, + func: Callable[[Optional[ClientSession], Server, Connection, Optional[_ServerMode]], T], + session: Optional[ClientSession], + ) -> T: + """Non-cursor read helper to handle implicit session creation.""" + client = self.__database.client + with client._tmp_session(session) as s: + return client._retryable_read(func, self._read_preference_for(s), s) + + def create_indexes( + self, + indexes: Sequence[IndexModel], + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> List[str]: + """Create one or more indexes on this collection. + + >>> from pymongo import IndexModel, ASCENDING, DESCENDING + >>> index1 = IndexModel([("hello", DESCENDING), + ... ("world", ASCENDING)], name="hello_world") + >>> index2 = IndexModel([("goodbye", DESCENDING)]) + >>> db.test.create_indexes([index1, index2]) + ["hello_world", "goodbye_-1"] + + :Parameters: + - `indexes`: A list of :class:`~pymongo.operations.IndexModel` + instances. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): optional arguments to the createIndexes + command (like maxTimeMS) can be passed as keyword arguments. + + + + + .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of + this collection is automatically applied to this operation. + + .. versionchanged:: 3.6 + Added ``session`` parameter. Added support for arbitrary keyword + arguments. + + .. versionchanged:: 3.4 + Apply this collection's write concern automatically to this operation + when connected to MongoDB >= 3.4. + .. versionadded:: 3.0 + + .. _createIndexes: https://mongodb.com/docs/manual/reference/command/createIndexes/ + """ + common.validate_list("indexes", indexes) + if comment is not None: + kwargs["comment"] = comment + return self.__create_indexes(indexes, session, **kwargs) + + @_csot.apply + def __create_indexes( + self, indexes: Sequence[IndexModel], session: Optional[ClientSession], **kwargs: Any + ) -> List[str]: + """Internal createIndexes helper. + + :Parameters: + - `indexes`: A list of :class:`~pymongo.operations.IndexModel` + instances. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `**kwargs` (optional): optional arguments to the createIndexes + command (like maxTimeMS) can be passed as keyword arguments. 
+ """ + names = [] + with self._conn_for_writes(session) as conn: + supports_quorum = conn.max_wire_version >= 9 + + def gen_indexes() -> Iterator[Mapping[str, Any]]: + for index in indexes: + if not isinstance(index, IndexModel): + raise TypeError( + f"{index!r} is not an instance of pymongo.operations.IndexModel" + ) + document = index.document + names.append(document["name"]) + yield document + + cmd = SON([("createIndexes", self.name), ("indexes", list(gen_indexes()))]) + cmd.update(kwargs) + if "commitQuorum" in kwargs and not supports_quorum: + raise ConfigurationError( + "Must be connected to MongoDB 4.4+ to use the " + "commitQuorum option for createIndexes" + ) + + self._command( + conn, + cmd, + read_preference=ReadPreference.PRIMARY, + codec_options=_UNICODE_REPLACE_CODEC_OPTIONS, + write_concern=self._write_concern_for(session), + session=session, + ) + return names + + def create_index( + self, + keys: _IndexKeyHint, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> str: + """Creates an index on this collection. + + Takes either a single key or a list containing (key, direction) pairs + or keys. If no direction is given, :data:`~pymongo.ASCENDING` will + be assumed. + The key(s) must be an instance of :class:`str`and the direction(s) must + be one of (:data:`~pymongo.ASCENDING`, :data:`~pymongo.DESCENDING`, + :data:`~pymongo.GEO2D`, :data:`~pymongo.GEOSPHERE`, + :data:`~pymongo.HASHED`, :data:`~pymongo.TEXT`). + + To create a single key ascending index on the key ``'mike'`` we just + use a string argument:: + + >>> my_collection.create_index("mike") + + For a compound index on ``'mike'`` descending and ``'eliot'`` + ascending we need to use a list of tuples:: + + >>> my_collection.create_index([("mike", pymongo.DESCENDING), + ... "eliot"]) + + All optional index creation parameters should be passed as + keyword arguments to this method. For example:: + + >>> my_collection.create_index([("mike", pymongo.DESCENDING)], + ... background=True) + + Valid options include, but are not limited to: + + - `name`: custom name to use for this index - if none is + given, a name will be generated. + - `unique`: if ``True``, creates a uniqueness constraint on the + index. + - `background`: if ``True``, this index should be created in the + background. + - `sparse`: if ``True``, omit from the index any documents that lack + the indexed field. + - `bucketSize`: for use with geoHaystack indexes. + Number of documents to group together within a certain proximity + to a given longitude and latitude. + - `min`: minimum value for keys in a :data:`~pymongo.GEO2D` + index. + - `max`: maximum value for keys in a :data:`~pymongo.GEO2D` + index. + - `expireAfterSeconds`: <int> Used to create an expiring (TTL) + collection. MongoDB will automatically delete documents from + this collection after <int> seconds. The indexed field must + be a UTC datetime or the data will not expire. + - `partialFilterExpression`: A document that specifies a filter for + a partial index. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `wildcardProjection`: Allows users to include or exclude specific + field paths from a `wildcard index`_ using the {"$**" : 1} key + pattern. Requires MongoDB >= 4.2. + - `hidden`: if ``True``, this index will be hidden from the query + planner and will not be evaluated as part of query plan + selection. Requires MongoDB >= 4.4. 
+ + See the MongoDB documentation for a full list of supported options by + server version. + + .. warning:: `dropDups` is not supported by MongoDB 3.0 or newer. The + option is silently ignored by the server and unique index builds + using the option will fail if a duplicate value is detected. + + .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of + this collection is automatically applied to this operation. + + :Parameters: + - `keys`: a single key or a list of (key, direction) + pairs specifying the index to create + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): any additional index creation + options (see the above list) should be passed as keyword + arguments. + + .. versionchanged:: 4.4 + Allow passing a list containing (key, direction) pairs + or keys for the ``keys`` parameter. + .. versionchanged:: 4.1 + Added ``comment`` parameter. + .. versionchanged:: 3.11 + Added the ``hidden`` option. + .. versionchanged:: 3.6 + Added ``session`` parameter. Added support for passing maxTimeMS + in kwargs. + .. versionchanged:: 3.4 + Apply this collection's write concern automatically to this operation + when connected to MongoDB >= 3.4. Support the `collation` option. + .. versionchanged:: 3.2 + Added partialFilterExpression to support partial indexes. + .. versionchanged:: 3.0 + Renamed `key_or_list` to `keys`. Removed the `cache_for` option. + :meth:`create_index` no longer caches index names. Removed support + for the drop_dups and bucket_size aliases. + + .. seealso:: The MongoDB documentation on `indexes <https://dochub.mongodb.org/core/indexes>`_. + + .. _wildcard index: https://dochub.mongodb.org/core/index-wildcard/ + """ + cmd_options = {} + if "maxTimeMS" in kwargs: + cmd_options["maxTimeMS"] = kwargs.pop("maxTimeMS") + if comment is not None: + cmd_options["comment"] = comment + index = IndexModel(keys, **kwargs) + return self.__create_indexes([index], session, **cmd_options)[0] + + def drop_indexes( + self, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> None: + """Drops all indexes on this collection. + + Can be used on non-existent collections or collections with no indexes. + Raises OperationFailure on an error. + + :Parameters: + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): optional arguments to the createIndexes + command (like maxTimeMS) can be passed as keyword arguments. + + .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of + this collection is automatically applied to this operation. + + .. versionchanged:: 3.6 + Added ``session`` parameter. Added support for arbitrary keyword + arguments. + + .. versionchanged:: 3.4 + Apply this collection's write concern automatically to this operation + when connected to MongoDB >= 3.4. + """ + if comment is not None: + kwargs["comment"] = comment + self.drop_index("*", session=session, **kwargs) + + @_csot.apply + def drop_index( + self, + index_or_name: _IndexKeyHint, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> None: + """Drops the specified index on this collection. + + Can be used on non-existent collections or collections with no + indexes. Raises OperationFailure on an error (e.g. 
trying to
+        drop an index that does not exist). `index_or_name`
+        can be either an index name (as returned by `create_index`),
+        or an index specifier (as passed to `create_index`). An index
+        specifier should be a list of (key, direction) pairs. Raises
+        TypeError if index is not an instance of (str, list).
+
+        .. warning::
+
+            If a custom name was used on index creation (by
+            passing the `name` parameter to :meth:`create_index`) the index
+            **must** be dropped by name.
+
+        :Parameters:
+          - `index_or_name`: index (or name of index) to drop
+          - `session` (optional): a
+            :class:`~pymongo.client_session.ClientSession`.
+          - `comment` (optional): A user-provided comment to attach to this
+            command.
+          - `**kwargs` (optional): optional arguments to the createIndexes
+            command (like maxTimeMS) can be passed as keyword arguments.
+
+        .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of
+           this collection is automatically applied to this operation.
+
+        .. versionchanged:: 3.6
+           Added ``session`` parameter. Added support for arbitrary keyword
+           arguments.
+
+        .. versionchanged:: 3.4
+           Apply this collection's write concern automatically to this operation
+           when connected to MongoDB >= 3.4.
+
+        """
+        name = index_or_name
+        if isinstance(index_or_name, list):
+            name = helpers._gen_index_name(index_or_name)
+
+        if not isinstance(name, str):
+            raise TypeError("index_or_name must be an instance of str or list")
+
+        cmd = SON([("dropIndexes", self.__name), ("index", name)])
+        cmd.update(kwargs)
+        if comment is not None:
+            cmd["comment"] = comment
+        with self._conn_for_writes(session) as conn:
+            self._command(
+                conn,
+                cmd,
+                read_preference=ReadPreference.PRIMARY,
+                allowable_errors=["ns not found", 26],
+                write_concern=self._write_concern_for(session),
+                session=session,
+            )
+
+    def list_indexes(
+        self,
+        session: Optional[ClientSession] = None,
+        comment: Optional[Any] = None,
+    ) -> CommandCursor[MutableMapping[str, Any]]:
+        """Get a cursor over the index documents for this collection.
+
+        >>> for index in db.test.list_indexes():
+        ...     print(index)
+        ...
+        SON([('v', 2), ('key', SON([('_id', 1)])), ('name', '_id_')])
+
+        :Parameters:
+          - `session` (optional): a
+            :class:`~pymongo.client_session.ClientSession`.
+          - `comment` (optional): A user-provided comment to attach to this
+            command.
+
+        :Returns:
+            An instance of :class:`~pymongo.command_cursor.CommandCursor`.
+
+        .. versionchanged:: 4.1
+           Added ``comment`` parameter.
+
+        .. versionchanged:: 3.6
+           Added ``session`` parameter.
+
+        .. versionadded:: 3.0
+        """
+        codec_options: CodecOptions = CodecOptions(SON)
+        coll = cast(
+            Collection[MutableMapping[str, Any]],
+            self.with_options(codec_options=codec_options, read_preference=ReadPreference.PRIMARY),
+        )
+        read_pref = (session and session._txn_read_preference()) or ReadPreference.PRIMARY
+        explicit_session = session is not None
+
+        def _cmd(
+            session: Optional[ClientSession],
+            server: Server,
+            conn: Connection,
+            read_preference: _ServerMode,
+        ) -> CommandCursor[MutableMapping[str, Any]]:
+            cmd = SON([("listIndexes", self.__name), ("cursor", {})])
+            if comment is not None:
+                cmd["comment"] = comment
+
+            try:
+                cursor = self._command(conn, cmd, read_preference, codec_options, session=session)[
+                    "cursor"
+                ]
+            except OperationFailure as exc:
+                # Ignore NamespaceNotFound errors to match the behavior
+                # of reading from *.system.indexes.
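+                # (Server error code 26 is NamespaceNotFound: the collection
+                # does not exist, so an empty first batch is synthesized
+                # below instead of raising.)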
+ if exc.code != 26: + raise + cursor = {"id": 0, "firstBatch": []} + cmd_cursor = CommandCursor( + coll, + cursor, + conn.address, + session=session, + explicit_session=explicit_session, + comment=cmd.get("comment"), + ) + cmd_cursor._maybe_pin_connection(conn) + return cmd_cursor + + with self.__database.client._tmp_session(session, False) as s: + return self.__database.client._retryable_read(_cmd, read_pref, s) + + def index_information( + self, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + ) -> MutableMapping[str, Any]: + """Get information on this collection's indexes. + + Returns a dictionary where the keys are index names (as + returned by create_index()) and the values are dictionaries + containing information about each index. The dictionary is + guaranteed to contain at least a single key, ``"key"`` which + is a list of (key, direction) pairs specifying the index (as + passed to create_index()). It will also contain any other + metadata about the indexes, except for the ``"ns"`` and + ``"name"`` keys, which are cleaned. Example output might look + like this: + + >>> db.test.create_index("x", unique=True) + 'x_1' + >>> db.test.index_information() + {'_id_': {'key': [('_id', 1)]}, + 'x_1': {'unique': True, 'key': [('x', 1)]}} + + :Parameters: + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + cursor = self.list_indexes(session=session, comment=comment) + info = {} + for index in cursor: + index["key"] = list(index["key"].items()) + index = dict(index) + info[index.pop("name")] = index + return info + + def list_search_indexes( + self, + name: Optional[str] = None, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> CommandCursor[Mapping[str, Any]]: + """Return a cursor over search indexes for the current collection. + + :Parameters: + - `name` (optional): If given, the name of the index to search + for. Only indexes with matching index names will be returned. + If not given, all search indexes for the current collection + will be returned. + - `session` (optional): a :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + + :Returns: + A :class:`~pymongo.command_cursor.CommandCursor` over the result + set. + + .. note:: requires a MongoDB server version 7.0+ Atlas cluster. + + .. versionadded:: 4.5 + """ + if name is None: + pipeline: _Pipeline = [{"$listSearchIndexes": {}}] + else: + pipeline = [{"$listSearchIndexes": {"name": name}}] + + coll = self.with_options( + codec_options=DEFAULT_CODEC_OPTIONS, read_preference=ReadPreference.PRIMARY + ) + cmd = _CollectionAggregationCommand( + coll, + CommandCursor, + pipeline, + kwargs, + explicit_session=session is not None, + user_fields={"cursor": {"firstBatch": 1}}, + ) + + return self.__database.client._retryable_read( + cmd.get_cursor, + cmd.get_read_preference(session), # type: ignore[arg-type] + session, + retryable=not cmd._performs_write, + ) + + def create_search_index( + self, + model: Union[Mapping[str, Any], SearchIndexModel], + session: Optional[ClientSession] = None, + comment: Any = None, + **kwargs: Any, + ) -> str: + """Create a single search index for the current collection. + + :Parameters: + - `model`: The model for the new search index. 
+ It can be given as a :class:`~pymongo.operations.SearchIndexModel` + instance or a dictionary with a model "definition" and optional + "name". + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): optional arguments to the createSearchIndexes + command (like maxTimeMS) can be passed as keyword arguments. + + :Returns: + The name of the new search index. + + .. note:: requires a MongoDB server version 7.0+ Atlas cluster. + + .. versionadded:: 4.5 + """ + if not isinstance(model, SearchIndexModel): + model = SearchIndexModel(model["definition"], model.get("name")) + return self.create_search_indexes([model], session, comment, **kwargs)[0] + + def create_search_indexes( + self, + models: List[SearchIndexModel], + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> List[str]: + """Create multiple search indexes for the current collection. + + :Parameters: + - `models`: A list of :class:`~pymongo.operations.SearchIndexModel` instances. + - `session` (optional): a :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): optional arguments to the createSearchIndexes + command (like maxTimeMS) can be passed as keyword arguments. + + :Returns: + A list of the newly created search index names. + + .. note:: requires a MongoDB server version 7.0+ Atlas cluster. + + .. versionadded:: 4.5 + """ + if comment is not None: + kwargs["comment"] = comment + + def gen_indexes() -> Iterator[Mapping[str, Any]]: + for index in models: + if not isinstance(index, SearchIndexModel): + raise TypeError( + f"{index!r} is not an instance of pymongo.operations.SearchIndexModel" + ) + yield index.document + + cmd = SON([("createSearchIndexes", self.name), ("indexes", list(gen_indexes()))]) + cmd.update(kwargs) + + with self._conn_for_writes(session) as conn: + resp = self._command( + conn, + cmd, + read_preference=ReadPreference.PRIMARY, + codec_options=_UNICODE_REPLACE_CODEC_OPTIONS, + ) + return [index["name"] for index in resp["indexesCreated"]] + + def drop_search_index( + self, + name: str, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> None: + """Delete a search index by index name. + + :Parameters: + - `name`: The name of the search index to be deleted. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): optional arguments to the dropSearchIndexes + command (like maxTimeMS) can be passed as keyword arguments. + + .. note:: requires a MongoDB server version 7.0+ Atlas cluster. + + .. versionadded:: 4.5 + """ + cmd = SON([("dropSearchIndex", self.__name), ("name", name)]) + cmd.update(kwargs) + if comment is not None: + cmd["comment"] = comment + with self._conn_for_writes(session) as conn: + self._command( + conn, + cmd, + read_preference=ReadPreference.PRIMARY, + allowable_errors=["ns not found", 26], + codec_options=_UNICODE_REPLACE_CODEC_OPTIONS, + ) + + def update_search_index( + self, + name: str, + definition: Mapping[str, Any], + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> None: + """Update a search index by replacing the existing index definition with the provided definition. 
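+
+        A hedged usage sketch (the index name and definition shown are
+        hypothetical)::
+
+          coll.update_search_index("my_index", {"mappings": {"dynamic": True}})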
+ + :Parameters: + - `name`: The name of the search index to be updated. + - `definition`: The new search index definition. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): optional arguments to the updateSearchIndexes + command (like maxTimeMS) can be passed as keyword arguments. + + .. note:: requires a MongoDB server version 7.0+ Atlas cluster. + + .. versionadded:: 4.5 + """ + cmd = SON([("updateSearchIndex", self.__name), ("name", name), ("definition", definition)]) + cmd.update(kwargs) + if comment is not None: + cmd["comment"] = comment + with self._conn_for_writes(session) as conn: + self._command( + conn, + cmd, + read_preference=ReadPreference.PRIMARY, + allowable_errors=["ns not found", 26], + codec_options=_UNICODE_REPLACE_CODEC_OPTIONS, + ) + + def options( + self, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + ) -> MutableMapping[str, Any]: + """Get the options set on this collection. + + Returns a dictionary of options and their values - see + :meth:`~pymongo.database.Database.create_collection` for more + information on the possible options. Returns an empty + dictionary if the collection has not been created yet. + + :Parameters: + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + dbo = self.__database.client.get_database( + self.__database.name, + self.codec_options, + self.read_preference, + self.write_concern, + self.read_concern, + ) + cursor = dbo.list_collections( + session=session, filter={"name": self.__name}, comment=comment + ) + + result = None + for doc in cursor: + result = doc + break + + if not result: + return {} + + options = result.get("options", {}) + assert options is not None + if "create" in options: + del options["create"] + + return options + + @_csot.apply + def _aggregate( + self, + aggregation_command: Type[_AggregationCommand], + pipeline: _Pipeline, + cursor_class: Type[CommandCursor], + session: Optional[ClientSession], + explicit_session: bool, + let: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> CommandCursor[_DocumentType]: + if comment is not None: + kwargs["comment"] = comment + cmd = aggregation_command( + self, + cursor_class, + pipeline, + kwargs, + explicit_session, + let, + user_fields={"cursor": {"firstBatch": 1}}, + ) + + return self.__database.client._retryable_read( + cmd.get_cursor, + cmd.get_read_preference(session), # type: ignore[arg-type] + session, + retryable=not cmd._performs_write, + ) + + def aggregate( + self, + pipeline: _Pipeline, + session: Optional[ClientSession] = None, + let: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> CommandCursor[_DocumentType]: + """Perform an aggregation using the aggregation framework on this + collection. + + The :meth:`aggregate` method obeys the :attr:`read_preference` of this + :class:`Collection`, except when ``$out`` or ``$merge`` are used on + MongoDB <5.0, in which case + :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY` is used. + + .. note:: This method does not support the 'explain' option. Please + use `PyMongoExplain <https://pypi.org/project/pymongoexplain/>`_ + instead. An example is included in the :ref:`aggregate-examples` + documentation. 
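+
+        A minimal illustrative pipeline (the ``status`` and ``qty`` field
+        names are hypothetical)::
+
+          >>> pipeline = [
+          ...     {"$match": {"status": "A"}},
+          ...     {"$group": {"_id": "$status", "total": {"$sum": "$qty"}}},
+          ... ]
+          >>> for doc in db.test.aggregate(pipeline):
+          ...     print(doc)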
+ + .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of + this collection is automatically applied to this operation. + + :Parameters: + - `pipeline`: a list of aggregation pipeline stages + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `**kwargs` (optional): extra `aggregate command`_ parameters. + + All optional `aggregate command`_ parameters should be passed as + keyword arguments to this method. Valid options include, but are not + limited to: + + - `allowDiskUse` (bool): Enables writing to temporary files. When set + to True, aggregation stages can write data to the _tmp subdirectory + of the --dbpath directory. The default is False. + - `maxTimeMS` (int): The maximum amount of time to allow the operation + to run in milliseconds. + - `batchSize` (int): The maximum number of documents to return per + batch. Ignored if the connected mongod or mongos does not support + returning aggregate results using a cursor. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `let` (dict): A dict of parameter names and values. Values must be + constant or closed expressions that do not reference document + fields. Parameters can then be accessed as variables in an + aggregate expression context (e.g. ``"$$var"``). This option is + only supported on MongoDB >= 5.0. + - `comment` (optional): A user-provided comment to attach to this + command. + + + :Returns: + A :class:`~pymongo.command_cursor.CommandCursor` over the result + set. + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + Added ``let`` parameter. + Support $merge and $out executing on secondaries according to the + collection's :attr:`read_preference`. + .. versionchanged:: 4.0 + Removed the ``useCursor`` option. + .. versionchanged:: 3.9 + Apply this collection's read concern to pipelines containing the + `$out` stage when connected to MongoDB >= 4.2. + Added support for the ``$merge`` pipeline stage. + Aggregations that write always use read preference + :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`. + .. versionchanged:: 3.6 + Added the `session` parameter. Added the `maxAwaitTimeMS` option. + Deprecated the `useCursor` option. + .. versionchanged:: 3.4 + Apply this collection's write concern automatically to this operation + when connected to MongoDB >= 3.4. Support the `collation` option. + .. versionchanged:: 3.0 + The :meth:`aggregate` method always returns a CommandCursor. The + pipeline argument must be a list. + + .. seealso:: :doc:`/examples/aggregation` + + .. _aggregate command: + https://mongodb.com/docs/manual/reference/command/aggregate + """ + with self.__database.client._tmp_session(session, close=False) as s: + return self._aggregate( + _CollectionAggregationCommand, + pipeline, + CommandCursor, + session=s, + explicit_session=session is not None, + let=let, + comment=comment, + **kwargs, + ) + + def aggregate_raw_batches( + self, + pipeline: _Pipeline, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> RawBatchCursor[_DocumentType]: + """Perform an aggregation and retrieve batches of raw BSON. + + Similar to the :meth:`aggregate` method but returns a + :class:`~pymongo.cursor.RawBatchCursor`. + + This example demonstrates how to work with raw batches, but in practice + raw batches should be passed to an external library that can decode + BSON into another data type, rather than used with PyMongo's + :mod:`bson` module. 
+ + >>> import bson + >>> cursor = db.test.aggregate_raw_batches([ + ... {'$project': {'x': {'$multiply': [2, '$x']}}}]) + >>> for batch in cursor: + ... print(bson.decode_all(batch)) + + .. note:: aggregate_raw_batches does not support auto encryption. + + .. versionchanged:: 3.12 + Added session support. + + .. versionadded:: 3.6 + """ + # OP_MSG is required to support encryption. + if self.__database.client._encrypter: + raise InvalidOperation("aggregate_raw_batches does not support auto encryption") + if comment is not None: + kwargs["comment"] = comment + with self.__database.client._tmp_session(session, close=False) as s: + return cast( + RawBatchCursor[_DocumentType], + self._aggregate( + _CollectionRawAggregationCommand, + pipeline, + RawBatchCommandCursor, + session=s, + explicit_session=session is not None, + **kwargs, + ), + ) + + def watch( + self, + pipeline: Optional[_Pipeline] = None, + full_document: Optional[str] = None, + resume_after: Optional[Mapping[str, Any]] = None, + max_await_time_ms: Optional[int] = None, + batch_size: Optional[int] = None, + collation: Optional[_CollationIn] = None, + start_at_operation_time: Optional[Timestamp] = None, + session: Optional[ClientSession] = None, + start_after: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + full_document_before_change: Optional[str] = None, + show_expanded_events: Optional[bool] = None, + ) -> CollectionChangeStream[_DocumentType]: + """Watch changes on this collection. + + Performs an aggregation with an implicit initial ``$changeStream`` + stage and returns a + :class:`~pymongo.change_stream.CollectionChangeStream` cursor which + iterates over changes on this collection. + + .. code-block:: python + + with db.collection.watch() as stream: + for change in stream: + print(change) + + The :class:`~pymongo.change_stream.CollectionChangeStream` iterable + blocks until the next change document is returned or an error is + raised. If the + :meth:`~pymongo.change_stream.CollectionChangeStream.next` method + encounters a network error when retrieving a batch from the server, + it will automatically attempt to recreate the cursor such that no + change events are missed. Any error encountered during the resume + attempt indicates there may be an outage and will be raised. + + .. code-block:: python + + try: + with db.collection.watch([{"$match": {"operationType": "insert"}}]) as stream: + for insert_change in stream: + print(insert_change) + except pymongo.errors.PyMongoError: + # The ChangeStream encountered an unrecoverable error or the + # resume attempt failed to recreate the cursor. + logging.error("...") + + For a precise description of the resume process see the + `change streams specification`_. + + .. note:: Using this helper method is preferred to directly calling + :meth:`~pymongo.collection.Collection.aggregate` with a + ``$changeStream`` stage, for the purpose of supporting + resumability. + + .. warning:: This Collection's :attr:`read_concern` must be + ``ReadConcern("majority")`` in order to use the ``$changeStream`` + stage. + + :Parameters: + - `pipeline` (optional): A list of aggregation pipeline stages to + append to an initial ``$changeStream`` stage. Not all + pipeline stages are valid after a ``$changeStream`` stage, see the + MongoDB documentation on change streams for the supported stages. + - `full_document` (optional): The fullDocument to pass as an option + to the ``$changeStream`` stage. Allowed values: 'updateLookup', + 'whenAvailable', 'required'. 
When set to 'updateLookup', the + change notification for partial updates will include both a delta + describing the changes to the document, as well as a copy of the + entire document that was changed from some time after the change + occurred. + - `full_document_before_change`: Allowed values: 'whenAvailable' + and 'required'. Change events may now result in a + 'fullDocumentBeforeChange' response field. + - `resume_after` (optional): A resume token. If provided, the + change stream will start returning changes that occur directly + after the operation specified in the resume token. A resume token + is the _id value of a change document. + - `max_await_time_ms` (optional): The maximum time in milliseconds + for the server to wait for changes before responding to a getMore + operation. + - `batch_size` (optional): The maximum number of documents to return + per batch. + - `collation` (optional): The :class:`~pymongo.collation.Collation` + to use for the aggregation. + - `start_at_operation_time` (optional): If provided, the resulting + change stream will only return changes that occurred at or after + the specified :class:`~bson.timestamp.Timestamp`. Requires + MongoDB >= 4.0. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `start_after` (optional): The same as `resume_after` except that + `start_after` can resume notifications after an invalidate event. + This option and `resume_after` are mutually exclusive. + - `comment` (optional): A user-provided comment to attach to this + command. + - `show_expanded_events` (optional): Include expanded events such as DDL events like `dropIndexes`. + + :Returns: + A :class:`~pymongo.change_stream.CollectionChangeStream` cursor. + + .. versionchanged:: 4.3 + Added `show_expanded_events` parameter. + + .. versionchanged:: 4.2 + Added ``full_document_before_change`` parameter. + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + + .. versionchanged:: 3.9 + Added the ``start_after`` parameter. + + .. versionchanged:: 3.7 + Added the ``start_at_operation_time`` parameter. + + .. versionadded:: 3.6 + + .. seealso:: The MongoDB documentation on `changeStreams <https://mongodb.com/docs/manual/changeStreams/>`_. + + .. _change streams specification: + https://github.com/mongodb/specifications/blob/master/source/change-streams/change-streams.rst + """ + return CollectionChangeStream( + self, + pipeline, + full_document, + resume_after, + max_await_time_ms, + batch_size, + collation, + start_at_operation_time, + session, + start_after, + comment, + full_document_before_change, + show_expanded_events, + ) + + @_csot.apply + def rename( + self, + new_name: str, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> MutableMapping[str, Any]: + """Rename this collection. + + If operating in auth mode, client must be authorized as an + admin to perform this operation. Raises :class:`TypeError` if + `new_name` is not an instance of :class:`str`. + Raises :class:`~pymongo.errors.InvalidName` + if `new_name` is not a valid collection name. + + :Parameters: + - `new_name`: new name for this collection + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): additional arguments to the rename command + may be passed as keyword arguments to this helper method + (i.e. ``dropTarget=True``) + + .. 
note:: The :attr:`~pymongo.collection.Collection.write_concern` of + this collection is automatically applied to this operation. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. versionchanged:: 3.4 + Apply this collection's write concern automatically to this operation + when connected to MongoDB >= 3.4. + + """ + if not isinstance(new_name, str): + raise TypeError("new_name must be an instance of str") + + if not new_name or ".." in new_name: + raise InvalidName("collection names cannot be empty") + if new_name[0] == "." or new_name[-1] == ".": + raise InvalidName("collection names must not start or end with '.'") + if "$" in new_name and not new_name.startswith("oplog.$main"): + raise InvalidName("collection names must not contain '$'") + + new_name = f"{self.__database.name}.{new_name}" + cmd = SON([("renameCollection", self.__full_name), ("to", new_name)]) + cmd.update(kwargs) + if comment is not None: + cmd["comment"] = comment + write_concern = self._write_concern_for_cmd(cmd, session) + + with self._conn_for_writes(session) as conn: + with self.__database.client._tmp_session(session) as s: + return conn.command( + "admin", + cmd, + write_concern=write_concern, + parse_write_concern_error=True, + session=s, + client=self.__database.client, + ) + + def distinct( + self, + key: str, + filter: Optional[Mapping[str, Any]] = None, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> List: + """Get a list of distinct values for `key` among all documents + in this collection. + + Raises :class:`TypeError` if `key` is not an instance of + :class:`str`. + + All optional distinct parameters should be passed as keyword arguments + to this method. Valid options include: + + - `maxTimeMS` (int): The maximum amount of time to allow the count + command to run, in milliseconds. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + + The :meth:`distinct` method obeys the :attr:`read_preference` of + this :class:`Collection`. + + :Parameters: + - `key`: name of the field for which we want to get the distinct + values + - `filter` (optional): A query document that specifies the documents + from which to retrieve the distinct values. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): See list of options above. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. versionchanged:: 3.4 + Support the `collation` option. 
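+
+        A hedged usage sketch (the ``status`` and ``qty`` field names are
+        hypothetical)::
+
+          >>> db.test.distinct("status", {"qty": {"$gt": 0}})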
+ + """ + if not isinstance(key, str): + raise TypeError("key must be an instance of str") + cmd = SON([("distinct", self.__name), ("key", key)]) + if filter is not None: + if "query" in kwargs: + raise ConfigurationError("can't pass both filter and query") + kwargs["query"] = filter + collation = validate_collation_or_none(kwargs.pop("collation", None)) + cmd.update(kwargs) + if comment is not None: + cmd["comment"] = comment + + def _cmd( + session: Optional[ClientSession], + server: Server, + conn: Connection, + read_preference: Optional[_ServerMode], + ) -> List: + return self._command( + conn, + cmd, + read_preference=read_preference, + read_concern=self.read_concern, + collation=collation, + session=session, + user_fields={"values": 1}, + )["values"] + + return self._retryable_non_cursor_read(_cmd, session) + + def _write_concern_for_cmd( + self, cmd: Mapping[str, Any], session: Optional[ClientSession] + ) -> WriteConcern: + raw_wc = cmd.get("writeConcern") + if raw_wc is not None: + return WriteConcern(**raw_wc) + else: + return self._write_concern_for(session) + + def __find_and_modify( + self, + filter: Mapping[str, Any], + projection: Optional[Union[Mapping[str, Any], Iterable[str]]], + sort: Optional[_IndexList], + upsert: Optional[bool] = None, + return_document: bool = ReturnDocument.BEFORE, + array_filters: Optional[Sequence[Mapping[str, Any]]] = None, + hint: Optional[_IndexKeyHint] = None, + session: Optional[ClientSession] = None, + let: Optional[Mapping] = None, + **kwargs: Any, + ) -> Any: + """Internal findAndModify helper.""" + common.validate_is_mapping("filter", filter) + if not isinstance(return_document, bool): + raise ValueError( + "return_document must be ReturnDocument.BEFORE or ReturnDocument.AFTER" + ) + collation = validate_collation_or_none(kwargs.pop("collation", None)) + cmd = SON([("findAndModify", self.__name), ("query", filter), ("new", return_document)]) + if let is not None: + common.validate_is_mapping("let", let) + cmd["let"] = let + cmd.update(kwargs) + if projection is not None: + cmd["fields"] = helpers._fields_list_to_dict(projection, "projection") + if sort is not None: + cmd["sort"] = helpers._index_document(sort) + if upsert is not None: + common.validate_boolean("upsert", upsert) + cmd["upsert"] = upsert + if hint is not None: + if not isinstance(hint, str): + hint = helpers._index_document(hint) # type: ignore[assignment] + + write_concern = self._write_concern_for_cmd(cmd, session) + + def _find_and_modify( + session: Optional[ClientSession], conn: Connection, retryable_write: bool + ) -> Any: + acknowledged = write_concern.acknowledged + if array_filters is not None: + if not acknowledged: + raise ConfigurationError( + "arrayFilters is unsupported for unacknowledged writes." + ) + cmd["arrayFilters"] = list(array_filters) + if hint is not None: + if conn.max_wire_version < 8: + raise ConfigurationError( + "Must be connected to MongoDB 4.2+ to use hint on find and modify commands." + ) + elif not acknowledged and conn.max_wire_version < 9: + raise ConfigurationError( + "Must be connected to MongoDB 4.4+ to use hint on unacknowledged find and modify commands." 
+ ) + cmd["hint"] = hint + out = self._command( + conn, + cmd, + read_preference=ReadPreference.PRIMARY, + write_concern=write_concern, + collation=collation, + session=session, + retryable_write=retryable_write, + user_fields=_FIND_AND_MODIFY_DOC_FIELDS, + ) + _check_write_command_response(out) + + return out.get("value") + + return self.__database.client._retryable_write( + write_concern.acknowledged, _find_and_modify, session + ) + + def find_one_and_delete( + self, + filter: Mapping[str, Any], + projection: Optional[Union[Mapping[str, Any], Iterable[str]]] = None, + sort: Optional[_IndexList] = None, + hint: Optional[_IndexKeyHint] = None, + session: Optional[ClientSession] = None, + let: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> _DocumentType: + """Finds a single document and deletes it, returning the document. + + >>> db.test.count_documents({'x': 1}) + 2 + >>> db.test.find_one_and_delete({'x': 1}) + {'x': 1, '_id': ObjectId('54f4e12bfba5220aa4d6dee8')} + >>> db.test.count_documents({'x': 1}) + 1 + + If multiple documents match *filter*, a *sort* can be applied. + + >>> for doc in db.test.find({'x': 1}): + ... print(doc) + ... + {'x': 1, '_id': 0} + {'x': 1, '_id': 1} + {'x': 1, '_id': 2} + >>> db.test.find_one_and_delete( + ... {'x': 1}, sort=[('_id', pymongo.DESCENDING)]) + {'x': 1, '_id': 2} + + The *projection* option can be used to limit the fields returned. + + >>> db.test.find_one_and_delete({'x': 1}, projection={'_id': False}) + {'x': 1} + + :Parameters: + - `filter`: A query that matches the document to delete. + - `projection` (optional): a list of field names that should be + returned in the result document or a mapping specifying the fields + to include or exclude. If `projection` is a list "_id" will + always be returned. Use a mapping to exclude fields from + the result (e.g. projection={'_id': False}). + - `sort` (optional): a list of (key, direction) pairs + specifying the sort order for the query. If multiple documents + match the query, they are sorted and the first is deleted. + - `hint` (optional): An index to use to support the query predicate + specified either by its string name, or in the same format as + passed to :meth:`~pymongo.collection.Collection.create_index` + (e.g. ``[('field', ASCENDING)]``). This option is only supported + on MongoDB 4.4 and above. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `let` (optional): Map of parameter names and values. Values must be + constant or closed expressions that do not reference document + fields. Parameters can then be accessed as variables in an + aggregate expression context (e.g. "$$var"). + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): additional command arguments can be passed + as keyword arguments (for example maxTimeMS can be used with + recent server versions). + + .. versionchanged:: 4.1 + Added ``let`` parameter. + .. versionchanged:: 3.11 + Added ``hint`` parameter. + .. versionchanged:: 3.6 + Added ``session`` parameter. + .. versionchanged:: 3.2 + Respects write concern. + + .. warning:: Starting in PyMongo 3.2, this command uses the + :class:`~pymongo.write_concern.WriteConcern` of this + :class:`~pymongo.collection.Collection` when connected to MongoDB >= + 3.2. Note that using an elevated write concern with this command may + be slower compared to using the default write concern. + + .. versionchanged:: 3.4 + Added the `collation` option. + .. 
versionadded:: 3.0 + """ + kwargs["remove"] = True + if comment is not None: + kwargs["comment"] = comment + return self.__find_and_modify( + filter, projection, sort, let=let, hint=hint, session=session, **kwargs + ) + + def find_one_and_replace( + self, + filter: Mapping[str, Any], + replacement: Mapping[str, Any], + projection: Optional[Union[Mapping[str, Any], Iterable[str]]] = None, + sort: Optional[_IndexList] = None, + upsert: bool = False, + return_document: bool = ReturnDocument.BEFORE, + hint: Optional[_IndexKeyHint] = None, + session: Optional[ClientSession] = None, + let: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> _DocumentType: + """Finds a single document and replaces it, returning either the + original or the replaced document. + + The :meth:`find_one_and_replace` method differs from + :meth:`find_one_and_update` by replacing the document matched by + *filter*, rather than modifying the existing document. + + >>> for doc in db.test.find({}): + ... print(doc) + ... + {'x': 1, '_id': 0} + {'x': 1, '_id': 1} + {'x': 1, '_id': 2} + >>> db.test.find_one_and_replace({'x': 1}, {'y': 1}) + {'x': 1, '_id': 0} + >>> for doc in db.test.find({}): + ... print(doc) + ... + {'y': 1, '_id': 0} + {'x': 1, '_id': 1} + {'x': 1, '_id': 2} + + :Parameters: + - `filter`: A query that matches the document to replace. + - `replacement`: The replacement document. + - `projection` (optional): A list of field names that should be + returned in the result document or a mapping specifying the fields + to include or exclude. If `projection` is a list "_id" will + always be returned. Use a mapping to exclude fields from + the result (e.g. projection={'_id': False}). + - `sort` (optional): a list of (key, direction) pairs + specifying the sort order for the query. If multiple documents + match the query, they are sorted and the first is replaced. + - `upsert` (optional): When ``True``, inserts a new document if no + document matches the query. Defaults to ``False``. + - `return_document`: If + :attr:`ReturnDocument.BEFORE` (the default), + returns the original document before it was replaced, or ``None`` + if no document matches. If + :attr:`ReturnDocument.AFTER`, returns the replaced + or inserted document. + - `hint` (optional): An index to use to support the query + predicate specified either by its string name, or in the same + format as passed to + :meth:`~pymongo.collection.Collection.create_index` (e.g. + ``[('field', ASCENDING)]``). This option is only supported on + MongoDB 4.4 and above. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `let` (optional): Map of parameter names and values. Values must be + constant or closed expressions that do not reference document + fields. Parameters can then be accessed as variables in an + aggregate expression context (e.g. "$$var"). + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): additional command arguments can be passed + as keyword arguments (for example maxTimeMS can be used with + recent server versions). + + .. versionchanged:: 4.1 + Added ``let`` parameter. + .. versionchanged:: 3.11 + Added the ``hint`` option. + .. versionchanged:: 3.6 + Added ``session`` parameter. + .. versionchanged:: 3.4 + Added the ``collation`` option. + .. versionchanged:: 3.2 + Respects write concern. + + .. 
warning:: Starting in PyMongo 3.2, this command uses the
+           :class:`~pymongo.write_concern.WriteConcern` of this
+           :class:`~pymongo.collection.Collection` when connected to MongoDB >=
+           3.2. Note that using an elevated write concern with this command may
+           be slower compared to using the default write concern.
+
+        .. versionadded:: 3.0
+        """
+        common.validate_ok_for_replace(replacement)
+        kwargs["update"] = replacement
+        if comment is not None:
+            kwargs["comment"] = comment
+        return self.__find_and_modify(
+            filter,
+            projection,
+            sort,
+            upsert,
+            return_document,
+            let=let,
+            hint=hint,
+            session=session,
+            **kwargs,
+        )
+
+    def find_one_and_update(
+        self,
+        filter: Mapping[str, Any],
+        update: Union[Mapping[str, Any], _Pipeline],
+        projection: Optional[Union[Mapping[str, Any], Iterable[str]]] = None,
+        sort: Optional[_IndexList] = None,
+        upsert: bool = False,
+        return_document: bool = ReturnDocument.BEFORE,
+        array_filters: Optional[Sequence[Mapping[str, Any]]] = None,
+        hint: Optional[_IndexKeyHint] = None,
+        session: Optional[ClientSession] = None,
+        let: Optional[Mapping[str, Any]] = None,
+        comment: Optional[Any] = None,
+        **kwargs: Any,
+    ) -> _DocumentType:
+        """Finds a single document and updates it, returning either the
+        original or the updated document.
+
+        >>> db.test.find_one_and_update(
+        ...     {'_id': 665}, {'$inc': {'count': 1}, '$set': {'done': True}})
+        {'_id': 665, 'done': False, 'count': 25}
+
+        Returns ``None`` if no document matches the filter.
+
+        >>> db.test.find_one_and_update(
+        ...     {'_exists': False}, {'$inc': {'count': 1}})
+
+        When the filter matches, by default :meth:`find_one_and_update`
+        returns the original version of the document before the update was
+        applied. To return the updated (or inserted in the case of
+        *upsert*) version of the document instead, use the *return_document*
+        option.
+
+        >>> from pymongo import ReturnDocument
+        >>> db.example.find_one_and_update(
+        ...     {'_id': 'userid'},
+        ...     {'$inc': {'seq': 1}},
+        ...     return_document=ReturnDocument.AFTER)
+        {'_id': 'userid', 'seq': 1}
+
+        You can limit the fields returned with the *projection* option.
+
+        >>> db.example.find_one_and_update(
+        ...     {'_id': 'userid'},
+        ...     {'$inc': {'seq': 1}},
+        ...     projection={'seq': True, '_id': False},
+        ...     return_document=ReturnDocument.AFTER)
+        {'seq': 2}
+
+        The *upsert* option can be used to create the document if it doesn't
+        already exist.
+
+        >>> db.example.delete_many({}).deleted_count
+        1
+        >>> db.example.find_one_and_update(
+        ...     {'_id': 'userid'},
+        ...     {'$inc': {'seq': 1}},
+        ...     projection={'seq': True, '_id': False},
+        ...     upsert=True,
+        ...     return_document=ReturnDocument.AFTER)
+        {'seq': 1}
+
+        If multiple documents match *filter*, a *sort* can be applied.
+
+        >>> for doc in db.test.find({'done': True}):
+        ...     print(doc)
+        ...
+        {'_id': 665, 'done': True, 'result': {'count': 26}}
+        {'_id': 701, 'done': True, 'result': {'count': 17}}
+        >>> db.test.find_one_and_update(
+        ...     {'done': True},
+        ...     {'$set': {'final': True}},
+        ...     sort=[('_id', pymongo.DESCENDING)])
+        {'_id': 701, 'done': True, 'result': {'count': 17}}
+
+        :Parameters:
+          - `filter`: A query that matches the document to update.
+          - `update`: The update operations to apply.
+          - `projection` (optional): A list of field names that should be
+            returned in the result document or a mapping specifying the fields
+            to include or exclude. If `projection` is a list "_id" will
+            always be returned. Use a dict to exclude fields from
+            the result (e.g. projection={'_id': False}).
+ - `sort` (optional): a list of (key, direction) pairs + specifying the sort order for the query. If multiple documents + match the query, they are sorted and the first is updated. + - `upsert` (optional): When ``True``, inserts a new document if no + document matches the query. Defaults to ``False``. + - `return_document`: If + :attr:`ReturnDocument.BEFORE` (the default), + returns the original document before it was updated. If + :attr:`ReturnDocument.AFTER`, returns the updated + or inserted document. + - `array_filters` (optional): A list of filters specifying which + array elements an update should apply. + - `hint` (optional): An index to use to support the query + predicate specified either by its string name, or in the same + format as passed to + :meth:`~pymongo.collection.Collection.create_index` (e.g. + ``[('field', ASCENDING)]``). This option is only supported on + MongoDB 4.4 and above. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `let` (optional): Map of parameter names and values. Values must be + constant or closed expressions that do not reference document + fields. Parameters can then be accessed as variables in an + aggregate expression context (e.g. "$$var"). + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): additional command arguments can be passed + as keyword arguments (for example maxTimeMS can be used with + recent server versions). + + .. versionchanged:: 3.11 + Added the ``hint`` option. + .. versionchanged:: 3.9 + Added the ability to accept a pipeline as the ``update``. + .. versionchanged:: 3.6 + Added the ``array_filters`` and ``session`` options. + .. versionchanged:: 3.4 + Added the ``collation`` option. + .. versionchanged:: 3.2 + Respects write concern. + + .. warning:: Starting in PyMongo 3.2, this command uses the + :class:`~pymongo.write_concern.WriteConcern` of this + :class:`~pymongo.collection.Collection` when connected to MongoDB >= + 3.2. Note that using an elevated write concern with this command may + be slower compared to using the default write concern. + + .. versionadded:: 3.0 + """ + common.validate_ok_for_update(update) + common.validate_list_or_none("array_filters", array_filters) + kwargs["update"] = update + if comment is not None: + kwargs["comment"] = comment + return self.__find_and_modify( + filter, + projection, + sort, + upsert, + return_document, + array_filters, + hint=hint, + let=let, + session=session, + **kwargs, + ) + + # See PYTHON-3084. + __iter__ = None + + def __next__(self) -> NoReturn: + raise TypeError("'Collection' object is not iterable") + + next = __next__ + + def __call__(self, *args: Any, **kwargs: Any) -> NoReturn: + """This is only here so that some API misusages are easier to debug.""" + if "." not in self.__name: + raise TypeError( + "'Collection' object is not callable. If you " + "meant to call the '%s' method on a 'Database' " + "object it is failing because no such method " + "exists." % self.__name + ) + raise TypeError( + "'Collection' object is not callable. If you meant to " + "call the '%s' method on a 'Collection' object it is " + "failing because no such method exists." 
% self.__name.split(".")[-1] + ) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/command_cursor.py b/backend/test/lib/python3.8/site-packages/pymongo/command_cursor.py new file mode 100644 index 0000000000000000000000000000000000000000..777b88b083cde499bad0e57ced2ec13aa32f9f04 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/command_cursor.py @@ -0,0 +1,401 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""CommandCursor class to iterate over command results.""" +from __future__ import annotations + +from collections import deque +from typing import ( + TYPE_CHECKING, + Any, + Generic, + Iterator, + List, + Mapping, + NoReturn, + Optional, + Sequence, + Union, +) + +from bson import CodecOptions, _convert_raw_document_lists_to_streams +from pymongo.cursor import _CURSOR_CLOSED_ERRORS, _ConnectionManager +from pymongo.errors import ConnectionFailure, InvalidOperation, OperationFailure +from pymongo.message import _CursorAddress, _GetMore, _OpMsg, _OpReply, _RawBatchGetMore +from pymongo.response import PinnedResponse +from pymongo.typings import _Address, _DocumentOut, _DocumentType + +if TYPE_CHECKING: + from pymongo.client_session import ClientSession + from pymongo.collection import Collection + from pymongo.pool import Connection + + +class CommandCursor(Generic[_DocumentType]): + """A cursor / iterator over command cursors.""" + + _getmore_class = _GetMore + + def __init__( + self, + collection: Collection[_DocumentType], + cursor_info: Mapping[str, Any], + address: Optional[_Address], + batch_size: int = 0, + max_await_time_ms: Optional[int] = None, + session: Optional[ClientSession] = None, + explicit_session: bool = False, + comment: Any = None, + ) -> None: + """Create a new command cursor.""" + self.__sock_mgr: Any = None + self.__collection: Collection[_DocumentType] = collection + self.__id = cursor_info["id"] + self.__data = deque(cursor_info["firstBatch"]) + self.__postbatchresumetoken: Optional[Mapping[str, Any]] = cursor_info.get( + "postBatchResumeToken" + ) + self.__address = address + self.__batch_size = batch_size + self.__max_await_time_ms = max_await_time_ms + self.__session = session + self.__explicit_session = explicit_session + self.__killed = self.__id == 0 + self.__comment = comment + if self.__killed: + self.__end_session(True) + + if "ns" in cursor_info: + self.__ns = cursor_info["ns"] + else: + self.__ns = collection.full_name + + self.batch_size(batch_size) + + if not isinstance(max_await_time_ms, int) and max_await_time_ms is not None: + raise TypeError("max_await_time_ms must be an integer or None") + + def __del__(self) -> None: + self.__die() + + def __die(self, synchronous: bool = False) -> None: + """Closes this cursor.""" + already_killed = self.__killed + self.__killed = True + if self.__id and not already_killed: + cursor_id = self.__id + assert self.__address is not None + address = _CursorAddress(self.__address, self.__ns) + else: + # Skip killCursors. 
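+            # (The cursor id is 0 or the cursor was already killed, so there
+            # is no server-side cursor left to clean up.)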
+ cursor_id = 0 + address = None + self.__collection.database.client._cleanup_cursor( + synchronous, + cursor_id, + address, + self.__sock_mgr, + self.__session, + self.__explicit_session, + ) + if not self.__explicit_session: + self.__session = None + self.__sock_mgr = None + + def __end_session(self, synchronous: bool) -> None: + if self.__session and not self.__explicit_session: + self.__session._end_session(lock=synchronous) + self.__session = None + + def close(self) -> None: + """Explicitly close / kill this cursor.""" + self.__die(True) + + def batch_size(self, batch_size: int) -> "CommandCursor[_DocumentType]": + """Limits the number of documents returned in one batch. Each batch + requires a round trip to the server. It can be adjusted to optimize + performance and limit data transfer. + + .. note:: batch_size can not override MongoDB's internal limits on the + amount of data it will return to the client in a single batch (i.e + if you set batch size to 1,000,000,000, MongoDB will currently only + return 4-16MB of results per batch). + + Raises :exc:`TypeError` if `batch_size` is not an integer. + Raises :exc:`ValueError` if `batch_size` is less than ``0``. + + :Parameters: + - `batch_size`: The size of each batch of results requested. + """ + if not isinstance(batch_size, int): + raise TypeError("batch_size must be an integer") + if batch_size < 0: + raise ValueError("batch_size must be >= 0") + + self.__batch_size = batch_size == 1 and 2 or batch_size + return self + + def _has_next(self) -> bool: + """Returns `True` if the cursor has documents remaining from the + previous batch. + """ + return len(self.__data) > 0 + + @property + def _post_batch_resume_token(self) -> Optional[Mapping[str, Any]]: + """Retrieve the postBatchResumeToken from the response to a + changeStream aggregate or getMore. + """ + return self.__postbatchresumetoken + + def _maybe_pin_connection(self, conn: Connection) -> None: + client = self.__collection.database.client + if not client._should_pin_cursor(self.__session): + return + if not self.__sock_mgr: + conn.pin_cursor() + conn_mgr = _ConnectionManager(conn, False) + # Ensure the connection gets returned when the entire result is + # returned in the first batch. + if self.__id == 0: + conn_mgr.close() + else: + self.__sock_mgr = conn_mgr + + def __send_message(self, operation: _GetMore) -> None: + """Send a getmore message and handle the response.""" + client = self.__collection.database.client + try: + response = client._run_operation( + operation, self._unpack_response, address=self.__address + ) + except OperationFailure as exc: + if exc.code in _CURSOR_CLOSED_ERRORS: + # Don't send killCursors because the cursor is already closed. + self.__killed = True + # Return the session and pinned connection, if necessary. + self.close() + raise + except ConnectionFailure: + # Don't send killCursors because the cursor is already closed. + self.__killed = True + # Return the session and pinned connection, if necessary. 
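+            # close() also unpins the connection and returns an implicit
+            # session to the pool via _cleanup_cursor.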
+ self.close() + raise + except Exception: + self.close() + raise + + if isinstance(response, PinnedResponse): + if not self.__sock_mgr: + self.__sock_mgr = _ConnectionManager(response.conn, response.more_to_come) + if response.from_command: + cursor = response.docs[0]["cursor"] + documents = cursor["nextBatch"] + self.__postbatchresumetoken = cursor.get("postBatchResumeToken") + self.__id = cursor["id"] + else: + documents = response.docs + assert isinstance(response.data, _OpReply) + self.__id = response.data.cursor_id + + if self.__id == 0: + self.close() + self.__data = deque(documents) + + def _unpack_response( + self, + response: Union[_OpReply, _OpMsg], + cursor_id: Optional[int], + codec_options: CodecOptions[Mapping[str, Any]], + user_fields: Optional[Mapping[str, Any]] = None, + legacy_response: bool = False, + ) -> Sequence[_DocumentOut]: + return response.unpack_response(cursor_id, codec_options, user_fields, legacy_response) + + def _refresh(self) -> int: + """Refreshes the cursor with more data from the server. + + Returns the length of self.__data after refresh. Will exit early if + self.__data is already non-empty. Raises OperationFailure when the + cursor cannot be refreshed due to an error on the query. + """ + if len(self.__data) or self.__killed: + return len(self.__data) + + if self.__id: # Get More + dbname, collname = self.__ns.split(".", 1) + read_pref = self.__collection._read_preference_for(self.session) + self.__send_message( + self._getmore_class( + dbname, + collname, + self.__batch_size, + self.__id, + self.__collection.codec_options, + read_pref, + self.__session, + self.__collection.database.client, + self.__max_await_time_ms, + self.__sock_mgr, + False, + self.__comment, + ) + ) + else: # Cursor id is zero nothing else to return + self.__die(True) + + return len(self.__data) + + @property + def alive(self) -> bool: + """Does this cursor have the potential to return more data? + + Even if :attr:`alive` is ``True``, :meth:`next` can raise + :exc:`StopIteration`. Best to use a for loop:: + + for doc in collection.aggregate(pipeline): + print(doc) + + .. note:: :attr:`alive` can be True while iterating a cursor from + a failed server. In this case :attr:`alive` will return False after + :meth:`next` fails to retrieve the next batch of results from the + server. + """ + return bool(len(self.__data) or (not self.__killed)) + + @property + def cursor_id(self) -> int: + """Returns the id of the cursor.""" + return self.__id + + @property + def address(self) -> Optional[_Address]: + """The (host, port) of the server used, or None. + + .. versionadded:: 3.0 + """ + return self.__address + + @property + def session(self) -> Optional[ClientSession]: + """The cursor's :class:`~pymongo.client_session.ClientSession`, or None. + + .. versionadded:: 3.6 + """ + if self.__explicit_session: + return self.__session + return None + + def __iter__(self) -> Iterator[_DocumentType]: + return self + + def next(self) -> _DocumentType: + """Advance the cursor.""" + # Block until a document is returnable. 
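+        # Each pass either pops a locally buffered document or issues at most
+        # one getMore; StopIteration is raised once the cursor is exhausted.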
+ while self.alive: + doc = self._try_next(True) + if doc is not None: + return doc + + raise StopIteration + + __next__ = next + + def _try_next(self, get_more_allowed: bool) -> Optional[_DocumentType]: + """Advance the cursor blocking for at most one getMore command.""" + if not len(self.__data) and not self.__killed and get_more_allowed: + self._refresh() + if len(self.__data): + return self.__data.popleft() + else: + return None + + def try_next(self) -> Optional[_DocumentType]: + """Advance the cursor without blocking indefinitely. + + This method returns the next document without waiting + indefinitely for data. + + If no document is cached locally then this method runs a single + getMore command. If the getMore yields any documents, the next + document is returned, otherwise, if the getMore returns no documents + (because there is no additional data) then ``None`` is returned. + + :Returns: + The next document or ``None`` when no document is available + after running a single getMore or when the cursor is closed. + + .. versionadded:: 4.5 + """ + return self._try_next(get_more_allowed=True) + + def __enter__(self) -> "CommandCursor[_DocumentType]": + return self + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: + self.close() + + +class RawBatchCommandCursor(CommandCursor, Generic[_DocumentType]): + _getmore_class = _RawBatchGetMore + + def __init__( + self, + collection: Collection[_DocumentType], + cursor_info: Mapping[str, Any], + address: Optional[_Address], + batch_size: int = 0, + max_await_time_ms: Optional[int] = None, + session: Optional[ClientSession] = None, + explicit_session: bool = False, + comment: Any = None, + ) -> None: + """Create a new cursor / iterator over raw batches of BSON data. + + Should not be called directly by application developers - + see :meth:`~pymongo.collection.Collection.aggregate_raw_batches` + instead. + + .. seealso:: The MongoDB documentation on `cursors <https://dochub.mongodb.org/core/cursors>`_. + """ + assert not cursor_info.get("firstBatch") + super().__init__( + collection, + cursor_info, + address, + batch_size, + max_await_time_ms, + session, + explicit_session, + comment, + ) + + def _unpack_response( # type: ignore[override] + self, + response: Union[_OpReply, _OpMsg], + cursor_id: Optional[int], + codec_options: CodecOptions, + user_fields: Optional[Mapping[str, Any]] = None, + legacy_response: bool = False, + ) -> List[Mapping[str, Any]]: + raw_response = response.raw_response(cursor_id, user_fields=user_fields) + if not legacy_response: + # OP_MSG returns firstBatch/nextBatch documents as a BSON array + # Re-assemble the array of documents into a document stream + _convert_raw_document_lists_to_streams(raw_response[0]) + return raw_response # type: ignore[return-value] + + def __getitem__(self, index: int) -> NoReturn: + raise InvalidOperation("Cannot call __getitem__ on RawBatchCursor") diff --git a/backend/test/lib/python3.8/site-packages/pymongo/common.py b/backend/test/lib/python3.8/site-packages/pymongo/common.py new file mode 100644 index 0000000000000000000000000000000000000000..a791a5e44fd80dbf82a590cef52da5304357c7d8 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/common.py @@ -0,0 +1,1048 @@ +# Copyright 2011-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. 
You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + + +"""Functions and classes common to multiple pymongo modules.""" +from __future__ import annotations + +import datetime +import inspect +import warnings +from collections import OrderedDict, abc +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterator, + List, + Mapping, + MutableMapping, + NoReturn, + Optional, + Sequence, + Tuple, + Type, + Union, + overload, +) +from urllib.parse import unquote_plus + +from bson import SON +from bson.binary import UuidRepresentation +from bson.codec_options import CodecOptions, DatetimeConversion, TypeRegistry +from bson.raw_bson import RawBSONDocument +from pymongo.auth import MECHANISMS +from pymongo.compression_support import ( + validate_compressors, + validate_zlib_compression_level, +) +from pymongo.driver_info import DriverInfo +from pymongo.errors import ConfigurationError +from pymongo.monitoring import _validate_event_listeners +from pymongo.read_concern import ReadConcern +from pymongo.read_preferences import _MONGOS_MODES, _ServerMode +from pymongo.server_api import ServerApi +from pymongo.write_concern import DEFAULT_WRITE_CONCERN, WriteConcern, validate_boolean + +if TYPE_CHECKING: + from pymongo.client_session import ClientSession + + +ORDERED_TYPES: Sequence[Type] = (SON, OrderedDict) + +# Defaults until we connect to a server and get updated limits. +MAX_BSON_SIZE = 16 * (1024**2) +MAX_MESSAGE_SIZE: int = 2 * MAX_BSON_SIZE +MIN_WIRE_VERSION = 0 +MAX_WIRE_VERSION = 0 +MAX_WRITE_BATCH_SIZE = 1000 + +# What this version of PyMongo supports. +MIN_SUPPORTED_SERVER_VERSION = "3.6" +MIN_SUPPORTED_WIRE_VERSION = 6 +MAX_SUPPORTED_WIRE_VERSION = 21 + +# Frequency to call hello on servers, in seconds. +HEARTBEAT_FREQUENCY = 10 + +# Frequency to clean up unclosed cursors, in seconds. +# See MongoClient._process_kill_cursors. +KILL_CURSOR_FREQUENCY = 1 + +# Frequency to process events queue, in seconds. +EVENTS_QUEUE_FREQUENCY = 1 + +# How long to wait, in seconds, for a suitable server to be found before +# aborting an operation. For example, if the client attempts an insert +# during a replica set election, SERVER_SELECTION_TIMEOUT governs the +# longest it is willing to wait for a new primary to be found. +SERVER_SELECTION_TIMEOUT = 30 + +# Spec requires at least 500ms between hello calls. +MIN_HEARTBEAT_INTERVAL = 0.5 + +# Spec requires at least 60s between SRV rescans. +MIN_SRV_RESCAN_INTERVAL = 60 + +# Default connectTimeout in seconds. +CONNECT_TIMEOUT = 20.0 + +# Default value for maxPoolSize. +MAX_POOL_SIZE = 100 + +# Default value for minPoolSize. +MIN_POOL_SIZE = 0 + +# The maximum number of concurrent connection creation attempts per pool. +MAX_CONNECTING = 2 + +# Default value for maxIdleTimeMS. +MAX_IDLE_TIME_MS: Optional[int] = None + +# Default value for maxIdleTimeMS in seconds. +MAX_IDLE_TIME_SEC: Optional[int] = None + +# Default value for waitQueueTimeoutMS in seconds. +WAIT_QUEUE_TIMEOUT: Optional[int] = None + +# Default value for localThresholdMS. +LOCAL_THRESHOLD_MS = 15 + +# Default value for retryWrites. +RETRY_WRITES = True + +# Default value for retryReads. 
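+# (retryWrites and retryReads can also be set per connection string, e.g.
+# "mongodb://localhost/?retryReads=false"; the string form is validated by
+# validate_boolean_or_string in URI_OPTIONS_VALIDATOR_MAP below.)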
+RETRY_READS = True + +# The error code returned when a command doesn't exist. +COMMAND_NOT_FOUND_CODES: Sequence[int] = (59,) + +# Error codes to ignore if GridFS calls createIndex on a secondary +UNAUTHORIZED_CODES: Sequence[int] = (13, 16547, 16548) + +# Maximum number of sessions to send in a single endSessions command. +# From the driver sessions spec. +_MAX_END_SESSIONS = 10000 + +# Default value for srvServiceName +SRV_SERVICE_NAME = "mongodb" + + +def partition_node(node: str) -> Tuple[str, int]: + """Split a host:port string into (host, int(port)) pair.""" + host = node + port = 27017 + idx = node.rfind(":") + if idx != -1: + host, port = node[:idx], int(node[idx + 1 :]) + if host.startswith("["): + host = host[1:-1] + return host, port + + +def clean_node(node: str) -> Tuple[str, int]: + """Split and normalize a node name from a hello response.""" + host, port = partition_node(node) + + # Normalize hostname to lowercase, since DNS is case-insensitive: + # http://tools.ietf.org/html/rfc4343 + # This prevents useless rediscovery if "foo.com" is in the seed list but + # "FOO.com" is in the hello response. + return host.lower(), port + + +def raise_config_error(key: str, dummy: Any) -> NoReturn: + """Raise ConfigurationError with the given key name.""" + raise ConfigurationError(f"Unknown option {key}") + + +# Mapping of URI uuid representation options to valid subtypes. +_UUID_REPRESENTATIONS = { + "unspecified": UuidRepresentation.UNSPECIFIED, + "standard": UuidRepresentation.STANDARD, + "pythonLegacy": UuidRepresentation.PYTHON_LEGACY, + "javaLegacy": UuidRepresentation.JAVA_LEGACY, + "csharpLegacy": UuidRepresentation.CSHARP_LEGACY, +} + + +def validate_boolean_or_string(option: str, value: Any) -> bool: + """Validates that value is True, False, 'true', or 'false'.""" + if isinstance(value, str): + if value not in ("true", "false"): + raise ValueError(f"The value of {option} must be 'true' or 'false'") + return value == "true" + return validate_boolean(option, value) + + +def validate_integer(option: str, value: Any) -> int: + """Validates that 'value' is an integer (or basestring representation).""" + if isinstance(value, int): + return value + elif isinstance(value, str): + try: + return int(value) + except ValueError: + raise ValueError(f"The value of {option} must be an integer") + raise TypeError(f"Wrong type for {option}, value must be an integer") + + +def validate_positive_integer(option: str, value: Any) -> int: + """Validate that 'value' is a positive integer, which does not include 0.""" + val = validate_integer(option, value) + if val <= 0: + raise ValueError(f"The value of {option} must be a positive integer") + return val + + +def validate_non_negative_integer(option: str, value: Any) -> int: + """Validate that 'value' is a positive integer or 0.""" + val = validate_integer(option, value) + if val < 0: + raise ValueError(f"The value of {option} must be a non negative integer") + return val + + +def validate_readable(option: str, value: Any) -> Optional[str]: + """Validates that 'value' is file-like and readable.""" + if value is None: + return value + # First make sure its a string py3.3 open(True, 'r') succeeds + # Used in ssl cert checking due to poor ssl module error reporting + value = validate_string(option, value) + open(value).close() + return value + + +def validate_positive_integer_or_none(option: str, value: Any) -> Optional[int]: + """Validate that 'value' is a positive integer or None.""" + if value is None: + return value + return 
validate_positive_integer(option, value) + + +def validate_non_negative_integer_or_none(option: str, value: Any) -> Optional[int]: + """Validate that 'value' is a positive integer or 0 or None.""" + if value is None: + return value + return validate_non_negative_integer(option, value) + + +def validate_string(option: str, value: Any) -> str: + """Validates that 'value' is an instance of `str`.""" + if isinstance(value, str): + return value + raise TypeError(f"Wrong type for {option}, value must be an instance of str") + + +def validate_string_or_none(option: str, value: Any) -> Optional[str]: + """Validates that 'value' is an instance of `basestring` or `None`.""" + if value is None: + return value + return validate_string(option, value) + + +def validate_int_or_basestring(option: str, value: Any) -> Union[int, str]: + """Validates that 'value' is an integer or string.""" + if isinstance(value, int): + return value + elif isinstance(value, str): + try: + return int(value) + except ValueError: + return value + raise TypeError(f"Wrong type for {option}, value must be an integer or a string") + + +def validate_non_negative_int_or_basestring(option: Any, value: Any) -> Union[int, str]: + """Validates that 'value' is an integer or string.""" + if isinstance(value, int): + return value + elif isinstance(value, str): + try: + val = int(value) + except ValueError: + return value + return validate_non_negative_integer(option, val) + raise TypeError(f"Wrong type for {option}, value must be an non negative integer or a string") + + +def validate_positive_float(option: str, value: Any) -> float: + """Validates that 'value' is a float, or can be converted to one, and is + positive. + """ + errmsg = f"{option} must be an integer or float" + try: + value = float(value) + except ValueError: + raise ValueError(errmsg) + except TypeError: + raise TypeError(errmsg) + + # float('inf') doesn't work in 2.4 or 2.5 on Windows, so just cap floats at + # one billion - this is a reasonable approximation for infinity + if not 0 < value < 1e9: + raise ValueError(f"{option} must be greater than 0 and less than one billion") + return value + + +def validate_positive_float_or_zero(option: str, value: Any) -> float: + """Validates that 'value' is 0 or a positive float, or can be converted to + 0 or a positive float. + """ + if value == 0 or value == "0": + return 0 + return validate_positive_float(option, value) + + +def validate_timeout_or_none(option: str, value: Any) -> Optional[float]: + """Validates a timeout specified in milliseconds returning + a value in floating point seconds. + """ + if value is None: + return value + return validate_positive_float(option, value) / 1000.0 + + +def validate_timeout_or_zero(option: str, value: Any) -> float: + """Validates a timeout specified in milliseconds returning + a value in floating point seconds for the case where None is an error + and 0 is valid. Setting the timeout to nothing in the URI string is a + config error. + """ + if value is None: + raise ConfigurationError(f"{option} cannot be None") + if value == 0 or value == "0": + return 0 + return validate_positive_float(option, value) / 1000.0 + + +def validate_timeout_or_none_or_zero(option: Any, value: Any) -> Optional[float]: + """Validates a timeout specified in milliseconds returning + a value in floating point seconds. value=0 and value="0" are treated the + same as value=None which means unlimited timeout. 
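+
+    For example, ``validate_timeout_or_none_or_zero("connectTimeoutMS",
+    "2500")`` returns ``2.5``.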
+ """ + if value is None or value == 0 or value == "0": + return None + return validate_positive_float(option, value) / 1000.0 + + +def validate_timeoutms(option: Any, value: Any) -> Optional[float]: + """Validates a timeout specified in milliseconds returning + a value in floating point seconds. + """ + if value is None: + return None + return validate_positive_float_or_zero(option, value) / 1000.0 + + +def validate_max_staleness(option: str, value: Any) -> int: + """Validates maxStalenessSeconds according to the Max Staleness Spec.""" + if value == -1 or value == "-1": + # Default: No maximum staleness. + return -1 + return validate_positive_integer(option, value) + + +def validate_read_preference(dummy: Any, value: Any) -> _ServerMode: + """Validate a read preference.""" + if not isinstance(value, _ServerMode): + raise TypeError(f"{value!r} is not a read preference.") + return value + + +def validate_read_preference_mode(dummy: Any, value: Any) -> _ServerMode: + """Validate read preference mode for a MongoClient. + + .. versionchanged:: 3.5 + Returns the original ``value`` instead of the validated read preference + mode. + """ + if value not in _MONGOS_MODES: + raise ValueError(f"{value} is not a valid read preference") + return value + + +def validate_auth_mechanism(option: str, value: Any) -> str: + """Validate the authMechanism URI option.""" + if value not in MECHANISMS: + raise ValueError(f"{option} must be in {tuple(MECHANISMS)}") + return value + + +def validate_uuid_representation(dummy: Any, value: Any) -> int: + """Validate the uuid representation option selected in the URI.""" + try: + return _UUID_REPRESENTATIONS[value] + except KeyError: + raise ValueError( + "{} is an invalid UUID representation. " + "Must be one of " + "{}".format(value, tuple(_UUID_REPRESENTATIONS)) + ) + + +def validate_read_preference_tags(name: str, value: Any) -> List[Dict[str, str]]: + """Parse readPreferenceTags if passed as a client kwarg.""" + if not isinstance(value, list): + value = [value] + + tag_sets: List = [] + for tag_set in value: + if tag_set == "": + tag_sets.append({}) + continue + try: + tags = {} + for tag in tag_set.split(","): + key, val = tag.split(":") + tags[unquote_plus(key)] = unquote_plus(val) + tag_sets.append(tags) + except Exception: + raise ValueError(f"{tag_set!r} not a valid value for {name}") + return tag_sets + + +_MECHANISM_PROPS = frozenset( + [ + "SERVICE_NAME", + "CANONICALIZE_HOST_NAME", + "SERVICE_REALM", + "AWS_SESSION_TOKEN", + "PROVIDER_NAME", + ] +) + + +def validate_auth_mechanism_properties(option: str, value: Any) -> Dict[str, Union[bool, str]]: + """Validate authMechanismProperties.""" + props: Dict[str, Any] = {} + if not isinstance(value, str): + if not isinstance(value, dict): + raise ValueError("Auth mechanism properties must be given as a string or a dictionary") + for key, value in value.items(): + if isinstance(value, str): + props[key] = value + elif isinstance(value, bool): + props[key] = str(value).lower() + elif key in ["allowed_hosts"] and isinstance(value, list): + props[key] = value + elif inspect.isfunction(value): + signature = inspect.signature(value) + if key == "request_token_callback": + expected_params = 2 + elif key == "refresh_token_callback": + expected_params = 2 + else: + raise ValueError(f"Unrecognized Auth mechanism function {key}") + if len(signature.parameters) != expected_params: + msg = f"{key} must accept {expected_params} parameters" + raise ValueError(msg) + props[key] = value + else: + raise ValueError( + "Auth 
mechanism property values must be strings or callback functions" + ) + return props + + value = validate_string(option, value) + for opt in value.split(","): + try: + key, val = opt.split(":") + except ValueError: + # Try not to leak the token. + if "AWS_SESSION_TOKEN" in opt: + opt = ( + "AWS_SESSION_TOKEN:<redacted token>, did you forget " + "to percent-escape the token with quote_plus?" + ) + raise ValueError( + "auth mechanism properties must be " + "key:value pairs like SERVICE_NAME:" + "mongodb, not {}.".format(opt) + ) + if key not in _MECHANISM_PROPS: + raise ValueError( + "{} is not a supported auth " + "mechanism property. Must be one of " + "{}.".format(key, tuple(_MECHANISM_PROPS)) + ) + if key == "CANONICALIZE_HOST_NAME": + props[key] = validate_boolean_or_string(key, val) + else: + props[key] = unquote_plus(val) + + return props + + +def validate_document_class( + option: str, value: Any +) -> Union[Type[MutableMapping], Type[RawBSONDocument]]: + """Validate the document_class option.""" + # issubclass can raise TypeError for generic aliases like SON[str, Any]. + # In that case we can use the base class for the comparison. + is_mapping = False + try: + is_mapping = issubclass(value, abc.MutableMapping) + except TypeError: + if hasattr(value, "__origin__"): + is_mapping = issubclass(value.__origin__, abc.MutableMapping) + if not is_mapping and not issubclass(value, RawBSONDocument): + raise TypeError( + "{} must be dict, bson.son.SON, " + "bson.raw_bson.RawBSONDocument, or a " + "subclass of collections.MutableMapping".format(option) + ) + return value + + +def validate_type_registry(option: Any, value: Any) -> Optional[TypeRegistry]: + """Validate the type_registry option.""" + if value is not None and not isinstance(value, TypeRegistry): + raise TypeError(f"{option} must be an instance of {TypeRegistry}") + return value + + +def validate_list(option: str, value: Any) -> List: + """Validates that 'value' is a list.""" + if not isinstance(value, list): + raise TypeError(f"{option} must be a list") + return value + + +def validate_list_or_none(option: Any, value: Any) -> Optional[List]: + """Validates that 'value' is a list or None.""" + if value is None: + return value + return validate_list(option, value) + + +def validate_list_or_mapping(option: Any, value: Any) -> None: + """Validates that 'value' is a list or a document.""" + if not isinstance(value, (abc.Mapping, list)): + raise TypeError( + "{} must either be a list or an instance of dict, " + "bson.son.SON, or any other type that inherits from " + "collections.Mapping".format(option) + ) + + +def validate_is_mapping(option: str, value: Any) -> None: + """Validate the type of method arguments that expect a document.""" + if not isinstance(value, abc.Mapping): + raise TypeError( + "{} must be an instance of dict, bson.son.SON, or " + "any other type that inherits from " + "collections.Mapping".format(option) + ) + + +def validate_is_document_type(option: str, value: Any) -> None: + """Validate the type of method arguments that expect a MongoDB document.""" + if not isinstance(value, (abc.MutableMapping, RawBSONDocument)): + raise TypeError( + "{} must be an instance of dict, bson.son.SON, " + "bson.raw_bson.RawBSONDocument, or " + "a type that inherits from " + "collections.MutableMapping".format(option) + ) + + +def validate_appname_or_none(option: str, value: Any) -> Optional[str]: + """Validate the appname option.""" + if value is None: + return value + validate_string(option, value) + # We need length in bytes, so 
encode utf8 first. + if len(value.encode("utf-8")) > 128: + raise ValueError(f"{option} must be <= 128 bytes") + return value + + +def validate_driver_or_none(option: Any, value: Any) -> Optional[DriverInfo]: + """Validate the driver keyword arg.""" + if value is None: + return value + if not isinstance(value, DriverInfo): + raise TypeError(f"{option} must be an instance of DriverInfo") + return value + + +def validate_server_api_or_none(option: Any, value: Any) -> Optional[ServerApi]: + """Validate the server_api keyword arg.""" + if value is None: + return value + if not isinstance(value, ServerApi): + raise TypeError(f"{option} must be an instance of ServerApi") + return value + + +def validate_is_callable_or_none(option: Any, value: Any) -> Optional[Callable]: + """Validates that 'value' is a callable.""" + if value is None: + return value + if not callable(value): + raise ValueError(f"{option} must be a callable") + return value + + +def validate_ok_for_replace(replacement: Mapping[str, Any]) -> None: + """Validate a replacement document.""" + validate_is_mapping("replacement", replacement) + # Replacement can be {} + if replacement and not isinstance(replacement, RawBSONDocument): + first = next(iter(replacement)) + if first.startswith("$"): + raise ValueError("replacement can not include $ operators") + + +def validate_ok_for_update(update: Any) -> None: + """Validate an update document.""" + validate_list_or_mapping("update", update) + # Update cannot be {}. + if not update: + raise ValueError("update cannot be empty") + + is_document = not isinstance(update, list) + first = next(iter(update)) + if is_document and not first.startswith("$"): + raise ValueError("update only works with $ operators") + + +_UNICODE_DECODE_ERROR_HANDLERS = frozenset(["strict", "replace", "ignore"]) + + +def validate_unicode_decode_error_handler(dummy: Any, value: str) -> str: + """Validate the Unicode decode error handler option of CodecOptions.""" + if value not in _UNICODE_DECODE_ERROR_HANDLERS: + raise ValueError( + "{} is an invalid Unicode decode error handler. " + "Must be one of " + "{}".format(value, tuple(_UNICODE_DECODE_ERROR_HANDLERS)) + ) + return value + + +def validate_tzinfo(dummy: Any, value: Any) -> Optional[datetime.tzinfo]: + """Validate the tzinfo option""" + if value is not None and not isinstance(value, datetime.tzinfo): + raise TypeError("%s must be an instance of datetime.tzinfo" % value) + return value + + +def validate_auto_encryption_opts_or_none(option: Any, value: Any) -> Optional[Any]: + """Validate the driver keyword arg.""" + if value is None: + return value + from pymongo.encryption_options import AutoEncryptionOpts + + if not isinstance(value, AutoEncryptionOpts): + raise TypeError(f"{option} must be an instance of AutoEncryptionOpts") + + return value + + +def validate_datetime_conversion(option: Any, value: Any) -> Optional[DatetimeConversion]: + """Validate a DatetimeConversion string.""" + if value is None: + return DatetimeConversion.DATETIME + + if isinstance(value, str): + if value.isdigit(): + return DatetimeConversion(int(value)) + return DatetimeConversion[value] + elif isinstance(value, int): + return DatetimeConversion(value) + + raise TypeError(f"{option} must be a str or int representing DatetimeConversion") + + +# Dictionary where keys are the names of public URI options, and values +# are lists of aliases for that option. 
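+# For example, "tls" has the alias "ssl", so "?tls=true" and "?ssl=true" are
+# interchangeable in a connection string.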
+URI_OPTIONS_ALIAS_MAP: Dict[str, List[str]] = { + "tls": ["ssl"], +} + +# Dictionary where keys are the names of URI options, and values +# are functions that validate user-input values for that option. If an option +# alias uses a different validator than its public counterpart, it should be +# included here as a key, value pair. +URI_OPTIONS_VALIDATOR_MAP: Dict[str, Callable[[Any, Any], Any]] = { + "appname": validate_appname_or_none, + "authmechanism": validate_auth_mechanism, + "authmechanismproperties": validate_auth_mechanism_properties, + "authsource": validate_string, + "compressors": validate_compressors, + "connecttimeoutms": validate_timeout_or_none_or_zero, + "directconnection": validate_boolean_or_string, + "heartbeatfrequencyms": validate_timeout_or_none, + "journal": validate_boolean_or_string, + "localthresholdms": validate_positive_float_or_zero, + "maxidletimems": validate_timeout_or_none, + "maxconnecting": validate_positive_integer, + "maxpoolsize": validate_non_negative_integer_or_none, + "maxstalenessseconds": validate_max_staleness, + "readconcernlevel": validate_string_or_none, + "readpreference": validate_read_preference_mode, + "readpreferencetags": validate_read_preference_tags, + "replicaset": validate_string_or_none, + "retryreads": validate_boolean_or_string, + "retrywrites": validate_boolean_or_string, + "loadbalanced": validate_boolean_or_string, + "serverselectiontimeoutms": validate_timeout_or_zero, + "sockettimeoutms": validate_timeout_or_none_or_zero, + "tls": validate_boolean_or_string, + "tlsallowinvalidcertificates": validate_boolean_or_string, + "tlsallowinvalidhostnames": validate_boolean_or_string, + "tlscafile": validate_readable, + "tlscertificatekeyfile": validate_readable, + "tlscertificatekeyfilepassword": validate_string_or_none, + "tlsdisableocspendpointcheck": validate_boolean_or_string, + "tlsinsecure": validate_boolean_or_string, + "w": validate_non_negative_int_or_basestring, + "wtimeoutms": validate_non_negative_integer, + "zlibcompressionlevel": validate_zlib_compression_level, + "srvservicename": validate_string, + "srvmaxhosts": validate_non_negative_integer, + "timeoutms": validate_timeoutms, +} + +# Dictionary where keys are the names of URI options specific to pymongo, +# and values are functions that validate user-input values for those options. +NONSPEC_OPTIONS_VALIDATOR_MAP: Dict[str, Callable[[Any, Any], Any]] = { + "connect": validate_boolean_or_string, + "driver": validate_driver_or_none, + "server_api": validate_server_api_or_none, + "fsync": validate_boolean_or_string, + "minpoolsize": validate_non_negative_integer, + "tlscrlfile": validate_readable, + "tz_aware": validate_boolean_or_string, + "unicode_decode_error_handler": validate_unicode_decode_error_handler, + "uuidrepresentation": validate_uuid_representation, + "waitqueuemultiple": validate_non_negative_integer_or_none, + "waitqueuetimeoutms": validate_timeout_or_none, + "datetime_conversion": validate_datetime_conversion, +} + +# Dictionary where keys are the names of keyword-only options for the +# MongoClient constructor, and values are functions that validate user-input +# values for those options. 
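+# These are passed as MongoClient keyword arguments rather than URI options,
+# e.g. (illustrative) MongoClient(document_class=RawBSONDocument).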
+KW_VALIDATORS: Dict[str, Callable[[Any, Any], Any]] = { + "document_class": validate_document_class, + "type_registry": validate_type_registry, + "read_preference": validate_read_preference, + "event_listeners": _validate_event_listeners, + "tzinfo": validate_tzinfo, + "username": validate_string_or_none, + "password": validate_string_or_none, + "server_selector": validate_is_callable_or_none, + "auto_encryption_opts": validate_auto_encryption_opts_or_none, + "authoidcallowedhosts": validate_list, +} + +# Dictionary where keys are any URI option name, and values are the +# internally-used names of that URI option. Options with only one name +# variant need not be included here. Options whose public and internal +# names are the same need not be included here. +INTERNAL_URI_OPTION_NAME_MAP: Dict[str, str] = { + "ssl": "tls", +} + +# Map from deprecated URI option names to a tuple indicating the method of +# their deprecation and any additional information that may be needed to +# construct the warning message. +URI_OPTIONS_DEPRECATION_MAP: Dict[str, Tuple[str, str]] = { + # format: <deprecated option name>: (<mode>, <message>), + # Supported <mode> values: + # - 'renamed': <message> should be the new option name. Note that case is + # preserved for renamed options as they are part of user warnings. + # - 'removed': <message> may suggest the rationale for deprecating the + # option and/or recommend remedial action. + # For example: + # 'wtimeout': ('renamed', 'wTimeoutMS'), +} + +# Augment the option validator map with pymongo-specific option information. +URI_OPTIONS_VALIDATOR_MAP.update(NONSPEC_OPTIONS_VALIDATOR_MAP) +for optname, aliases in URI_OPTIONS_ALIAS_MAP.items(): + for alias in aliases: + if alias not in URI_OPTIONS_VALIDATOR_MAP: + URI_OPTIONS_VALIDATOR_MAP[alias] = URI_OPTIONS_VALIDATOR_MAP[optname] + +# Map containing all URI option and keyword argument validators. +VALIDATORS: Dict[str, Callable[[Any, Any], Any]] = URI_OPTIONS_VALIDATOR_MAP.copy() +VALIDATORS.update(KW_VALIDATORS) + +# List of timeout-related options. +TIMEOUT_OPTIONS: List[str] = [ + "connecttimeoutms", + "heartbeatfrequencyms", + "maxidletimems", + "maxstalenessseconds", + "serverselectiontimeoutms", + "sockettimeoutms", + "waitqueuetimeoutms", +] + + +_AUTH_OPTIONS = frozenset(["authmechanismproperties"]) + + +def validate_auth_option(option: str, value: Any) -> Tuple[str, Any]: + """Validate optional authentication parameters.""" + lower, value = validate(option, value) + if lower not in _AUTH_OPTIONS: + raise ConfigurationError(f"Unknown authentication option: {option}") + return option, value + + +def validate(option: str, value: Any) -> Tuple[str, Any]: + """Generic validation function.""" + lower = option.lower() + validator = VALIDATORS.get(lower, raise_config_error) + value = validator(option, value) + return option, value + + +def get_validated_options( + options: Mapping[str, Any], warn: bool = True +) -> MutableMapping[str, Any]: + """Validate each entry in options and raise a warning if it is not valid. + Returns a copy of options with invalid entries removed. + + :Parameters: + - `opts`: A dict containing MongoDB URI options. + - `warn` (optional): If ``True`` then warnings will be logged and + invalid options will be ignored. Otherwise, invalid options will + cause errors. 
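+
+    For example, ``get_validated_options({"maxPoolSize": "50", "bogus": 1})``
+    warns about the unknown ``bogus`` option and returns
+    ``{"maxpoolsize": 50}``.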
+ """ + validated_options: MutableMapping[str, Any] + if isinstance(options, _CaseInsensitiveDictionary): + validated_options = _CaseInsensitiveDictionary() + + def get_normed_key(x: str) -> str: + return x # noqa: E731 + + def get_setter_key(x: str) -> str: + return options.cased_key(x) # type: ignore[attr-defined] # noqa: E731 + + else: + validated_options = {} + + def get_normed_key(x: str) -> str: + return x.lower() # noqa: E731 + + def get_setter_key(x: str) -> str: + return x # noqa: E731 + + for opt, value in options.items(): + normed_key = get_normed_key(opt) + try: + validator = URI_OPTIONS_VALIDATOR_MAP.get(normed_key, raise_config_error) + value = validator(opt, value) + except (ValueError, TypeError, ConfigurationError) as exc: + if warn: + warnings.warn(str(exc)) + else: + raise + else: + validated_options[get_setter_key(normed_key)] = value + return validated_options + + +def _esc_coll_name(encrypted_fields: Mapping[str, Any], name: str) -> Any: + return encrypted_fields.get("escCollection", f"enxcol_.{name}.esc") + + +def _ecoc_coll_name(encrypted_fields: Mapping[str, Any], name: str) -> Any: + return encrypted_fields.get("ecocCollection", f"enxcol_.{name}.ecoc") + + +# List of write-concern-related options. +WRITE_CONCERN_OPTIONS = frozenset(["w", "wtimeout", "wtimeoutms", "fsync", "j", "journal"]) + + +class BaseObject: + """A base class that provides attributes and methods common + to multiple pymongo classes. + + SHOULD NOT BE USED BY DEVELOPERS EXTERNAL TO MONGODB. + """ + + def __init__( + self, + codec_options: CodecOptions, + read_preference: _ServerMode, + write_concern: WriteConcern, + read_concern: ReadConcern, + ) -> None: + if not isinstance(codec_options, CodecOptions): + raise TypeError("codec_options must be an instance of bson.codec_options.CodecOptions") + self.__codec_options = codec_options + + if not isinstance(read_preference, _ServerMode): + raise TypeError( + "{!r} is not valid for read_preference. See " + "pymongo.read_preferences for valid " + "options.".format(read_preference) + ) + self.__read_preference = read_preference + + if not isinstance(write_concern, WriteConcern): + raise TypeError( + "write_concern must be an instance of pymongo.write_concern.WriteConcern" + ) + self.__write_concern = write_concern + + if not isinstance(read_concern, ReadConcern): + raise TypeError("read_concern must be an instance of pymongo.read_concern.ReadConcern") + self.__read_concern = read_concern + + @property + def codec_options(self) -> CodecOptions: + """Read only access to the :class:`~bson.codec_options.CodecOptions` + of this instance. + """ + return self.__codec_options + + @property + def write_concern(self) -> WriteConcern: + """Read only access to the :class:`~pymongo.write_concern.WriteConcern` + of this instance. + + .. versionchanged:: 3.0 + The :attr:`write_concern` attribute is now read only. + """ + return self.__write_concern + + def _write_concern_for(self, session: Optional[ClientSession]) -> WriteConcern: + """Read only access to the write concern of this instance or session.""" + # Override this operation's write concern with the transaction's. + if session and session.in_transaction: + return DEFAULT_WRITE_CONCERN + return self.write_concern + + @property + def read_preference(self) -> _ServerMode: + """Read only access to the read preference of this instance. + + .. versionchanged:: 3.0 + The :attr:`read_preference` attribute is now read only. 
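+
+        For example (illustrative)::
+
+            coll = db.get_collection(
+                "test", read_preference=ReadPreference.SECONDARY)
+            coll.read_preference  # Secondary(tag_sets=None, ...)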
+ """ + return self.__read_preference + + def _read_preference_for(self, session: Optional[ClientSession]) -> _ServerMode: + """Read only access to the read preference of this instance or session.""" + # Override this operation's read preference with the transaction's. + if session: + return session._txn_read_preference() or self.__read_preference + return self.__read_preference + + @property + def read_concern(self) -> ReadConcern: + """Read only access to the :class:`~pymongo.read_concern.ReadConcern` + of this instance. + + .. versionadded:: 3.2 + """ + return self.__read_concern + + +class _CaseInsensitiveDictionary(MutableMapping[str, Any]): + def __init__(self, *args: Any, **kwargs: Any): + self.__casedkeys: Dict[str, Any] = {} + self.__data: Dict[str, Any] = {} + self.update(dict(*args, **kwargs)) + + def __contains__(self, key: str) -> bool: # type: ignore[override] + return key.lower() in self.__data + + def __len__(self) -> int: + return len(self.__data) + + def __iter__(self) -> Iterator[str]: + return (key for key in self.__casedkeys) + + def __repr__(self) -> str: + return str({self.__casedkeys[k]: self.__data[k] for k in self}) + + def __setitem__(self, key: str, value: Any) -> None: + lc_key = key.lower() + self.__casedkeys[lc_key] = key + self.__data[lc_key] = value + + def __getitem__(self, key: str) -> Any: + return self.__data[key.lower()] + + def __delitem__(self, key: str) -> None: + lc_key = key.lower() + del self.__casedkeys[lc_key] + del self.__data[lc_key] + + def __eq__(self, other: Any) -> bool: + if not isinstance(other, abc.Mapping): + return NotImplemented + if len(self) != len(other): + return False + for key in other: + if self[key] != other[key]: + return False + + return True + + def get(self, key: str, default: Optional[Any] = None) -> Any: + return self.__data.get(key.lower(), default) + + def pop(self, key: str, *args: Any, **kwargs: Any) -> Any: + lc_key = key.lower() + self.__casedkeys.pop(lc_key, None) + return self.__data.pop(lc_key, *args, **kwargs) + + def popitem(self) -> Tuple[str, Any]: + lc_key, cased_key = self.__casedkeys.popitem() + value = self.__data.pop(lc_key) + return cased_key, value + + def clear(self) -> None: + self.__casedkeys.clear() + self.__data.clear() + + @overload + def setdefault(self, key: str, default: None = None) -> Optional[Any]: + ... + + @overload + def setdefault(self, key: str, default: Any) -> Any: + ... + + def setdefault(self, key: str, default: Optional[Any] = None) -> Optional[Any]: + lc_key = key.lower() + if key in self: + return self.__data[lc_key] + else: + self.__casedkeys[lc_key] = key + self.__data[lc_key] = default + return default + + def update(self, other: Mapping[str, Any]) -> None: # type: ignore[override] + if isinstance(other, _CaseInsensitiveDictionary): + for key in other: + self[other.cased_key(key)] = other[key] + else: + for key in other: + self[key] = other[key] + + def cased_key(self, key: str) -> Any: + return self.__casedkeys[key.lower()] diff --git a/backend/test/lib/python3.8/site-packages/pymongo/compression_support.py b/backend/test/lib/python3.8/site-packages/pymongo/compression_support.py new file mode 100644 index 0000000000000000000000000000000000000000..27fc3cdf27d1f58504941dab5ad1e116d6411a2c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/compression_support.py @@ -0,0 +1,155 @@ +# Copyright 2018 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import warnings +from typing import Any, Iterable, List, Optional, Union + +try: + import snappy + + _HAVE_SNAPPY = True +except ImportError: + # python-snappy isn't available. + _HAVE_SNAPPY = False + +try: + import zlib + + _HAVE_ZLIB = True +except ImportError: + # Python built without zlib support. + _HAVE_ZLIB = False + +try: + from zstandard import ZstdCompressor, ZstdDecompressor + + _HAVE_ZSTD = True +except ImportError: + _HAVE_ZSTD = False + +from pymongo.hello import HelloCompat +from pymongo.monitoring import _SENSITIVE_COMMANDS + +_SUPPORTED_COMPRESSORS = {"snappy", "zlib", "zstd"} +_NO_COMPRESSION = {HelloCompat.CMD, HelloCompat.LEGACY_CMD} +_NO_COMPRESSION.update(_SENSITIVE_COMMANDS) + + +def validate_compressors(dummy: Any, value: Union[str, Iterable[str]]) -> List[str]: + try: + # `value` is string. + compressors = value.split(",") # type: ignore[union-attr] + except AttributeError: + # `value` is an iterable. + compressors = list(value) + + for compressor in compressors[:]: + if compressor not in _SUPPORTED_COMPRESSORS: + compressors.remove(compressor) + warnings.warn(f"Unsupported compressor: {compressor}") + elif compressor == "snappy" and not _HAVE_SNAPPY: + compressors.remove(compressor) + warnings.warn( + "Wire protocol compression with snappy is not available. " + "You must install the python-snappy module for snappy support." + ) + elif compressor == "zlib" and not _HAVE_ZLIB: + compressors.remove(compressor) + warnings.warn( + "Wire protocol compression with zlib is not available. " + "The zlib module is not available." + ) + elif compressor == "zstd" and not _HAVE_ZSTD: + compressors.remove(compressor) + warnings.warn( + "Wire protocol compression with zstandard is not available. " + "You must install the zstandard module for zstandard support." + ) + return compressors + + +def validate_zlib_compression_level(option: str, value: Any) -> int: + try: + level = int(value) + except Exception: + raise TypeError(f"{option} must be an integer, not {value!r}.") + if level < -1 or level > 9: + raise ValueError("%s must be between -1 and 9, not %d." 
% (option, level)) + return level + + +class CompressionSettings: + def __init__(self, compressors: List[str], zlib_compression_level: int): + self.compressors = compressors + self.zlib_compression_level = zlib_compression_level + + def get_compression_context( + self, compressors: Optional[List[str]] + ) -> Union[SnappyContext, ZlibContext, ZstdContext, None]: + if compressors: + chosen = compressors[0] + if chosen == "snappy": + return SnappyContext() + elif chosen == "zlib": + return ZlibContext(self.zlib_compression_level) + elif chosen == "zstd": + return ZstdContext() + return None + return None + + +class SnappyContext: + compressor_id = 1 + + @staticmethod + def compress(data: bytes) -> bytes: + return snappy.compress(data) + + +class ZlibContext: + compressor_id = 2 + + def __init__(self, level: int): + self.level = level + + def compress(self, data: bytes) -> bytes: + return zlib.compress(data, self.level) + + +class ZstdContext: + compressor_id = 3 + + @staticmethod + def compress(data: bytes) -> bytes: + # ZstdCompressor is not thread safe. + # TODO: Use a pool? + return ZstdCompressor().compress(data) + + +def decompress(data: bytes, compressor_id: int) -> bytes: + if compressor_id == SnappyContext.compressor_id: + # python-snappy doesn't support the buffer interface. + # https://github.com/andrix/python-snappy/issues/65 + # This only matters when data is a memoryview since + # id(bytes(data)) == id(data) when data is a bytes. + return snappy.uncompress(bytes(data)) + elif compressor_id == ZlibContext.compressor_id: + return zlib.decompress(data) + elif compressor_id == ZstdContext.compressor_id: + # ZstdDecompressor is not thread safe. + # TODO: Use a pool? + return ZstdDecompressor().decompress(data) + else: + raise ValueError("Unknown compressorId %d" % (compressor_id,)) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/cursor.py b/backend/test/lib/python3.8/site-packages/pymongo/cursor.py new file mode 100644 index 0000000000000000000000000000000000000000..2bf420ac1140c98062c47004a704a59a26cd48a6 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/cursor.py @@ -0,0 +1,1376 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Cursor class to iterate over Mongo query results.""" +from __future__ import annotations + +import copy +import warnings +from collections import deque +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Generic, + Iterable, + List, + Mapping, + NoReturn, + Optional, + Sequence, + Tuple, + Union, + cast, + overload, +) + +from bson import RE_TYPE, _convert_raw_document_lists_to_streams +from bson.code import Code +from bson.son import SON +from pymongo import helpers +from pymongo.collation import validate_collation_or_none +from pymongo.common import ( + validate_boolean, + validate_is_document_type, + validate_is_mapping, +) +from pymongo.errors import ConnectionFailure, InvalidOperation, OperationFailure +from pymongo.lock import _create_lock +from pymongo.message import ( + _CursorAddress, + _GetMore, + _OpMsg, + _OpReply, + _Query, + _RawBatchGetMore, + _RawBatchQuery, +) +from pymongo.response import PinnedResponse +from pymongo.typings import _Address, _CollationIn, _DocumentOut, _DocumentType + +if TYPE_CHECKING: + from _typeshed import SupportsItems + + from bson.codec_options import CodecOptions + from pymongo.client_session import ClientSession + from pymongo.collection import Collection + from pymongo.pool import Connection + from pymongo.read_preferences import _ServerMode + + +# These errors mean that the server has already killed the cursor so there is +# no need to send killCursors. +_CURSOR_CLOSED_ERRORS = frozenset( + [ + 43, # CursorNotFound + 50, # MaxTimeMSExpired + 175, # QueryPlanKilled + 237, # CursorKilled + # On a tailable cursor, the following errors mean the capped collection + # rolled over. + # MongoDB 2.6: + # {'$err': 'Runner killed during getMore', 'code': 28617, 'ok': 0} + 28617, + # MongoDB 3.0: + # {'$err': 'getMore executor error: UnknownError no details available', + # 'code': 17406, 'ok': 0} + 17406, + # MongoDB 3.2 + 3.4: + # {'ok': 0.0, 'errmsg': 'GetMore command executor error: + # CappedPositionLost: CollectionScan died due to failure to restore + # tailable cursor position. Last seen record id: RecordId(3)', + # 'code': 96} + 96, + # MongoDB 3.6+: + # {'ok': 0.0, 'errmsg': 'errmsg: "CollectionScan died due to failure to + # restore tailable cursor position. Last seen record id: RecordId(3)"', + # 'code': 136, 'codeName': 'CappedPositionLost'} + 136, + ] +) + +_QUERY_OPTIONS = { + "tailable_cursor": 2, + "secondary_okay": 4, + "oplog_replay": 8, + "no_timeout": 16, + "await_data": 32, + "exhaust": 64, + "partial": 128, +} + + +class CursorType: + NON_TAILABLE = 0 + """The standard cursor type.""" + + TAILABLE = _QUERY_OPTIONS["tailable_cursor"] + """The tailable cursor type. + + Tailable cursors are only for use with capped collections. They are not + closed when the last data is retrieved but are kept open and the cursor + location marks the final document position. If more data is received + iteration of the cursor will continue from the last document received. + """ + + TAILABLE_AWAIT = TAILABLE | _QUERY_OPTIONS["await_data"] + """A tailable cursor with the await option set. + + Creates a tailable cursor that will wait for a few seconds after returning + the full result set so that it can capture and return additional data added + during the query. + """ + + EXHAUST = _QUERY_OPTIONS["exhaust"] + """An exhaust cursor. + + MongoDB will stream batched results to the client without waiting for the + client to request each batch, reducing latency. 
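+
+    For example (illustrative; not supported through mongos)::
+
+        for doc in coll.find({}, cursor_type=CursorType.EXHAUST):
+            ...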
+ """ + + +class _ConnectionManager: + """Used with exhaust cursors to ensure the connection is returned.""" + + def __init__(self, conn: Connection, more_to_come: bool): + self.conn: Optional[Connection] = conn + self.more_to_come = more_to_come + self.lock = _create_lock() + + def update_exhaust(self, more_to_come: bool) -> None: + self.more_to_come = more_to_come + + def close(self) -> None: + """Return this instance's connection to the connection pool.""" + if self.conn: + self.conn.unpin() + self.conn = None + + +_Sort = Sequence[Union[str, Tuple[str, Union[int, str, Mapping[str, Any]]]]] +_Hint = Union[str, _Sort] + + +class Cursor(Generic[_DocumentType]): + """A cursor / iterator over Mongo query results.""" + + _query_class = _Query + _getmore_class = _GetMore + + def __init__( + self, + collection: Collection[_DocumentType], + filter: Optional[Mapping[str, Any]] = None, + projection: Optional[Union[Mapping[str, Any], Iterable[str]]] = None, + skip: int = 0, + limit: int = 0, + no_cursor_timeout: bool = False, + cursor_type: int = CursorType.NON_TAILABLE, + sort: Optional[_Sort] = None, + allow_partial_results: bool = False, + oplog_replay: bool = False, + batch_size: int = 0, + collation: Optional[_CollationIn] = None, + hint: Optional[_Hint] = None, + max_scan: Optional[int] = None, + max_time_ms: Optional[int] = None, + max: Optional[_Sort] = None, + min: Optional[_Sort] = None, + return_key: Optional[bool] = None, + show_record_id: Optional[bool] = None, + snapshot: Optional[bool] = None, + comment: Optional[Any] = None, + session: Optional[ClientSession] = None, + allow_disk_use: Optional[bool] = None, + let: Optional[bool] = None, + ) -> None: + """Create a new cursor. + + Should not be called directly by application developers - see + :meth:`~pymongo.collection.Collection.find` instead. + + .. seealso:: The MongoDB documentation on `cursors <https://dochub.mongodb.org/core/cursors>`_. + """ + # Initialize all attributes used in __del__ before possibly raising + # an error to avoid attribute errors during garbage collection. 
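+        # (__del__ invokes __die, which reads __id, __address, __sock_mgr,
+        # __killed and the session attributes, so they must exist even when
+        # the argument validation below raises.)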
+ self.__collection: Collection[_DocumentType] = collection + self.__id: Any = None + self.__exhaust = False + self.__sock_mgr: Any = None + self.__killed = False + self.__session: Optional[ClientSession] + + if session: + self.__session = session + self.__explicit_session = True + else: + self.__session = None + self.__explicit_session = False + + spec: Mapping[str, Any] = filter or {} + validate_is_mapping("filter", spec) + if not isinstance(skip, int): + raise TypeError("skip must be an instance of int") + if not isinstance(limit, int): + raise TypeError("limit must be an instance of int") + validate_boolean("no_cursor_timeout", no_cursor_timeout) + if no_cursor_timeout and not self.__explicit_session: + warnings.warn( + "use an explicit session with no_cursor_timeout=True " + "otherwise the cursor may still timeout after " + "30 minutes, for more info see " + "https://mongodb.com/docs/v4.4/reference/method/" + "cursor.noCursorTimeout/" + "#session-idle-timeout-overrides-nocursortimeout", + UserWarning, + stacklevel=2, + ) + if cursor_type not in ( + CursorType.NON_TAILABLE, + CursorType.TAILABLE, + CursorType.TAILABLE_AWAIT, + CursorType.EXHAUST, + ): + raise ValueError("not a valid value for cursor_type") + validate_boolean("allow_partial_results", allow_partial_results) + validate_boolean("oplog_replay", oplog_replay) + if not isinstance(batch_size, int): + raise TypeError("batch_size must be an integer") + if batch_size < 0: + raise ValueError("batch_size must be >= 0") + # Only set if allow_disk_use is provided by the user, else None. + if allow_disk_use is not None: + allow_disk_use = validate_boolean("allow_disk_use", allow_disk_use) + + if projection is not None: + projection = helpers._fields_list_to_dict(projection, "projection") + + if let is not None: + validate_is_document_type("let", let) + + self.__let = let + self.__spec = spec + self.__has_filter = filter is not None + self.__projection = projection + self.__skip = skip + self.__limit = limit + self.__batch_size = batch_size + self.__ordering = sort and helpers._index_document(sort) or None + self.__max_scan = max_scan + self.__explain = False + self.__comment = comment + self.__max_time_ms = max_time_ms + self.__max_await_time_ms: Optional[int] = None + self.__max: Optional[Union[SON[Any, Any], _Sort]] = max + self.__min: Optional[Union[SON[Any, Any], _Sort]] = min + self.__collation = validate_collation_or_none(collation) + self.__return_key = return_key + self.__show_record_id = show_record_id + self.__allow_disk_use = allow_disk_use + self.__snapshot = snapshot + self.__hint: Union[str, SON[str, Any], None] + self.__set_hint(hint) + + # Exhaust cursor support + if cursor_type == CursorType.EXHAUST: + if self.__collection.database.client.is_mongos: + raise InvalidOperation("Exhaust cursors are not supported by mongos") + if limit: + raise InvalidOperation("Can't use limit and exhaust together.") + self.__exhaust = True + + # This is ugly. People want to be able to do cursor[5:5] and + # get an empty result set (old behavior was an + # exception). It's hard to do that right, though, because the + # server uses limit(0) to mean 'no limit'. So we set __empty + # in that case and check for it when iterating. We also unset + # it anytime we change __limit. + self.__empty = False + + self.__data: deque = deque() + self.__address: Optional[_Address] = None + self.__retrieved = 0 + + self.__codec_options = collection.codec_options + # Read preference is set when the initial find is sent. 
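+        # (presumably resolved via Collection._read_preference_for(session)
+        # when the first query message is built)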
+ self.__read_preference: Optional[_ServerMode] = None + self.__read_concern = collection.read_concern + + self.__query_flags = cursor_type + if no_cursor_timeout: + self.__query_flags |= _QUERY_OPTIONS["no_timeout"] + if allow_partial_results: + self.__query_flags |= _QUERY_OPTIONS["partial"] + if oplog_replay: + self.__query_flags |= _QUERY_OPTIONS["oplog_replay"] + + # The namespace to use for find/getMore commands. + self.__dbname = collection.database.name + self.__collname = collection.name + + @property + def collection(self) -> Collection[_DocumentType]: + """The :class:`~pymongo.collection.Collection` that this + :class:`Cursor` is iterating. + """ + return self.__collection + + @property + def retrieved(self) -> int: + """The number of documents retrieved so far.""" + return self.__retrieved + + def __del__(self) -> None: + self.__die() + + def rewind(self) -> "Cursor[_DocumentType]": + """Rewind this cursor to its unevaluated state. + + Reset this cursor if it has been partially or completely evaluated. + Any options that are present on the cursor will remain in effect. + Future iterating performed on this cursor will cause new queries to + be sent to the server, even if the resultant data has already been + retrieved by this cursor. + """ + self.close() + self.__data = deque() + self.__id = None + self.__address = None + self.__retrieved = 0 + self.__killed = False + + return self + + def clone(self) -> "Cursor[_DocumentType]": + """Get a clone of this cursor. + + Returns a new Cursor instance with options matching those that have + been set on the current instance. The clone will be completely + unevaluated, even if the current instance has been partially or + completely evaluated. + """ + return self._clone(True) + + def _clone(self, deepcopy: bool = True, base: Optional[Cursor] = None) -> Cursor: + """Internal clone helper.""" + if not base: + if self.__explicit_session: + base = self._clone_base(self.__session) + else: + base = self._clone_base(None) + + values_to_clone = ( + "spec", + "projection", + "skip", + "limit", + "max_time_ms", + "max_await_time_ms", + "comment", + "max", + "min", + "ordering", + "explain", + "hint", + "batch_size", + "max_scan", + "query_flags", + "collation", + "empty", + "show_record_id", + "return_key", + "allow_disk_use", + "snapshot", + "exhaust", + "has_filter", + ) + data = { + k: v + for k, v in self.__dict__.items() + if k.startswith("_Cursor__") and k[9:] in values_to_clone + } + if deepcopy: + data = self._deepcopy(data) + base.__dict__.update(data) + return base + + def _clone_base(self, session: Optional[ClientSession]) -> Cursor: + """Creates an empty Cursor object for information to be copied into.""" + return self.__class__(self.__collection, session=session) + + def __die(self, synchronous: bool = False) -> None: + """Closes this cursor.""" + try: + already_killed = self.__killed + except AttributeError: + # __init__ did not run to completion (or at all). + return + + self.__killed = True + if self.__id and not already_killed: + cursor_id = self.__id + assert self.__address is not None + address = _CursorAddress(self.__address, f"{self.__dbname}.{self.__collname}") + else: + # Skip killCursors. 
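+            # (no server-side cursor exists, or the server already killed it)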
+ cursor_id = 0 + address = None + self.__collection.database.client._cleanup_cursor( + synchronous, + cursor_id, + address, + self.__sock_mgr, + self.__session, + self.__explicit_session, + ) + if not self.__explicit_session: + self.__session = None + self.__sock_mgr = None + + def close(self) -> None: + """Explicitly close / kill this cursor.""" + self.__die(True) + + def __query_spec(self) -> Mapping[str, Any]: + """Get the spec to use for a query.""" + operators: Dict[str, Any] = {} + if self.__ordering: + operators["$orderby"] = self.__ordering + if self.__explain: + operators["$explain"] = True + if self.__hint: + operators["$hint"] = self.__hint + if self.__let: + operators["let"] = self.__let + if self.__comment: + operators["$comment"] = self.__comment + if self.__max_scan: + operators["$maxScan"] = self.__max_scan + if self.__max_time_ms is not None: + operators["$maxTimeMS"] = self.__max_time_ms + if self.__max: + operators["$max"] = self.__max + if self.__min: + operators["$min"] = self.__min + if self.__return_key is not None: + operators["$returnKey"] = self.__return_key + if self.__show_record_id is not None: + # This is upgraded to showRecordId for MongoDB 3.2+ "find" command. + operators["$showDiskLoc"] = self.__show_record_id + if self.__snapshot is not None: + operators["$snapshot"] = self.__snapshot + + if operators: + # Make a shallow copy so we can cleanly rewind or clone. + spec = copy.copy(self.__spec) + + # Allow-listed commands must be wrapped in $query. + if "$query" not in spec: + # $query has to come first + spec = SON([("$query", spec)]) + + if not isinstance(spec, SON): + # Ensure the spec is SON. As order is important this will + # ensure its set before merging in any extra operators. + spec = SON(spec) + + spec.update(operators) + return spec + # Have to wrap with $query if "query" is the first key. + # We can't just use $query anytime "query" is a key as + # that breaks commands like count and find_and_modify. + # Checking spec.keys()[0] covers the case that the spec + # was passed as an instance of SON or OrderedDict. + elif "query" in self.__spec and ( + len(self.__spec) == 1 or next(iter(self.__spec)) == "query" + ): + return SON({"$query": self.__spec}) + + return self.__spec + + def __check_okay_to_chain(self) -> None: + """Check if it is okay to chain more options onto this cursor.""" + if self.__retrieved or self.__id is not None: + raise InvalidOperation("cannot set options after executing query") + + def add_option(self, mask: int) -> "Cursor[_DocumentType]": + """Set arbitrary query flags using a bitmask. + + To set the tailable flag: + cursor.add_option(2) + """ + if not isinstance(mask, int): + raise TypeError("mask must be an int") + self.__check_okay_to_chain() + + if mask & _QUERY_OPTIONS["exhaust"]: + if self.__limit: + raise InvalidOperation("Can't use limit and exhaust together.") + if self.__collection.database.client.is_mongos: + raise InvalidOperation("Exhaust cursors are not supported by mongos") + self.__exhaust = True + + self.__query_flags |= mask + return self + + def remove_option(self, mask: int) -> "Cursor[_DocumentType]": + """Unset arbitrary query flags using a bitmask. 
+ + To unset the tailable flag: + cursor.remove_option(2) + """ + if not isinstance(mask, int): + raise TypeError("mask must be an int") + self.__check_okay_to_chain() + + if mask & _QUERY_OPTIONS["exhaust"]: + self.__exhaust = False + + self.__query_flags &= ~mask + return self + + def allow_disk_use(self, allow_disk_use: bool) -> "Cursor[_DocumentType]": + """Specifies whether MongoDB can use temporary disk files while + processing a blocking sort operation. + + Raises :exc:`TypeError` if `allow_disk_use` is not a boolean. + + .. note:: `allow_disk_use` requires server version **>= 4.4** + + :Parameters: + - `allow_disk_use`: if True, MongoDB may use temporary + disk files to store data exceeding the system memory limit while + processing a blocking sort operation. + + .. versionadded:: 3.11 + """ + if not isinstance(allow_disk_use, bool): + raise TypeError("allow_disk_use must be a bool") + self.__check_okay_to_chain() + + self.__allow_disk_use = allow_disk_use + return self + + def limit(self, limit: int) -> "Cursor[_DocumentType]": + """Limits the number of results to be returned by this cursor. + + Raises :exc:`TypeError` if `limit` is not an integer. Raises + :exc:`~pymongo.errors.InvalidOperation` if this :class:`Cursor` + has already been used. The last `limit` applied to this cursor + takes precedence. A limit of ``0`` is equivalent to no limit. + + :Parameters: + - `limit`: the number of results to return + + .. seealso:: The MongoDB documentation on `limit <https://dochub.mongodb.org/core/limit>`_. + """ + if not isinstance(limit, int): + raise TypeError("limit must be an integer") + if self.__exhaust: + raise InvalidOperation("Can't use limit and exhaust together.") + self.__check_okay_to_chain() + + self.__empty = False + self.__limit = limit + return self + + def batch_size(self, batch_size: int) -> "Cursor[_DocumentType]": + """Limits the number of documents returned in one batch. Each batch + requires a round trip to the server. It can be adjusted to optimize + performance and limit data transfer. + + .. note:: batch_size can not override MongoDB's internal limits on the + amount of data it will return to the client in a single batch (i.e + if you set batch size to 1,000,000,000, MongoDB will currently only + return 4-16MB of results per batch). + + Raises :exc:`TypeError` if `batch_size` is not an integer. + Raises :exc:`ValueError` if `batch_size` is less than ``0``. + Raises :exc:`~pymongo.errors.InvalidOperation` if this + :class:`Cursor` has already been used. The last `batch_size` + applied to this cursor takes precedence. + + :Parameters: + - `batch_size`: The size of each batch of results requested. + """ + if not isinstance(batch_size, int): + raise TypeError("batch_size must be an integer") + if batch_size < 0: + raise ValueError("batch_size must be >= 0") + self.__check_okay_to_chain() + + self.__batch_size = batch_size + return self + + def skip(self, skip: int) -> "Cursor[_DocumentType]": + """Skips the first `skip` results of this cursor. + + Raises :exc:`TypeError` if `skip` is not an integer. Raises + :exc:`ValueError` if `skip` is less than ``0``. Raises + :exc:`~pymongo.errors.InvalidOperation` if this :class:`Cursor` has + already been used. The last `skip` applied to this cursor takes + precedence. 
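+
+        For example, ``coll.find().skip(20).limit(10)`` returns the 21st
+        through 30th matching documents.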
+ + :Parameters: + - `skip`: the number of results to skip + """ + if not isinstance(skip, int): + raise TypeError("skip must be an integer") + if skip < 0: + raise ValueError("skip must be >= 0") + self.__check_okay_to_chain() + + self.__skip = skip + return self + + def max_time_ms(self, max_time_ms: Optional[int]) -> "Cursor[_DocumentType]": + """Specifies a time limit for a query operation. If the specified + time is exceeded, the operation will be aborted and + :exc:`~pymongo.errors.ExecutionTimeout` is raised. If `max_time_ms` + is ``None`` no limit is applied. + + Raises :exc:`TypeError` if `max_time_ms` is not an integer or ``None``. + Raises :exc:`~pymongo.errors.InvalidOperation` if this :class:`Cursor` + has already been used. + + :Parameters: + - `max_time_ms`: the time limit after which the operation is aborted + """ + if not isinstance(max_time_ms, int) and max_time_ms is not None: + raise TypeError("max_time_ms must be an integer or None") + self.__check_okay_to_chain() + + self.__max_time_ms = max_time_ms + return self + + def max_await_time_ms(self, max_await_time_ms: Optional[int]) -> "Cursor[_DocumentType]": + """Specifies a time limit for a getMore operation on a + :attr:`~pymongo.cursor.CursorType.TAILABLE_AWAIT` cursor. For all other + types of cursor max_await_time_ms is ignored. + + Raises :exc:`TypeError` if `max_await_time_ms` is not an integer or + ``None``. Raises :exc:`~pymongo.errors.InvalidOperation` if this + :class:`Cursor` has already been used. + + .. note:: `max_await_time_ms` requires server version **>= 3.2** + + :Parameters: + - `max_await_time_ms`: the time limit after which the operation is + aborted + + .. versionadded:: 3.2 + """ + if not isinstance(max_await_time_ms, int) and max_await_time_ms is not None: + raise TypeError("max_await_time_ms must be an integer or None") + self.__check_okay_to_chain() + + # Ignore max_await_time_ms if not tailable or await_data is False. + if self.__query_flags & CursorType.TAILABLE_AWAIT: + self.__max_await_time_ms = max_await_time_ms + + return self + + @overload + def __getitem__(self, index: int) -> _DocumentType: + ... + + @overload + def __getitem__(self, index: slice) -> "Cursor[_DocumentType]": + ... + + def __getitem__(self, index: Union[int, slice]) -> Union[_DocumentType, Cursor[_DocumentType]]: + """Get a single document or a slice of documents from this cursor. + + .. warning:: A :class:`~Cursor` is not a Python :class:`list`. Each + index access or slice requires that a new query be run using skip + and limit. Do not iterate the cursor using index accesses. + The following example is **extremely inefficient** and may return + surprising results:: + + cursor = db.collection.find() + # Warning: This runs a new query for each document. + # Don't do this! + for idx in range(10): + print(cursor[idx]) + + Raises :class:`~pymongo.errors.InvalidOperation` if this + cursor has already been used. + + To get a single document use an integral index, e.g.:: + + >>> db.test.find()[50] + + An :class:`IndexError` will be raised if the index is negative + or greater than the amount of documents in this cursor. Any + limit previously applied to this cursor will be ignored. + + To get a slice of documents use a slice index, e.g.:: + + >>> db.test.find()[20:25] + + This will return this cursor with a limit of ``5`` and skip of + ``20`` applied. Using a slice index will override any prior + limits or skips applied to this cursor (including those + applied through previous calls to this method). 
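+
+        In other words, the ``[20:25]`` slice above behaves like the
+        following explicit sketch::
+
+          >>> db.test.find().skip(20).limit(5)
+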
Raises + :class:`IndexError` when the slice has a step, a negative + start value, or a stop value less than or equal to the start + value. + + :Parameters: + - `index`: An integer or slice index to be applied to this cursor + """ + self.__check_okay_to_chain() + self.__empty = False + if isinstance(index, slice): + if index.step is not None: + raise IndexError("Cursor instances do not support slice steps") + + skip = 0 + if index.start is not None: + if index.start < 0: + raise IndexError("Cursor instances do not support negative indices") + skip = index.start + + if index.stop is not None: + limit = index.stop - skip + if limit < 0: + raise IndexError( + "stop index must be greater than start index for slice %r" % index + ) + if limit == 0: + self.__empty = True + else: + limit = 0 + + self.__skip = skip + self.__limit = limit + return self + + if isinstance(index, int): + if index < 0: + raise IndexError("Cursor instances do not support negative indices") + clone = self.clone() + clone.skip(index + self.__skip) + clone.limit(-1) # use a hard limit + clone.__query_flags &= ~CursorType.TAILABLE_AWAIT # PYTHON-1371 + for doc in clone: + return doc + raise IndexError("no such item for Cursor instance") + raise TypeError("index %r cannot be applied to Cursor instances" % index) + + def max_scan(self, max_scan: Optional[int]) -> "Cursor[_DocumentType]": + """**DEPRECATED** - Limit the number of documents to scan when + performing the query. + + Raises :class:`~pymongo.errors.InvalidOperation` if this + cursor has already been used. Only the last :meth:`max_scan` + applied to this cursor has any effect. + + :Parameters: + - `max_scan`: the maximum number of documents to scan + + .. versionchanged:: 3.7 + Deprecated :meth:`max_scan`. Support for this option is deprecated in + MongoDB 4.0. Use :meth:`max_time_ms` instead to limit server side + execution time. + """ + self.__check_okay_to_chain() + self.__max_scan = max_scan + return self + + def max(self, spec: _Sort) -> "Cursor[_DocumentType]": + """Adds ``max`` operator that specifies upper bound for specific index. + + When using ``max``, :meth:`~hint` should also be configured to ensure + the query uses the expected index and starting in MongoDB 4.2 + :meth:`~hint` will be required. + + :Parameters: + - `spec`: a list of field, limit pairs specifying the exclusive + upper bound for all keys of a specific index in order. + + .. versionchanged:: 3.8 + Deprecated cursors that use ``max`` without a :meth:`~hint`. + + .. versionadded:: 2.7 + """ + if not isinstance(spec, (list, tuple)): + raise TypeError("spec must be an instance of list or tuple") + + self.__check_okay_to_chain() + self.__max = SON(spec) + return self + + def min(self, spec: _Sort) -> "Cursor[_DocumentType]": + """Adds ``min`` operator that specifies lower bound for specific index. + + When using ``min``, :meth:`~hint` should also be configured to ensure + the query uses the expected index and starting in MongoDB 4.2 + :meth:`~hint` will be required. + + :Parameters: + - `spec`: a list of field, limit pairs specifying the inclusive + lower bound for all keys of a specific index in order. + + .. versionchanged:: 3.8 + Deprecated cursors that use ``min`` without a :meth:`~hint`. + + .. 
versionadded:: 2.7 + """ + if not isinstance(spec, (list, tuple)): + raise TypeError("spec must be an instance of list or tuple") + + self.__check_okay_to_chain() + self.__min = SON(spec) + return self + + def sort( + self, key_or_list: _Hint, direction: Optional[Union[int, str]] = None + ) -> "Cursor[_DocumentType]": + """Sorts this cursor's results. + + Pass a field name and a direction, either + :data:`~pymongo.ASCENDING` or :data:`~pymongo.DESCENDING`.:: + + for doc in collection.find().sort('field', pymongo.ASCENDING): + print(doc) + + To sort by multiple fields, pass a list of (key, direction) pairs. + If just a name is given, :data:`~pymongo.ASCENDING` will be inferred:: + + for doc in collection.find().sort([ + 'field1', + ('field2', pymongo.DESCENDING)]): + print(doc) + + Text search results can be sorted by relevance:: + + cursor = db.test.find( + {'$text': {'$search': 'some words'}}, + {'score': {'$meta': 'textScore'}}) + + # Sort by 'score' field. + cursor.sort([('score', {'$meta': 'textScore'})]) + + for doc in cursor: + print(doc) + + For more advanced text search functionality, see MongoDB's + `Atlas Search <https://docs.atlas.mongodb.com/atlas-search/>`_. + + Raises :class:`~pymongo.errors.InvalidOperation` if this cursor has + already been used. Only the last :meth:`sort` applied to this + cursor has any effect. + + :Parameters: + - `key_or_list`: a single key or a list of (key, direction) + pairs specifying the keys to sort on + - `direction` (optional): only used if `key_or_list` is a single + key, if not given :data:`~pymongo.ASCENDING` is assumed + """ + self.__check_okay_to_chain() + keys = helpers._index_list(key_or_list, direction) + self.__ordering = helpers._index_document(keys) + return self + + def distinct(self, key: str) -> List: + """Get a list of distinct values for `key` among all documents + in the result set of this query. + + Raises :class:`TypeError` if `key` is not an instance of + :class:`str`. + + The :meth:`distinct` method obeys the + :attr:`~pymongo.collection.Collection.read_preference` of the + :class:`~pymongo.collection.Collection` instance on which + :meth:`~pymongo.collection.Collection.find` was called. + + :Parameters: + - `key`: name of key for which we want to get the distinct values + + .. seealso:: :meth:`pymongo.collection.Collection.distinct` + """ + options: Dict[str, Any] = {} + if self.__spec: + options["query"] = self.__spec + if self.__max_time_ms is not None: + options["maxTimeMS"] = self.__max_time_ms + if self.__comment: + options["comment"] = self.__comment + if self.__collation is not None: + options["collation"] = self.__collation + + return self.__collection.distinct(key, session=self.__session, **options) + + def explain(self) -> _DocumentType: + """Returns an explain plan record for this cursor. + + .. note:: This method uses the default verbosity mode of the + `explain command + <https://mongodb.com/docs/manual/reference/command/explain/>`_, + ``allPlansExecution``. To use a different verbosity use + :meth:`~pymongo.database.Database.command` to run the explain + command directly. + + .. seealso:: The MongoDB documentation on `explain <https://dochub.mongodb.org/core/explain>`_. 
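+
+        A minimal usage sketch (``test`` is an assumed, populated
+        collection)::
+
+          >>> plan = db.test.find({"x": 1}).explain()
+          >>> plan["queryPlanner"]["winningPlan"]  # the plan the server chose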
+ """ + c = self.clone() + c.__explain = True + + # always use a hard limit for explains + if c.__limit: + c.__limit = -abs(c.__limit) + return next(c) + + def __set_hint(self, index: Optional[_Hint]) -> None: + if index is None: + self.__hint = None + return + + if isinstance(index, str): + self.__hint = index + else: + self.__hint = helpers._index_document(index) + + def hint(self, index: Optional[_Hint]) -> "Cursor[_DocumentType]": + """Adds a 'hint', telling Mongo the proper index to use for the query. + + Judicious use of hints can greatly improve query + performance. When doing a query on multiple fields (at least + one of which is indexed) pass the indexed field as a hint to + the query. Raises :class:`~pymongo.errors.OperationFailure` if the + provided hint requires an index that does not exist on this collection, + and raises :class:`~pymongo.errors.InvalidOperation` if this cursor has + already been used. + + `index` should be an index as passed to + :meth:`~pymongo.collection.Collection.create_index` + (e.g. ``[('field', ASCENDING)]``) or the name of the index. + If `index` is ``None`` any existing hint for this query is + cleared. The last hint applied to this cursor takes precedence + over all others. + + :Parameters: + - `index`: index to hint on (as an index specifier) + """ + self.__check_okay_to_chain() + self.__set_hint(index) + return self + + def comment(self, comment: Any) -> "Cursor[_DocumentType]": + """Adds a 'comment' to the cursor. + + http://mongodb.com/docs/manual/reference/operator/comment/ + + :Parameters: + - `comment`: A string to attach to the query to help interpret and + trace the operation in the server logs and in profile data. + + .. versionadded:: 2.7 + """ + self.__check_okay_to_chain() + self.__comment = comment + return self + + def where(self, code: Union[str, Code]) -> "Cursor[_DocumentType]": + """Adds a `$where`_ clause to this query. + + The `code` argument must be an instance of :class:`str` or + :class:`~bson.code.Code` containing a JavaScript expression. + This expression will be evaluated for each document scanned. + Only those documents for which the expression evaluates to + *true* will be returned as results. The keyword *this* refers + to the object currently being scanned. For example:: + + # Find all documents where field "a" is less than "b" plus "c". + for doc in db.test.find().where('this.a < (this.b + this.c)'): + print(doc) + + Raises :class:`TypeError` if `code` is not an instance of + :class:`str`. Raises :class:`~pymongo.errors.InvalidOperation` if this + :class:`Cursor` has already been used. Only the last call to + :meth:`where` applied to a :class:`Cursor` has any effect. + + .. note:: MongoDB 4.4 drops support for :class:`~bson.code.Code` + with scope variables. Consider using `$expr`_ instead. + + :Parameters: + - `code`: JavaScript expression to use as a filter + + .. _$expr: https://mongodb.com/docs/manual/reference/operator/query/expr/ + .. _$where: https://mongodb.com/docs/manual/reference/operator/query/where/ + """ + self.__check_okay_to_chain() + if not isinstance(code, Code): + code = Code(code) + + # Avoid overwriting a filter argument that was given by the user + # when updating the spec. + spec: Dict[str, Any] + if self.__has_filter: + spec = dict(self.__spec) + else: + spec = cast(Dict, self.__spec) + spec["$where"] = code + self.__spec = spec + return self + + def collation(self, collation: Optional[_CollationIn]) -> "Cursor[_DocumentType]": + """Adds a :class:`~pymongo.collation.Collation` to this query. 
+ + Raises :exc:`TypeError` if `collation` is not an instance of + :class:`~pymongo.collation.Collation` or a ``dict``. Raises + :exc:`~pymongo.errors.InvalidOperation` if this :class:`Cursor` has + already been used. Only the last collation applied to this cursor has + any effect. + + :Parameters: + - `collation`: An instance of :class:`~pymongo.collation.Collation`. + """ + self.__check_okay_to_chain() + self.__collation = validate_collation_or_none(collation) + return self + + def __send_message(self, operation: Union[_Query, _GetMore]) -> None: + """Send a query or getmore operation and handles the response. + + If operation is ``None`` this is an exhaust cursor, which reads + the next result batch off the exhaust socket instead of + sending getMore messages to the server. + + Can raise ConnectionFailure. + """ + client = self.__collection.database.client + # OP_MSG is required to support exhaust cursors with encryption. + if client._encrypter and self.__exhaust: + raise InvalidOperation("exhaust cursors do not support auto encryption") + + try: + response = client._run_operation( + operation, self._unpack_response, address=self.__address + ) + except OperationFailure as exc: + if exc.code in _CURSOR_CLOSED_ERRORS or self.__exhaust: + # Don't send killCursors because the cursor is already closed. + self.__killed = True + self.close() + # If this is a tailable cursor the error is likely + # due to capped collection roll over. Setting + # self.__killed to True ensures Cursor.alive will be + # False. No need to re-raise. + if ( + exc.code in _CURSOR_CLOSED_ERRORS + and self.__query_flags & _QUERY_OPTIONS["tailable_cursor"] + ): + return + raise + except ConnectionFailure: + # Don't send killCursors because the cursor is already closed. + self.__killed = True + self.close() + raise + except Exception: + self.close() + raise + + self.__address = response.address + if isinstance(response, PinnedResponse): + if not self.__sock_mgr: + self.__sock_mgr = _ConnectionManager(response.conn, response.more_to_come) + + cmd_name = operation.name + docs = response.docs + if response.from_command: + if cmd_name != "explain": + cursor = docs[0]["cursor"] + self.__id = cursor["id"] + if cmd_name == "find": + documents = cursor["firstBatch"] + # Update the namespace used for future getMore commands. + ns = cursor.get("ns") + if ns: + self.__dbname, self.__collname = ns.split(".", 1) + else: + documents = cursor["nextBatch"] + self.__data = deque(documents) + self.__retrieved += len(documents) + else: + self.__id = 0 + self.__data = deque(docs) + self.__retrieved += len(docs) + else: + assert isinstance(response.data, _OpReply) + self.__id = response.data.cursor_id + self.__data = deque(docs) + self.__retrieved += response.data.number_returned + + if self.__id == 0: + # Don't wait for garbage collection to call __del__, return the + # socket and the session to the pool now. + self.close() + + if self.__limit and self.__id and self.__limit <= self.__retrieved: + self.close() + + def _unpack_response( + self, + response: Union[_OpReply, _OpMsg], + cursor_id: Optional[int], + codec_options: CodecOptions, + user_fields: Optional[Mapping[str, Any]] = None, + legacy_response: bool = False, + ) -> Sequence[_DocumentOut]: + return response.unpack_response(cursor_id, codec_options, user_fields, legacy_response) + + def _read_preference(self) -> _ServerMode: + if self.__read_preference is None: + # Save the read preference for getMore commands. 
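+            # getMore requests for this cursor are routed with the same
+            # preference that the initial query resolved to, so the value
+            # is computed once and cached for the cursor's lifetime.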
+ self.__read_preference = self.__collection._read_preference_for(self.session) + return self.__read_preference + + def _refresh(self) -> int: + """Refreshes the cursor with more data from Mongo. + + Returns the length of self.__data after refresh. Will exit early if + self.__data is already non-empty. Raises OperationFailure when the + cursor cannot be refreshed due to an error on the query. + """ + if len(self.__data) or self.__killed: + return len(self.__data) + + if not self.__session: + self.__session = self.__collection.database.client._ensure_session() + + if self.__id is None: # Query + if (self.__min or self.__max) and not self.__hint: + raise InvalidOperation( + "Passing a 'hint' is required when using the min/max query" + " option to ensure the query utilizes the correct index" + ) + q = self._query_class( + self.__query_flags, + self.__collection.database.name, + self.__collection.name, + self.__skip, + self.__query_spec(), + self.__projection, + self.__codec_options, + self._read_preference(), + self.__limit, + self.__batch_size, + self.__read_concern, + self.__collation, + self.__session, + self.__collection.database.client, + self.__allow_disk_use, + self.__exhaust, + ) + self.__send_message(q) + elif self.__id: # Get More + if self.__limit: + limit = self.__limit - self.__retrieved + if self.__batch_size: + limit = min(limit, self.__batch_size) + else: + limit = self.__batch_size + # Exhaust cursors don't send getMore messages. + g = self._getmore_class( + self.__dbname, + self.__collname, + limit, + self.__id, + self.__codec_options, + self._read_preference(), + self.__session, + self.__collection.database.client, + self.__max_await_time_ms, + self.__sock_mgr, + self.__exhaust, + self.__comment, + ) + self.__send_message(g) + + return len(self.__data) + + @property + def alive(self) -> bool: + """Does this cursor have the potential to return more data? + + This is mostly useful with `tailable cursors + <https://www.mongodb.com/docs/manual/core/tailable-cursors/>`_ + since they will stop iterating even though they *may* return more + results in the future. + + With regular cursors, simply use a for loop instead of :attr:`alive`:: + + for doc in collection.find(): + print(doc) + + .. note:: Even if :attr:`alive` is True, :meth:`next` can raise + :exc:`StopIteration`. :attr:`alive` can also be True while iterating + a cursor from a failed server. In this case :attr:`alive` will + return False after :meth:`next` fails to retrieve the next batch + of results from the server. + """ + return bool(len(self.__data) or (not self.__killed)) + + @property + def cursor_id(self) -> Optional[int]: + """Returns the id of the cursor + + .. versionadded:: 2.2 + """ + return self.__id + + @property + def address(self) -> Optional[Tuple[str, Any]]: + """The (host, port) of the server used, or None. + + .. versionchanged:: 3.0 + Renamed from "conn_id". + """ + return self.__address + + @property + def session(self) -> Optional[ClientSession]: + """The cursor's :class:`~pymongo.client_session.ClientSession`, or None. + + .. 
versionadded:: 3.6 + """ + if self.__explicit_session: + return self.__session + return None + + def __iter__(self) -> "Cursor[_DocumentType]": + return self + + def next(self) -> _DocumentType: + """Advance the cursor.""" + if self.__empty: + raise StopIteration + if len(self.__data) or self._refresh(): + return self.__data.popleft() + else: + raise StopIteration + + __next__ = next + + def __enter__(self) -> "Cursor[_DocumentType]": + return self + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: + self.close() + + def __copy__(self) -> "Cursor[_DocumentType]": + """Support function for `copy.copy()`. + + .. versionadded:: 2.4 + """ + return self._clone(deepcopy=False) + + def __deepcopy__(self, memo: Any) -> Any: + """Support function for `copy.deepcopy()`. + + .. versionadded:: 2.4 + """ + return self._clone(deepcopy=True) + + @overload + def _deepcopy(self, x: Iterable, memo: Optional[Dict[int, Union[List, Dict]]] = None) -> List: + ... + + @overload + def _deepcopy( + self, x: SupportsItems, memo: Optional[Dict[int, Union[List, Dict]]] = None + ) -> Dict: + ... + + def _deepcopy( + self, x: Union[Iterable, SupportsItems], memo: Optional[Dict[int, Union[List, Dict]]] = None + ) -> Union[List, Dict]: + """Deepcopy helper for the data dictionary or list. + + Regular expressions cannot be deep copied but as they are immutable we + don't have to copy them when cloning. + """ + y: Union[List, Dict] + iterator: Iterable[Tuple[Any, Any]] + if not hasattr(x, "items"): + y, is_list, iterator = [], True, enumerate(x) + else: + y, is_list, iterator = {}, False, cast("SupportsItems", x).items() + if memo is None: + memo = {} + val_id = id(x) + if val_id in memo: + return memo[val_id] + memo[val_id] = y + + for key, value in iterator: + if isinstance(value, (dict, list)) and not isinstance(value, SON): + value = self._deepcopy(value, memo) + elif not isinstance(value, RE_TYPE): + value = copy.deepcopy(value, memo) + + if is_list: + y.append(value) # type: ignore[union-attr] + else: + if not isinstance(key, RE_TYPE): + key = copy.deepcopy(key, memo) + y[key] = value + return y + + +class RawBatchCursor(Cursor, Generic[_DocumentType]): + """A cursor / iterator over raw batches of BSON data from a query result.""" + + _query_class = _RawBatchQuery + _getmore_class = _RawBatchGetMore + + def __init__(self, collection: Collection[_DocumentType], *args: Any, **kwargs: Any) -> None: + """Create a new cursor / iterator over raw batches of BSON data. + + Should not be called directly by application developers - + see :meth:`~pymongo.collection.Collection.find_raw_batches` + instead. + + .. seealso:: The MongoDB documentation on `cursors <https://dochub.mongodb.org/core/cursors>`_. + """ + super().__init__(collection, *args, **kwargs) + + def _unpack_response( + self, + response: Union[_OpReply, _OpMsg], + cursor_id: Optional[int], + codec_options: CodecOptions[Mapping[str, Any]], + user_fields: Optional[Mapping[str, Any]] = None, + legacy_response: bool = False, + ) -> List[_DocumentOut]: + raw_response = response.raw_response(cursor_id, user_fields=user_fields) + if not legacy_response: + # OP_MSG returns firstBatch/nextBatch documents as a BSON array + # Re-assemble the array of documents into a document stream + _convert_raw_document_lists_to_streams(raw_response[0]) + return cast(List["_DocumentOut"], raw_response) + + def explain(self) -> _DocumentType: + """Returns an explain plan record for this cursor. + + .. 
seealso:: The MongoDB documentation on `explain <https://dochub.mongodb.org/core/explain>`_. + """ + clone = self._clone(deepcopy=True, base=Cursor(self.collection)) + return clone.explain() + + def __getitem__(self, index: Any) -> NoReturn: + raise InvalidOperation("Cannot call __getitem__ on RawBatchCursor") diff --git a/backend/test/lib/python3.8/site-packages/pymongo/daemon.py b/backend/test/lib/python3.8/site-packages/pymongo/daemon.py new file mode 100644 index 0000000000000000000000000000000000000000..643eb58b6e162736f8d22ff078f3136e33303bf8 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/daemon.py @@ -0,0 +1,143 @@ +# Copyright 2019-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Support for spawning a daemon process. + +PyMongo only attempts to spawn the mongocryptd daemon process when automatic +client-side field level encryption is enabled. See +:ref:`automatic-client-side-encryption` for more info. +""" + +import os +import subprocess +import sys +import warnings +from typing import Optional, Sequence + +# The maximum amount of time to wait for the intermediate subprocess. +_WAIT_TIMEOUT = 10 +_THIS_FILE = os.path.realpath(__file__) + + +def _popen_wait(popen: subprocess.Popen, timeout: Optional[float]) -> Optional[int]: + """Implement wait timeout support for Python 3.""" + try: + return popen.wait(timeout=timeout) + except subprocess.TimeoutExpired: + # Silence TimeoutExpired errors. + return None + + +def _silence_resource_warning(popen: Optional[subprocess.Popen]) -> None: + """Silence Popen's ResourceWarning. + + Note this should only be used if the process was created as a daemon. + """ + # Set the returncode to avoid this warning when popen is garbage collected: + # "ResourceWarning: subprocess XXX is still running". + # See https://bugs.python.org/issue38890 and + # https://bugs.python.org/issue26741. + # popen is None when mongocryptd spawning fails + if popen is not None: + popen.returncode = 0 + + +if sys.platform == "win32": + # On Windows we spawn the daemon process simply by using DETACHED_PROCESS. + _DETACHED_PROCESS = getattr(subprocess, "DETACHED_PROCESS", 0x00000008) + + def _spawn_daemon(args: Sequence[str]) -> None: + """Spawn a daemon process (Windows).""" + try: + with open(os.devnull, "r+b") as devnull: + popen = subprocess.Popen( + args, + creationflags=_DETACHED_PROCESS, + stdin=devnull, + stderr=devnull, + stdout=devnull, + ) + _silence_resource_warning(popen) + except FileNotFoundError as exc: + warnings.warn( + f"Failed to start {args[0]}: is it on your $PATH?\nOriginal exception: {exc}", + RuntimeWarning, + stacklevel=2, + ) + +else: + # On Unix we spawn the daemon process with a double Popen. + # 1) The first Popen runs this file as a Python script using the current + # interpreter. + # 2) The script then decouples itself and performs the second Popen to + # spawn the daemon process. + # 3) The original process waits up to 10 seconds for the script to exit. 
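+    #
+    # Schematically (daemon name and arguments are illustrative only):
+    #   application process
+    #     -> Popen([sys.executable, daemon.py, "mongocryptd", ...])
+    #          -> os.setsid(); _spawn(["mongocryptd", ...]); os._exit(0)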
+ # + # Note that we do not call fork() directly because we want this procedure + # to be safe to call from any thread. Using Popen instead of fork also + # avoids triggering the application's os.register_at_fork() callbacks when + # we spawn the mongocryptd daemon process. + def _spawn(args: Sequence[str]) -> Optional[subprocess.Popen]: + """Spawn the process and silence stdout/stderr.""" + try: + with open(os.devnull, "r+b") as devnull: + return subprocess.Popen( + args, close_fds=True, stdin=devnull, stderr=devnull, stdout=devnull + ) + except FileNotFoundError as exc: + warnings.warn( + f"Failed to start {args[0]}: is it on your $PATH?\nOriginal exception: {exc}", + RuntimeWarning, + stacklevel=2, + ) + return None + + def _spawn_daemon_double_popen(args: Sequence[str]) -> None: + """Spawn a daemon process using a double subprocess.Popen.""" + spawner_args = [sys.executable, _THIS_FILE] + spawner_args.extend(args) + temp_proc = subprocess.Popen(spawner_args, close_fds=True) + # Reap the intermediate child process to avoid creating zombie + # processes. + _popen_wait(temp_proc, _WAIT_TIMEOUT) + + def _spawn_daemon(args: Sequence[str]) -> None: + """Spawn a daemon process (Unix).""" + # "If Python is unable to retrieve the real path to its executable, + # sys.executable will be an empty string or None". + if sys.executable: + _spawn_daemon_double_popen(args) + else: + # Fallback to spawn a non-daemon process without silencing the + # resource warning. We do not use fork here because it is not + # safe to call from a thread on all systems. + # Unfortunately, this means that: + # 1) If the parent application is killed via Ctrl-C, the + # non-daemon process will also be killed. + # 2) Each non-daemon process will hang around as a zombie process + # until the main application exits. + _spawn(args) + + if __name__ == "__main__": + # Attempt to start a new session to decouple from the parent. + if hasattr(os, "setsid"): + try: + os.setsid() + except OSError: + pass + + # We are performing a double fork (Popen) to spawn the process as a + # daemon so it is safe to ignore the resource warning. + _silence_resource_warning(_spawn(sys.argv[1:])) + os._exit(0) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/database.py b/backend/test/lib/python3.8/site-packages/pymongo/database.py new file mode 100644 index 0000000000000000000000000000000000000000..133061424f7646160c16c5dc2d831c479e7e4f18 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/database.py @@ -0,0 +1,1386 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
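+
+# A typical access pattern for the objects defined here (sketch only;
+# the connection string and names are illustrative):
+#
+#   client = MongoClient("mongodb://localhost:27017")
+#   db = client["app"]          # a Database instance
+#   users = db.users            # a Collection via attribute access
+#   analytics = db.with_options(read_preference=ReadPreference.SECONDARY)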
+ +"""Database level operations.""" +from __future__ import annotations + +from copy import deepcopy +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Generic, + List, + Mapping, + MutableMapping, + NoReturn, + Optional, + Sequence, + TypeVar, + Union, + cast, + overload, +) + +from bson.codec_options import DEFAULT_CODEC_OPTIONS, CodecOptions +from bson.dbref import DBRef +from bson.son import SON +from bson.timestamp import Timestamp +from pymongo import _csot, common +from pymongo.aggregation import _DatabaseAggregationCommand +from pymongo.change_stream import DatabaseChangeStream +from pymongo.collection import Collection +from pymongo.command_cursor import CommandCursor +from pymongo.common import _ecoc_coll_name, _esc_coll_name +from pymongo.errors import CollectionInvalid, InvalidName, InvalidOperation +from pymongo.read_preferences import ReadPreference, _ServerMode +from pymongo.typings import _CollationIn, _DocumentType, _DocumentTypeArg, _Pipeline + +if TYPE_CHECKING: + from pymongo.pool import Connection + from pymongo.server import Server + + +def _check_name(name: str) -> None: + """Check if a database name is valid.""" + if not name: + raise InvalidName("database name cannot be the empty string") + + for invalid_char in [" ", ".", "$", "/", "\\", "\x00", '"']: + if invalid_char in name: + raise InvalidName("database names cannot contain the character %r" % invalid_char) + + +if TYPE_CHECKING: + import bson + import bson.codec_options + from pymongo.client_session import ClientSession + from pymongo.mongo_client import MongoClient + from pymongo.read_concern import ReadConcern + from pymongo.write_concern import WriteConcern + + +_CodecDocumentType = TypeVar("_CodecDocumentType", bound=Mapping[str, Any]) + + +class Database(common.BaseObject, Generic[_DocumentType]): + """A Mongo database.""" + + def __init__( + self, + client: "MongoClient[_DocumentType]", + name: str, + codec_options: Optional[bson.CodecOptions[_DocumentTypeArg]] = None, + read_preference: Optional[_ServerMode] = None, + write_concern: Optional[WriteConcern] = None, + read_concern: Optional[ReadConcern] = None, + ) -> None: + """Get a database by client and name. + + Raises :class:`TypeError` if `name` is not an instance of + :class:`str`. Raises :class:`~pymongo.errors.InvalidName` if + `name` is not a valid database name. + + :Parameters: + - `client`: A :class:`~pymongo.mongo_client.MongoClient` instance. + - `name`: The database name. + - `codec_options` (optional): An instance of + :class:`~bson.codec_options.CodecOptions`. If ``None`` (the + default) client.codec_options is used. + - `read_preference` (optional): The read preference to use. If + ``None`` (the default) client.read_preference is used. + - `write_concern` (optional): An instance of + :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the + default) client.write_concern is used. + - `read_concern` (optional): An instance of + :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the + default) client.read_concern is used. + + .. seealso:: The MongoDB documentation on `databases <https://dochub.mongodb.org/core/databases>`_. + + .. versionchanged:: 4.0 + Removed the eval, system_js, error, last_status, previous_error, + reset_error_history, authenticate, logout, collection_names, + current_op, add_user, remove_user, profiling_level, + set_profiling_level, and profiling_info methods. + See the :ref:`pymongo4-migration-guide`. + + .. versionchanged:: 3.2 + Added the read_concern option. + + .. 
versionchanged:: 3.0 + Added the codec_options, read_preference, and write_concern options. + :class:`~pymongo.database.Database` no longer returns an instance + of :class:`~pymongo.collection.Collection` for attribute names + with leading underscores. You must use dict-style lookups instead:: + + db['__my_collection__'] + + Not: + + db.__my_collection__ + """ + super().__init__( + codec_options or client.codec_options, + read_preference or client.read_preference, + write_concern or client.write_concern, + read_concern or client.read_concern, + ) + + if not isinstance(name, str): + raise TypeError("name must be an instance of str") + + if name != "$external": + _check_name(name) + + self.__name = name + self.__client: MongoClient[_DocumentType] = client + self._timeout = client.options.timeout + + @property + def client(self) -> "MongoClient[_DocumentType]": + """The client instance for this :class:`Database`.""" + return self.__client + + @property + def name(self) -> str: + """The name of this :class:`Database`.""" + return self.__name + + def with_options( + self, + codec_options: Optional[bson.CodecOptions[_DocumentTypeArg]] = None, + read_preference: Optional[_ServerMode] = None, + write_concern: Optional[WriteConcern] = None, + read_concern: Optional[ReadConcern] = None, + ) -> "Database[_DocumentType]": + """Get a clone of this database changing the specified settings. + + >>> db1.read_preference + Primary() + >>> from pymongo.read_preferences import Secondary + >>> db2 = db1.with_options(read_preference=Secondary([{'node': 'analytics'}])) + >>> db1.read_preference + Primary() + >>> db2.read_preference + Secondary(tag_sets=[{'node': 'analytics'}], max_staleness=-1, hedge=None) + + :Parameters: + - `codec_options` (optional): An instance of + :class:`~bson.codec_options.CodecOptions`. If ``None`` (the + default) the :attr:`codec_options` of this :class:`Collection` + is used. + - `read_preference` (optional): The read preference to use. If + ``None`` (the default) the :attr:`read_preference` of this + :class:`Collection` is used. See :mod:`~pymongo.read_preferences` + for options. + - `write_concern` (optional): An instance of + :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the + default) the :attr:`write_concern` of this :class:`Collection` + is used. + - `read_concern` (optional): An instance of + :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the + default) the :attr:`read_concern` of this :class:`Collection` + is used. + + .. versionadded:: 3.8 + """ + return Database( + self.client, + self.__name, + codec_options or self.codec_options, + read_preference or self.read_preference, + write_concern or self.write_concern, + read_concern or self.read_concern, + ) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, Database): + return self.__client == other.client and self.__name == other.name + return NotImplemented + + def __ne__(self, other: Any) -> bool: + return not self == other + + def __hash__(self) -> int: + return hash((self.__client, self.__name)) + + def __repr__(self) -> str: + return f"Database({self.__client!r}, {self.__name!r})" + + def __getattr__(self, name: str) -> Collection[_DocumentType]: + """Get a collection of this database by name. + + Raises InvalidName if an invalid collection name is used. + + :Parameters: + - `name`: the name of the collection to get + """ + if name.startswith("_"): + raise AttributeError( + "Database has no attribute {!r}. 
To access the {}" + " collection, use database[{!r}].".format(name, name, name) + ) + return self.__getitem__(name) + + def __getitem__(self, name: str) -> "Collection[_DocumentType]": + """Get a collection of this database by name. + + Raises InvalidName if an invalid collection name is used. + + :Parameters: + - `name`: the name of the collection to get + """ + return Collection(self, name) + + def get_collection( + self, + name: str, + codec_options: Optional[bson.CodecOptions[_DocumentTypeArg]] = None, + read_preference: Optional[_ServerMode] = None, + write_concern: Optional[WriteConcern] = None, + read_concern: Optional[ReadConcern] = None, + ) -> Collection[_DocumentType]: + """Get a :class:`~pymongo.collection.Collection` with the given name + and options. + + Useful for creating a :class:`~pymongo.collection.Collection` with + different codec options, read preference, and/or write concern from + this :class:`Database`. + + >>> db.read_preference + Primary() + >>> coll1 = db.test + >>> coll1.read_preference + Primary() + >>> from pymongo import ReadPreference + >>> coll2 = db.get_collection( + ... 'test', read_preference=ReadPreference.SECONDARY) + >>> coll2.read_preference + Secondary(tag_sets=None) + + :Parameters: + - `name`: The name of the collection - a string. + - `codec_options` (optional): An instance of + :class:`~bson.codec_options.CodecOptions`. If ``None`` (the + default) the :attr:`codec_options` of this :class:`Database` is + used. + - `read_preference` (optional): The read preference to use. If + ``None`` (the default) the :attr:`read_preference` of this + :class:`Database` is used. See :mod:`~pymongo.read_preferences` + for options. + - `write_concern` (optional): An instance of + :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the + default) the :attr:`write_concern` of this :class:`Database` is + used. + - `read_concern` (optional): An instance of + :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the + default) the :attr:`read_concern` of this :class:`Database` is + used. + """ + return Collection( + self, + name, + False, + codec_options, + read_preference, + write_concern, + read_concern, + ) + + def _get_encrypted_fields( + self, kwargs: Mapping[str, Any], coll_name: str, ask_db: bool + ) -> Optional[Mapping[str, Any]]: + encrypted_fields = kwargs.get("encryptedFields") + if encrypted_fields: + return deepcopy(encrypted_fields) + if ( + self.client.options.auto_encryption_opts + and self.client.options.auto_encryption_opts._encrypted_fields_map + and self.client.options.auto_encryption_opts._encrypted_fields_map.get( + f"{self.name}.{coll_name}" + ) + ): + return deepcopy( + self.client.options.auto_encryption_opts._encrypted_fields_map[ + f"{self.name}.{coll_name}" + ] + ) + if ask_db and self.client.options.auto_encryption_opts: + options = self[coll_name].options() + if options.get("encryptedFields"): + return deepcopy(options["encryptedFields"]) + return None + + @_csot.apply + def create_collection( + self, + name: str, + codec_options: Optional[bson.CodecOptions[_DocumentTypeArg]] = None, + read_preference: Optional[_ServerMode] = None, + write_concern: Optional[WriteConcern] = None, + read_concern: Optional[ReadConcern] = None, + session: Optional[ClientSession] = None, + check_exists: Optional[bool] = True, + **kwargs: Any, + ) -> Collection[_DocumentType]: + """Create a new :class:`~pymongo.collection.Collection` in this + database. + + Normally collection creation is automatic. 
This method should + only be used to specify options on + creation. :class:`~pymongo.errors.CollectionInvalid` will be + raised if the collection already exists. + + :Parameters: + - `name`: the name of the collection to create + - `codec_options` (optional): An instance of + :class:`~bson.codec_options.CodecOptions`. If ``None`` (the + default) the :attr:`codec_options` of this :class:`Database` is + used. + - `read_preference` (optional): The read preference to use. If + ``None`` (the default) the :attr:`read_preference` of this + :class:`Database` is used. + - `write_concern` (optional): An instance of + :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the + default) the :attr:`write_concern` of this :class:`Database` is + used. + - `read_concern` (optional): An instance of + :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the + default) the :attr:`read_concern` of this :class:`Database` is + used. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - ``check_exists`` (optional): if True (the default), send a listCollections command to + check if the collection already exists before creation. + - `**kwargs` (optional): additional keyword arguments will + be passed as options for the `create collection command`_ + + All optional `create collection command`_ parameters should be passed + as keyword arguments to this method. Valid options include, but are not + limited to: + + - ``size`` (int): desired initial size for the collection (in + bytes). For capped collections this size is the max + size of the collection. + - ``capped`` (bool): if True, this is a capped collection + - ``max`` (int): maximum number of objects if capped (optional) + - ``timeseries`` (dict): a document specifying configuration options for + timeseries collections + - ``expireAfterSeconds`` (int): the number of seconds after which a + document in a timeseries collection expires + - ``validator`` (dict): a document specifying validation rules or expressions + for the collection + - ``validationLevel`` (str): how strictly to apply the + validation rules to existing documents during an update. The default level + is "strict" + - ``validationAction`` (str): whether to "error" on invalid documents + (the default) or just "warn" about the violations but allow invalid + documents to be inserted + - ``indexOptionDefaults`` (dict): a document specifying a default configuration + for indexes when creating a collection + - ``viewOn`` (str): the name of the source collection or view from which + to create the view + - ``pipeline`` (list): a list of aggregation pipeline stages + - ``comment`` (str): a user-provided comment to attach to this command. + This option is only supported on MongoDB >= 4.4. + - ``encryptedFields`` (dict): **(BETA)** Document that describes the encrypted fields for + Queryable Encryption. For example:: + + { + "escCollection": "enxcol_.encryptedCollection.esc", + "ecocCollection": "enxcol_.encryptedCollection.ecoc", + "fields": [ + { + "path": "firstName", + "keyId": Binary.from_uuid(UUID('00000000-0000-0000-0000-000000000000')), + "bsonType": "string", + "queries": {"queryType": "equality"} + }, + { + "path": "ssn", + "keyId": Binary.from_uuid(UUID('04104104-1041-0410-4104-104104104104')), + "bsonType": "string" + } + ] + } + - ``clusteredIndex`` (dict): Document that specifies the clustered index + configuration. 
It must have the following form:: + + { + // key pattern must be {_id: 1} + key: <key pattern>, // required + unique: <bool>, // required, must be `true` + name: <string>, // optional, otherwise automatically generated + v: <int>, // optional, must be `2` if provided + } + - ``changeStreamPreAndPostImages`` (dict): a document with a boolean field ``enabled`` for + enabling pre- and post-images. + + .. versionchanged:: 4.2 + Added the ``check_exists``, ``clusteredIndex``, and ``encryptedFields`` parameters. + + .. versionchanged:: 3.11 + This method is now supported inside multi-document transactions + with MongoDB 4.4+. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. versionchanged:: 3.4 + Added the collation option. + + .. versionchanged:: 3.0 + Added the codec_options, read_preference, and write_concern options. + + .. _create collection command: + https://mongodb.com/docs/manual/reference/command/create + """ + encrypted_fields = self._get_encrypted_fields(kwargs, name, False) + if encrypted_fields: + common.validate_is_mapping("encryptedFields", encrypted_fields) + kwargs["encryptedFields"] = encrypted_fields + + clustered_index = kwargs.get("clusteredIndex") + if clustered_index: + common.validate_is_mapping("clusteredIndex", clustered_index) + + with self.__client._tmp_session(session) as s: + # Skip this check in a transaction where listCollections is not + # supported. + if ( + check_exists + and (not s or not s.in_transaction) + and name in self.list_collection_names(filter={"name": name}, session=s) + ): + raise CollectionInvalid("collection %s already exists" % name) + return Collection( + self, + name, + True, + codec_options, + read_preference, + write_concern, + read_concern, + session=s, + **kwargs, + ) + + def aggregate( + self, pipeline: _Pipeline, session: Optional[ClientSession] = None, **kwargs: Any + ) -> CommandCursor[_DocumentType]: + """Perform a database-level aggregation. + + See the `aggregation pipeline`_ documentation for a list of stages + that are supported. + + .. code-block:: python + + # Lists all operations currently running on the server. + with client.admin.aggregate([{"$currentOp": {}}]) as cursor: + for operation in cursor: + print(operation) + + The :meth:`aggregate` method obeys the :attr:`read_preference` of this + :class:`Database`, except when ``$out`` or ``$merge`` are used, in + which case :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY` + is used. + + .. note:: This method does not support the 'explain' option. Please + use :meth:`~pymongo.database.Database.command` instead. + + .. note:: The :attr:`~pymongo.database.Database.write_concern` of + this collection is automatically applied to this operation. + + :Parameters: + - `pipeline`: a list of aggregation pipeline stages + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `**kwargs` (optional): extra `aggregate command`_ parameters. + + All optional `aggregate command`_ parameters should be passed as + keyword arguments to this method. Valid options include, but are not + limited to: + + - `allowDiskUse` (bool): Enables writing to temporary files. When set + to True, aggregation stages can write data to the _tmp subdirectory + of the --dbpath directory. The default is False. + - `maxTimeMS` (int): The maximum amount of time to allow the operation + to run in milliseconds. + - `batchSize` (int): The maximum number of documents to return per + batch. 
Ignored if the connected mongod or mongos does not support + returning aggregate results using a cursor. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `let` (dict): A dict of parameter names and values. Values must be + constant or closed expressions that do not reference document + fields. Parameters can then be accessed as variables in an + aggregate expression context (e.g. ``"$$var"``). This option is + only supported on MongoDB >= 5.0. + + :Returns: + A :class:`~pymongo.command_cursor.CommandCursor` over the result + set. + + .. versionadded:: 3.9 + + .. _aggregation pipeline: + https://mongodb.com/docs/manual/reference/operator/aggregation-pipeline + + .. _aggregate command: + https://mongodb.com/docs/manual/reference/command/aggregate + """ + with self.client._tmp_session(session, close=False) as s: + cmd = _DatabaseAggregationCommand( + self, + CommandCursor, + pipeline, + kwargs, + session is not None, + user_fields={"cursor": {"firstBatch": 1}}, + ) + return self.client._retryable_read( + cmd.get_cursor, cmd.get_read_preference(s), s, retryable=not cmd._performs_write # type: ignore[arg-type] + ) + + def watch( + self, + pipeline: Optional[_Pipeline] = None, + full_document: Optional[str] = None, + resume_after: Optional[Mapping[str, Any]] = None, + max_await_time_ms: Optional[int] = None, + batch_size: Optional[int] = None, + collation: Optional[_CollationIn] = None, + start_at_operation_time: Optional[Timestamp] = None, + session: Optional[ClientSession] = None, + start_after: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + full_document_before_change: Optional[str] = None, + show_expanded_events: Optional[bool] = None, + ) -> DatabaseChangeStream[_DocumentType]: + """Watch changes on this database. + + Performs an aggregation with an implicit initial ``$changeStream`` + stage and returns a + :class:`~pymongo.change_stream.DatabaseChangeStream` cursor which + iterates over changes on all collections in this database. + + Introduced in MongoDB 4.0. + + .. code-block:: python + + with db.watch() as stream: + for change in stream: + print(change) + + The :class:`~pymongo.change_stream.DatabaseChangeStream` iterable + blocks until the next change document is returned or an error is + raised. If the + :meth:`~pymongo.change_stream.DatabaseChangeStream.next` method + encounters a network error when retrieving a batch from the server, + it will automatically attempt to recreate the cursor such that no + change events are missed. Any error encountered during the resume + attempt indicates there may be an outage and will be raised. + + .. code-block:: python + + try: + with db.watch([{"$match": {"operationType": "insert"}}]) as stream: + for insert_change in stream: + print(insert_change) + except pymongo.errors.PyMongoError: + # The ChangeStream encountered an unrecoverable error or the + # resume attempt failed to recreate the cursor. + logging.error("...") + + For a precise description of the resume process see the + `change streams specification`_. + + :Parameters: + - `pipeline` (optional): A list of aggregation pipeline stages to + append to an initial ``$changeStream`` stage. Not all + pipeline stages are valid after a ``$changeStream`` stage, see the + MongoDB documentation on change streams for the supported stages. + - `full_document` (optional): The fullDocument to pass as an option + to the ``$changeStream`` stage. Allowed values: 'updateLookup', + 'whenAvailable', 'required'. 
When set to 'updateLookup', the + change notification for partial updates will include both a delta + describing the changes to the document, as well as a copy of the + entire document that was changed from some time after the change + occurred. + - `full_document_before_change`: Allowed values: 'whenAvailable' + and 'required'. Change events may now result in a + 'fullDocumentBeforeChange' response field. + - `resume_after` (optional): A resume token. If provided, the + change stream will start returning changes that occur directly + after the operation specified in the resume token. A resume token + is the _id value of a change document. + - `max_await_time_ms` (optional): The maximum time in milliseconds + for the server to wait for changes before responding to a getMore + operation. + - `batch_size` (optional): The maximum number of documents to return + per batch. + - `collation` (optional): The :class:`~pymongo.collation.Collation` + to use for the aggregation. + - `start_at_operation_time` (optional): If provided, the resulting + change stream will only return changes that occurred at or after + the specified :class:`~bson.timestamp.Timestamp`. Requires + MongoDB >= 4.0. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `start_after` (optional): The same as `resume_after` except that + `start_after` can resume notifications after an invalidate event. + This option and `resume_after` are mutually exclusive. + - `comment` (optional): A user-provided comment to attach to this + command. + - `show_expanded_events` (optional): Include expanded events such as DDL events like `dropIndexes`. + + :Returns: + A :class:`~pymongo.change_stream.DatabaseChangeStream` cursor. + + .. versionchanged:: 4.3 + Added `show_expanded_events` parameter. + + .. versionchanged:: 4.2 + Added ``full_document_before_change`` parameter. + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + + .. versionchanged:: 3.9 + Added the ``start_after`` parameter. + + .. versionadded:: 3.7 + + .. seealso:: The MongoDB documentation on `changeStreams <https://mongodb.com/docs/manual/changeStreams/>`_. + + .. _change streams specification: + https://github.com/mongodb/specifications/blob/master/source/change-streams/change-streams.rst + """ + return DatabaseChangeStream( + self, + pipeline, + full_document, + resume_after, + max_await_time_ms, + batch_size, + collation, + start_at_operation_time, + session, + start_after, + comment, + full_document_before_change, + show_expanded_events=show_expanded_events, + ) + + @overload + def _command( + self, + conn: Connection, + command: Union[str, MutableMapping[str, Any]], + value: int = 1, + check: bool = True, + allowable_errors: Optional[Sequence[Union[str, int]]] = None, + read_preference: _ServerMode = ReadPreference.PRIMARY, + codec_options: CodecOptions[Dict[str, Any]] = DEFAULT_CODEC_OPTIONS, + write_concern: Optional[WriteConcern] = None, + parse_write_concern_error: bool = False, + session: Optional[ClientSession] = None, + **kwargs: Any, + ) -> Dict[str, Any]: + ... 
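+
+    # Typing note: the overload above covers the default codec options,
+    # which decode to a plain ``Dict[str, Any]``; the overload below
+    # threads a custom ``CodecOptions[_CodecDocumentType]`` through to
+    # the declared result type. The implementation follows.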
+ + @overload + def _command( + self, + conn: Connection, + command: Union[str, MutableMapping[str, Any]], + value: int = 1, + check: bool = True, + allowable_errors: Optional[Sequence[Union[str, int]]] = None, + read_preference: _ServerMode = ReadPreference.PRIMARY, + codec_options: CodecOptions[_CodecDocumentType] = ..., + write_concern: Optional[WriteConcern] = None, + parse_write_concern_error: bool = False, + session: Optional[ClientSession] = None, + **kwargs: Any, + ) -> _CodecDocumentType: + ... + + def _command( + self, + conn: Connection, + command: Union[str, MutableMapping[str, Any]], + value: int = 1, + check: bool = True, + allowable_errors: Optional[Sequence[Union[str, int]]] = None, + read_preference: _ServerMode = ReadPreference.PRIMARY, + codec_options: Union[ + CodecOptions[Dict[str, Any]], CodecOptions[_CodecDocumentType] + ] = DEFAULT_CODEC_OPTIONS, + write_concern: Optional[WriteConcern] = None, + parse_write_concern_error: bool = False, + session: Optional[ClientSession] = None, + **kwargs: Any, + ) -> Union[Dict[str, Any], _CodecDocumentType]: + """Internal command helper.""" + if isinstance(command, str): + command = SON([(command, value)]) + + command.update(kwargs) + with self.__client._tmp_session(session) as s: + return conn.command( + self.__name, + command, + read_preference, + codec_options, + check, + allowable_errors, + write_concern=write_concern, + parse_write_concern_error=parse_write_concern_error, + session=s, + client=self.__client, + ) + + @overload + def command( + self, + command: Union[str, MutableMapping[str, Any]], + value: Any = 1, + check: bool = True, + allowable_errors: Optional[Sequence[Union[str, int]]] = None, + read_preference: Optional[_ServerMode] = None, + codec_options: None = None, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> Dict[str, Any]: + ... + + @overload + def command( + self, + command: Union[str, MutableMapping[str, Any]], + value: Any = 1, + check: bool = True, + allowable_errors: Optional[Sequence[Union[str, int]]] = None, + read_preference: Optional[_ServerMode] = None, + codec_options: CodecOptions[_CodecDocumentType] = ..., + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> _CodecDocumentType: + ... + + @_csot.apply + def command( + self, + command: Union[str, MutableMapping[str, Any]], + value: Any = 1, + check: bool = True, + allowable_errors: Optional[Sequence[Union[str, int]]] = None, + read_preference: Optional[_ServerMode] = None, + codec_options: "Optional[bson.codec_options.CodecOptions[_CodecDocumentType]]" = None, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> Union[Dict[str, Any], _CodecDocumentType]: + """Issue a MongoDB command. + + Send command `command` to the database and return the + response. If `command` is an instance of :class:`str` + then the command {`command`: `value`} will be sent. + Otherwise, `command` must be an instance of + :class:`dict` and will be sent as is. + + Any additional keyword arguments will be added to the final + command document before it is sent. + + For example, a command like ``{buildinfo: 1}`` can be sent + using: + + >>> db.command("buildinfo") + + For a command where the value matters, like ``{count: + collection_name}`` we can do: + + >>> db.command("count", collection_name) + + For commands that take additional arguments we can use + kwargs. 
So ``{filemd5: object_id, root: file_root}`` becomes: + + >>> db.command("filemd5", object_id, root=file_root) + + :Parameters: + - `command`: document representing the command to be issued, + or the name of the command (for simple commands only). + + .. note:: the order of keys in the `command` document is + significant (the "verb" must come first), so commands + which require multiple keys (e.g. `findandmodify`) + should use an instance of :class:`~bson.son.SON` or + a string and kwargs instead of a Python `dict`. + + - `value` (optional): value to use for the command verb when + `command` is passed as a string + - `check` (optional): check the response for errors, raising + :class:`~pymongo.errors.OperationFailure` if there are any + - `allowable_errors`: if `check` is ``True``, error messages + in this list will be ignored by error-checking + - `read_preference` (optional): The read preference for this + operation. See :mod:`~pymongo.read_preferences` for options. + If the provided `session` is in a transaction, defaults to the + read preference configured for the transaction. + Otherwise, defaults to + :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`. + - `codec_options`: A :class:`~bson.codec_options.CodecOptions` + instance. + - `session` (optional): A + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): additional keyword arguments will + be added to the command document before it is sent + + + .. note:: :meth:`command` does **not** obey this Database's + :attr:`read_preference` or :attr:`codec_options`. You must use the + ``read_preference`` and ``codec_options`` parameters instead. + + .. note:: :meth:`command` does **not** apply any custom TypeDecoders + when decoding the command response. + + .. note:: If this client has been configured to use MongoDB Stable + API (see :ref:`versioned-api-ref`), then :meth:`command` will + automatically add API versioning options to the given command. + Explicitly adding API versioning options in the command and + declaring an API version on the client is not supported. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. versionchanged:: 3.0 + Removed the `as_class`, `fields`, `uuid_subtype`, `tag_sets`, + and `secondary_acceptable_latency_ms` option. + Removed `compile_re` option: PyMongo now always represents BSON + regular expressions as :class:`~bson.regex.Regex` objects. Use + :meth:`~bson.regex.Regex.try_compile` to attempt to convert from a + BSON regular expression to a Python regular expression object. + Added the ``codec_options`` parameter. + + .. seealso:: The MongoDB documentation on `commands <https://dochub.mongodb.org/core/commands>`_. 
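+
+        For commands whose document needs multiple keys in a fixed order, a
+        sketch using :class:`~bson.son.SON` (the collection name and query
+        below are placeholders, not part of the upstream examples):
+
+        >>> from bson.son import SON
+        >>> db.command(SON([("count", "people"), ("query", {"age": {"$gt": 21}})]))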
+ """ + opts = codec_options or DEFAULT_CODEC_OPTIONS + if comment is not None: + kwargs["comment"] = comment + + if read_preference is None: + read_preference = (session and session._txn_read_preference()) or ReadPreference.PRIMARY + with self.__client._conn_for_reads(read_preference, session) as ( + connection, + read_preference, + ): + return self._command( + connection, + command, + value, + check, + allowable_errors, + read_preference, + opts, + session=session, + **kwargs, + ) + + @_csot.apply + def cursor_command( + self, + command: Union[str, MutableMapping[str, Any]], + value: Any = 1, + read_preference: Optional[_ServerMode] = None, + codec_options: Optional[bson.codec_options.CodecOptions[_CodecDocumentType]] = None, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + max_await_time_ms: Optional[int] = None, + **kwargs: Any, + ) -> CommandCursor: + """Issue a MongoDB command and parse the response as a cursor. + + If the response from the server does not include a cursor field, an error will be thrown. + + Otherwise, behaves identically to issuing a normal MongoDB command. + + :Parameters: + - `command`: document representing the command to be issued, + or the name of the command (for simple commands only). + + .. note:: the order of keys in the `command` document is + significant (the "verb" must come first), so commands + which require multiple keys (e.g. `findandmodify`) + should use an instance of :class:`~bson.son.SON` or + a string and kwargs instead of a Python `dict`. + + - `value` (optional): value to use for the command verb when + `command` is passed as a string + - `read_preference` (optional): The read preference for this + operation. See :mod:`~pymongo.read_preferences` for options. + If the provided `session` is in a transaction, defaults to the + read preference configured for the transaction. + Otherwise, defaults to + :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`. + - `codec_options`: A :class:`~bson.codec_options.CodecOptions` + instance. + - `session` (optional): A + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to future getMores for this + command. + - `max_await_time_ms` (optional): The number of ms to wait for more data on future getMores for this command. + - `**kwargs` (optional): additional keyword arguments will + be added to the command document before it is sent + + .. note:: :meth:`command` does **not** obey this Database's + :attr:`read_preference` or :attr:`codec_options`. You must use the + ``read_preference`` and ``codec_options`` parameters instead. + + .. note:: :meth:`command` does **not** apply any custom TypeDecoders + when decoding the command response. + + .. note:: If this client has been configured to use MongoDB Stable + API (see :ref:`versioned-api-ref`), then :meth:`command` will + automatically add API versioning options to the given command. + Explicitly adding API versioning options in the command and + declaring an API version on the client is not supported. + + .. seealso:: The MongoDB documentation on `commands <https://dochub.mongodb.org/core/commands>`_. 
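+
+        A usage sketch (the collection name and filter are placeholders, not
+        part of the upstream examples); the ``find`` command returns a cursor
+        field, so it can be iterated directly::
+
+            for doc in db.cursor_command("find", "my_collection", filter={"x": 1}):
+                print(doc)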
+ """ + with self.__client._tmp_session(session, close=False) as tmp_session: + opts = codec_options or DEFAULT_CODEC_OPTIONS + + if read_preference is None: + read_preference = ( + tmp_session and tmp_session._txn_read_preference() + ) or ReadPreference.PRIMARY + with self.__client._conn_for_reads(read_preference, tmp_session) as ( + conn, + read_preference, + ): + response = self._command( + conn, + command, + value, + True, + None, + read_preference, + opts, + session=tmp_session, + **kwargs, + ) + coll = self.get_collection("$cmd", read_preference=read_preference) + if response.get("cursor"): + cmd_cursor = CommandCursor( + coll, + response["cursor"], + conn.address, + max_await_time_ms=max_await_time_ms, + session=tmp_session, + explicit_session=session is not None, + comment=comment, + ) + cmd_cursor._maybe_pin_connection(conn) + return cmd_cursor + else: + raise InvalidOperation("Command does not return a cursor.") + + def _retryable_read_command( + self, + command: Union[str, MutableMapping[str, Any]], + session: Optional[ClientSession] = None, + ) -> Dict[str, Any]: + """Same as command but used for retryable read commands.""" + read_preference = (session and session._txn_read_preference()) or ReadPreference.PRIMARY + + def _cmd( + session: Optional[ClientSession], + server: Server, + conn: Connection, + read_preference: _ServerMode, + ) -> Dict[str, Any]: + return self._command( + conn, + command, + read_preference=read_preference, + session=session, + ) + + return self.__client._retryable_read(_cmd, read_preference, session) + + def _list_collections( + self, + conn: Connection, + session: Optional[ClientSession], + read_preference: _ServerMode, + **kwargs: Any, + ) -> CommandCursor[MutableMapping[str, Any]]: + """Internal listCollections helper.""" + coll = cast( + Collection[MutableMapping[str, Any]], + self.get_collection("$cmd", read_preference=read_preference), + ) + cmd = SON([("listCollections", 1), ("cursor", {})]) + cmd.update(kwargs) + with self.__client._tmp_session(session, close=False) as tmp_session: + cursor = self._command(conn, cmd, read_preference=read_preference, session=tmp_session)[ + "cursor" + ] + cmd_cursor = CommandCursor( + coll, + cursor, + conn.address, + session=tmp_session, + explicit_session=session is not None, + comment=cmd.get("comment"), + ) + cmd_cursor._maybe_pin_connection(conn) + return cmd_cursor + + def list_collections( + self, + session: Optional[ClientSession] = None, + filter: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> CommandCursor[MutableMapping[str, Any]]: + """Get a cursor over the collections of this database. + + :Parameters: + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `filter` (optional): A query document to filter the list of + collections returned from the listCollections command. + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): Optional parameters of the + `listCollections command + <https://mongodb.com/docs/manual/reference/command/listCollections/>`_ + can be passed as keyword arguments to this method. The supported + options differ by server version. + + + :Returns: + An instance of :class:`~pymongo.command_cursor.CommandCursor`. + + .. 
versionadded:: 3.6 + """ + if filter is not None: + kwargs["filter"] = filter + read_pref = (session and session._txn_read_preference()) or ReadPreference.PRIMARY + if comment is not None: + kwargs["comment"] = comment + + def _cmd( + session: Optional[ClientSession], + server: Server, + conn: Connection, + read_preference: _ServerMode, + ) -> CommandCursor[MutableMapping[str, Any]]: + return self._list_collections(conn, session, read_preference=read_preference, **kwargs) + + return self.__client._retryable_read(_cmd, read_pref, session) + + def list_collection_names( + self, + session: Optional[ClientSession] = None, + filter: Optional[Mapping[str, Any]] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> List[str]: + """Get a list of all the collection names in this database. + + For example, to list all non-system collections:: + + filter = {"name": {"$regex": r"^(?!system\\.)"}} + db.list_collection_names(filter=filter) + + :Parameters: + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `filter` (optional): A query document to filter the list of + collections returned from the listCollections command. + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): Optional parameters of the + `listCollections command + <https://mongodb.com/docs/manual/reference/command/listCollections/>`_ + can be passed as keyword arguments to this method. The supported + options differ by server version. + + + .. versionchanged:: 3.8 + Added the ``filter`` and ``**kwargs`` parameters. + + .. versionadded:: 3.6 + """ + if comment is not None: + kwargs["comment"] = comment + if filter is None: + kwargs["nameOnly"] = True + + else: + # The enumerate collections spec states that "drivers MUST NOT set + # nameOnly if a filter specifies any keys other than name." + common.validate_is_mapping("filter", filter) + kwargs["filter"] = filter + if not filter or (len(filter) == 1 and "name" in filter): + kwargs["nameOnly"] = True + + return [result["name"] for result in self.list_collections(session=session, **kwargs)] + + def _drop_helper( + self, name: str, session: Optional[ClientSession] = None, comment: Optional[Any] = None + ) -> Dict[str, Any]: + command = SON([("drop", name)]) + if comment is not None: + command["comment"] = comment + + with self.__client._conn_for_writes(session) as connection: + return self._command( + connection, + command, + allowable_errors=["ns not found", 26], + write_concern=self._write_concern_for(session), + parse_write_concern_error=True, + session=session, + ) + + @_csot.apply + def drop_collection( + self, + name_or_collection: Union[str, Collection[_DocumentTypeArg]], + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + encrypted_fields: Optional[Mapping[str, Any]] = None, + ) -> Dict[str, Any]: + """Drop a collection. + + :Parameters: + - `name_or_collection`: the name of a collection to drop or the + collection object itself + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `encrypted_fields`: **(BETA)** Document that describes the encrypted fields for + Queryable Encryption. 
For example:: + + { + "escCollection": "enxcol_.encryptedCollection.esc", + "ecocCollection": "enxcol_.encryptedCollection.ecoc", + "fields": [ + { + "path": "firstName", + "keyId": Binary.from_uuid(UUID('00000000-0000-0000-0000-000000000000')), + "bsonType": "string", + "queries": {"queryType": "equality"} + }, + { + "path": "ssn", + "keyId": Binary.from_uuid(UUID('04104104-1041-0410-4104-104104104104')), + "bsonType": "string" + } + ] + + } + + + .. note:: The :attr:`~pymongo.database.Database.write_concern` of + this database is automatically applied to this operation. + + .. versionchanged:: 4.2 + Added ``encrypted_fields`` parameter. + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. versionchanged:: 3.4 + Apply this database's write concern automatically to this operation + when connected to MongoDB >= 3.4. + + """ + name = name_or_collection + if isinstance(name, Collection): + name = name.name + + if not isinstance(name, str): + raise TypeError("name_or_collection must be an instance of str") + encrypted_fields = self._get_encrypted_fields( + {"encryptedFields": encrypted_fields}, + name, + True, + ) + if encrypted_fields: + common.validate_is_mapping("encrypted_fields", encrypted_fields) + self._drop_helper( + _esc_coll_name(encrypted_fields, name), session=session, comment=comment + ) + self._drop_helper( + _ecoc_coll_name(encrypted_fields, name), session=session, comment=comment + ) + + return self._drop_helper(name, session, comment) + + def validate_collection( + self, + name_or_collection: Union[str, Collection[_DocumentTypeArg]], + scandata: bool = False, + full: bool = False, + session: Optional[ClientSession] = None, + background: Optional[bool] = None, + comment: Optional[Any] = None, + ) -> Dict[str, Any]: + """Validate a collection. + + Returns a dict of validation info. Raises CollectionInvalid if + validation fails. + + See also the MongoDB documentation on the `validate command`_. + + :Parameters: + - `name_or_collection`: A Collection object or the name of a + collection to validate. + - `scandata`: Do extra checks beyond checking the overall + structure of the collection. + - `full`: Have the server do a more thorough scan of the + collection. Use with `scandata` for a thorough scan + of the structure of the collection and the individual + documents. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `background` (optional): A boolean flag that determines whether + the command runs in the background. Requires MongoDB 4.4+. + - `comment` (optional): A user-provided comment to attach to this + command. + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + + .. versionchanged:: 3.11 + Added ``background`` parameter. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. 
_validate command: https://mongodb.com/docs/manual/reference/command/validate/ + """ + name = name_or_collection + if isinstance(name, Collection): + name = name.name + + if not isinstance(name, str): + raise TypeError("name_or_collection must be an instance of str or Collection") + cmd = SON([("validate", name), ("scandata", scandata), ("full", full)]) + if comment is not None: + cmd["comment"] = comment + + if background is not None: + cmd["background"] = background + + result = cast(dict, self.command(cmd, session=session)) + + valid = True + # Pre 1.9 results + if "result" in result: + info = result["result"] + if info.find("exception") != -1 or info.find("corrupt") != -1: + raise CollectionInvalid(f"{name} invalid: {info}") + # Sharded results + elif "raw" in result: + for _, res in result["raw"].items(): + if "result" in res: + info = res["result"] + if info.find("exception") != -1 or info.find("corrupt") != -1: + raise CollectionInvalid(f"{name} invalid: {info}") + elif not res.get("valid", False): + valid = False + break + # Post 1.9 non-sharded results. + elif not result.get("valid", False): + valid = False + + if not valid: + raise CollectionInvalid(f"{name} invalid: {result!r}") + + return result + + # See PYTHON-3084. + __iter__ = None + + def __next__(self) -> NoReturn: + raise TypeError("'Database' object is not iterable") + + next = __next__ + + def __bool__(self) -> NoReturn: + raise NotImplementedError( + "Database objects do not implement truth " + "value testing or bool(). Please compare " + "with None instead: database is not None" + ) + + def dereference( + self, + dbref: DBRef, + session: Optional[ClientSession] = None, + comment: Optional[Any] = None, + **kwargs: Any, + ) -> Optional[_DocumentType]: + """Dereference a :class:`~bson.dbref.DBRef`, getting the + document it points to. + + Raises :class:`TypeError` if `dbref` is not an instance of + :class:`~bson.dbref.DBRef`. Returns a document, or ``None`` if + the reference does not point to a valid document. Raises + :class:`ValueError` if `dbref` has a database specified that + is different from the current database. + + :Parameters: + - `dbref`: the reference + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + - `**kwargs` (optional): any additional keyword arguments + are the same as the arguments to + :meth:`~pymongo.collection.Collection.find`. + + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + .. versionchanged:: 3.6 + Added ``session`` parameter. + """ + if not isinstance(dbref, DBRef): + raise TypeError("cannot dereference a %s" % type(dbref)) + if dbref.database is not None and dbref.database != self.__name: + raise ValueError( + "trying to dereference a DBRef that points to " + "another database ({!r} not {!r})".format(dbref.database, self.__name) + ) + return self[dbref.collection].find_one( + {"_id": dbref.id}, session=session, comment=comment, **kwargs + ) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/driver_info.py b/backend/test/lib/python3.8/site-packages/pymongo/driver_info.py new file mode 100644 index 0000000000000000000000000000000000000000..86ddfcfb3e96ae4bb01e1b821c9bd27c0b7b67a9 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/driver_info.py @@ -0,0 +1,42 @@ +# Copyright 2018-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. 
You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""Advanced options for MongoDB drivers implemented on top of PyMongo.""" + +from collections import namedtuple +from typing import Optional + + +class DriverInfo(namedtuple("DriverInfo", ["name", "version", "platform"])): + """Info about a driver wrapping PyMongo. + + The MongoDB server logs PyMongo's name, version, and platform whenever + PyMongo establishes a connection. A driver implemented on top of PyMongo + can add its own info to this log message. Initialize with three strings + like 'MyDriver', '1.2.3', 'some platform info'. Any of these strings may be + None to accept PyMongo's default. + """ + + def __new__( + cls, name: str, version: Optional[str] = None, platform: Optional[str] = None + ) -> "DriverInfo": + self = super().__new__(cls, name, version, platform) + for key, value in self._asdict().items(): + if value is not None and not isinstance(value, str): + raise TypeError( + "Wrong type for DriverInfo {} option, value " + "must be an instance of str".format(key) + ) + + return self diff --git a/backend/test/lib/python3.8/site-packages/pymongo/encryption.py b/backend/test/lib/python3.8/site-packages/pymongo/encryption.py new file mode 100644 index 0000000000000000000000000000000000000000..c3e2e340732cdb910adece440c404417241a924e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/encryption.py @@ -0,0 +1,1117 @@ +# Copyright 2019-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
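+
+# (Editorial sketch, not part of the upstream source: the explicit API in this
+# module is typically used as below; the key vault namespace and the 96-byte
+# local master key are illustrative.)
+#
+#     client_encryption = ClientEncryption(
+#         kms_providers={"local": {"key": local_master_key}},
+#         key_vault_namespace="keyvault.datakeys",
+#         key_vault_client=client,
+#         codec_options=client.codec_options,
+#     )
+#     key_id = client_encryption.create_data_key("local")
+#     encrypted = client_encryption.encrypt(
+#         "457-55-5462",
+#         Algorithm.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic,
+#         key_id=key_id,
+#     )
+#     assert client_encryption.decrypt(encrypted) == "457-55-5462"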
+ +"""Support for explicit client-side field level encryption.""" +from __future__ import annotations + +import contextlib +import enum +import socket +import weakref +from copy import deepcopy +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Generic, + Iterator, + Mapping, + MutableMapping, + Optional, + Sequence, + Tuple, +) + +try: + from pymongocrypt.auto_encrypter import AutoEncrypter + from pymongocrypt.errors import MongoCryptError # noqa: F401 + from pymongocrypt.explicit_encrypter import ExplicitEncrypter + from pymongocrypt.mongocrypt import MongoCryptOptions + from pymongocrypt.state_machine import MongoCryptCallback + + _HAVE_PYMONGOCRYPT = True +except ImportError: + _HAVE_PYMONGOCRYPT = False + MongoCryptCallback = object + +from bson import _dict_to_bson, decode, encode +from bson.binary import STANDARD, UUID_SUBTYPE, Binary +from bson.codec_options import CodecOptions +from bson.errors import BSONError +from bson.raw_bson import DEFAULT_RAW_BSON_OPTIONS, RawBSONDocument, _inflate_bson +from bson.son import SON +from pymongo import _csot +from pymongo.collection import Collection +from pymongo.common import CONNECT_TIMEOUT +from pymongo.cursor import Cursor +from pymongo.daemon import _spawn_daemon +from pymongo.database import Database +from pymongo.encryption_options import AutoEncryptionOpts, RangeOpts +from pymongo.errors import ( + ConfigurationError, + EncryptedCollectionError, + EncryptionError, + InvalidOperation, + PyMongoError, + ServerSelectionTimeoutError, +) +from pymongo.mongo_client import MongoClient +from pymongo.network import BLOCKING_IO_ERRORS +from pymongo.operations import UpdateOne +from pymongo.pool import PoolOptions, _configured_socket, _raise_connection_failure +from pymongo.read_concern import ReadConcern +from pymongo.results import BulkWriteResult, DeleteResult +from pymongo.ssl_support import get_ssl_context +from pymongo.typings import _DocumentType +from pymongo.uri_parser import parse_host +from pymongo.write_concern import WriteConcern + +if TYPE_CHECKING: + from pymongocrypt.mongocrypt import MongoCryptKmsContext + +_HTTPS_PORT = 443 +_KMS_CONNECT_TIMEOUT = CONNECT_TIMEOUT # CDRIVER-3262 redefined this value to CONNECT_TIMEOUT +_MONGOCRYPTD_TIMEOUT_MS = 10000 + + +_DATA_KEY_OPTS: CodecOptions = CodecOptions(document_class=SON, uuid_representation=STANDARD) +# Use RawBSONDocument codec options to avoid needlessly decoding +# documents from the key vault. +_KEY_VAULT_OPTS = CodecOptions(document_class=RawBSONDocument) + + +@contextlib.contextmanager +def _wrap_encryption_errors() -> Iterator[None]: + """Context manager to wrap encryption related errors.""" + try: + yield + except BSONError: + # BSON encoding/decoding errors are unrelated to encryption so + # we should propagate them unchanged. + raise + except Exception as exc: + raise EncryptionError(exc) + + +class _EncryptionIO(MongoCryptCallback): # type: ignore[misc] + def __init__( + self, + client: Optional[MongoClient], + key_vault_coll: Collection, + mongocryptd_client: Optional[MongoClient], + opts: AutoEncryptionOpts, + ): + """Internal class to perform I/O on behalf of pymongocrypt.""" + self.client_ref: Any + # Use a weak ref to break reference cycle. 
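+        # (Editorial note, not upstream: the MongoClient owns this
+        # _EncryptionIO through its _Encrypter, so a strong reference back to
+        # the client would form a cycle; weakref.ref lets the client be
+        # garbage collected normally, and client_ref() returns None once the
+        # client is gone.)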
+ if client is not None: + self.client_ref = weakref.ref(client) + else: + self.client_ref = None + self.key_vault_coll: Optional[Collection] = key_vault_coll.with_options( + codec_options=_KEY_VAULT_OPTS, + read_concern=ReadConcern(level="majority"), + write_concern=WriteConcern(w="majority"), + ) + self.mongocryptd_client = mongocryptd_client + self.opts = opts + self._spawned = False + + def kms_request(self, kms_context: MongoCryptKmsContext) -> None: + """Complete a KMS request. + + :Parameters: + - `kms_context`: A :class:`MongoCryptKmsContext`. + + :Returns: + None + """ + endpoint = kms_context.endpoint + message = kms_context.message + provider = kms_context.kms_provider + ctx = self.opts._kms_ssl_contexts.get(provider) + if ctx is None: + # Enable strict certificate verification, OCSP, match hostname, and + # SNI using the system default CA certificates. + ctx = get_ssl_context( + None, # certfile + None, # passphrase + None, # ca_certs + None, # crlfile + False, # allow_invalid_certificates + False, # allow_invalid_hostnames + False, + ) # disable_ocsp_endpoint_check + # CSOT: set timeout for socket creation. + connect_timeout = max(_csot.clamp_remaining(_KMS_CONNECT_TIMEOUT), 0.001) + opts = PoolOptions( + connect_timeout=connect_timeout, + socket_timeout=connect_timeout, + ssl_context=ctx, + ) + host, port = parse_host(endpoint, _HTTPS_PORT) + try: + conn = _configured_socket((host, port), opts) + try: + conn.sendall(message) + while kms_context.bytes_needed > 0: + # CSOT: update timeout. + conn.settimeout(max(_csot.clamp_remaining(_KMS_CONNECT_TIMEOUT), 0)) + data = conn.recv(kms_context.bytes_needed) + if not data: + raise OSError("KMS connection closed") + kms_context.feed(data) + except BLOCKING_IO_ERRORS: + raise socket.timeout("timed out") + finally: + conn.close() + except (PyMongoError, MongoCryptError): + raise # Propagate pymongo errors directly. + except Exception as error: + # Wrap I/O errors in PyMongo exceptions. + _raise_connection_failure((host, port), error) + + def collection_info(self, database: Database, filter: bytes) -> Optional[bytes]: + """Get the collection info for a namespace. + + The returned collection info is passed to libmongocrypt which reads + the JSON schema. + + :Parameters: + - `database`: The database on which to run listCollections. + - `filter`: The filter to pass to listCollections. + + :Returns: + The first document from the listCollections command response as BSON. + """ + with self.client_ref()[database].list_collections(filter=RawBSONDocument(filter)) as cursor: + for doc in cursor: + return _dict_to_bson(doc, False, _DATA_KEY_OPTS) + return None + + def spawn(self) -> None: + """Spawn mongocryptd. + + Note this method is thread safe; at most one mongocryptd will start + successfully. + """ + self._spawned = True + args = [self.opts._mongocryptd_spawn_path or "mongocryptd"] + args.extend(self.opts._mongocryptd_spawn_args) + _spawn_daemon(args) + + def mark_command(self, database: str, cmd: bytes) -> bytes: + """Mark a command for encryption. + + :Parameters: + - `database`: The database on which to run this command. + - `cmd`: The BSON command to run. + + :Returns: + The marked command response from mongocryptd. + """ + if not self._spawned and not self.opts._mongocryptd_bypass_spawn: + self.spawn() + # Database.command only supports mutable mappings so we need to decode + # the raw BSON command first. 
+ inflated_cmd = _inflate_bson(cmd, DEFAULT_RAW_BSON_OPTIONS) + assert self.mongocryptd_client is not None + try: + res = self.mongocryptd_client[database].command( + inflated_cmd, codec_options=DEFAULT_RAW_BSON_OPTIONS + ) + except ServerSelectionTimeoutError: + if self.opts._mongocryptd_bypass_spawn: + raise + self.spawn() + res = self.mongocryptd_client[database].command( + inflated_cmd, codec_options=DEFAULT_RAW_BSON_OPTIONS + ) + return res.raw + + def fetch_keys(self, filter: bytes) -> Iterator[bytes]: + """Yields one or more keys from the key vault. + + :Parameters: + - `filter`: The filter to pass to find. + + :Returns: + A generator which yields the requested keys from the key vault. + """ + assert self.key_vault_coll is not None + with self.key_vault_coll.find(RawBSONDocument(filter)) as cursor: + for key in cursor: + yield key.raw + + def insert_data_key(self, data_key: bytes) -> Binary: + """Insert a data key into the key vault. + + :Parameters: + - `data_key`: The data key document to insert. + + :Returns: + The _id of the inserted data key document. + """ + raw_doc = RawBSONDocument(data_key, _KEY_VAULT_OPTS) + data_key_id = raw_doc.get("_id") + if not isinstance(data_key_id, Binary) or data_key_id.subtype != UUID_SUBTYPE: + raise TypeError("data_key _id must be Binary with a UUID subtype") + + assert self.key_vault_coll is not None + self.key_vault_coll.insert_one(raw_doc) + return data_key_id + + def bson_encode(self, doc: MutableMapping[str, Any]) -> bytes: + """Encode a document to BSON. + + A document can be any mapping type (like :class:`dict`). + + :Parameters: + - `doc`: mapping type representing a document + + :Returns: + The encoded BSON bytes. + """ + return encode(doc) + + def close(self) -> None: + """Release resources. + + Note it is not safe to call this method from __del__ or any GC hooks. + """ + self.client_ref = None + self.key_vault_coll = None + if self.mongocryptd_client: + self.mongocryptd_client.close() + self.mongocryptd_client = None + + +class RewrapManyDataKeyResult: + """Result object returned by a :meth:`~ClientEncryption.rewrap_many_data_key` operation. + + .. versionadded:: 4.2 + """ + + def __init__(self, bulk_write_result: Optional[BulkWriteResult] = None) -> None: + self._bulk_write_result = bulk_write_result + + @property + def bulk_write_result(self) -> Optional[BulkWriteResult]: + """The result of the bulk write operation used to update the key vault + collection with one or more rewrapped data keys. If + :meth:`~ClientEncryption.rewrap_many_data_key` does not find any matching keys to rewrap, + no bulk write operation will be executed and this field will be + ``None``. + """ + return self._bulk_write_result + + +class _Encrypter: + """Encrypts and decrypts MongoDB commands. + + This class is used to support automatic encryption and decryption of + MongoDB commands. + """ + + def __init__(self, client: MongoClient, opts: AutoEncryptionOpts): + """Create a _Encrypter for a client. + + :Parameters: + - `client`: The encrypted MongoClient. + - `opts`: The encrypted client's :class:`AutoEncryptionOpts`. 
+ """ + if opts._schema_map is None: + schema_map = None + else: + schema_map = _dict_to_bson(opts._schema_map, False, _DATA_KEY_OPTS) + + if opts._encrypted_fields_map is None: + encrypted_fields_map = None + else: + encrypted_fields_map = _dict_to_bson(opts._encrypted_fields_map, False, _DATA_KEY_OPTS) + self._bypass_auto_encryption = opts._bypass_auto_encryption + self._internal_client = None + + def _get_internal_client(encrypter: _Encrypter, mongo_client: MongoClient) -> MongoClient: + if mongo_client.options.pool_options.max_pool_size is None: + # Unlimited pool size, use the same client. + return mongo_client + # Else - limited pool size, use an internal client. + if encrypter._internal_client is not None: + return encrypter._internal_client + internal_client = mongo_client._duplicate(minPoolSize=0, auto_encryption_opts=None) + encrypter._internal_client = internal_client + return internal_client + + if opts._key_vault_client is not None: + key_vault_client = opts._key_vault_client + else: + key_vault_client = _get_internal_client(self, client) + + if opts._bypass_auto_encryption: + metadata_client = None + else: + metadata_client = _get_internal_client(self, client) + + db, coll = opts._key_vault_namespace.split(".", 1) + key_vault_coll = key_vault_client[db][coll] + + mongocryptd_client: MongoClient = MongoClient( + opts._mongocryptd_uri, connect=False, serverSelectionTimeoutMS=_MONGOCRYPTD_TIMEOUT_MS + ) + + io_callbacks = _EncryptionIO(metadata_client, key_vault_coll, mongocryptd_client, opts) + self._auto_encrypter = AutoEncrypter( + io_callbacks, + MongoCryptOptions( + opts._kms_providers, + schema_map, + crypt_shared_lib_path=opts._crypt_shared_lib_path, + crypt_shared_lib_required=opts._crypt_shared_lib_required, + bypass_encryption=opts._bypass_auto_encryption, + encrypted_fields_map=encrypted_fields_map, + bypass_query_analysis=opts._bypass_query_analysis, + ), + ) + self._closed = False + + def encrypt( + self, database: str, cmd: Mapping[str, Any], codec_options: CodecOptions + ) -> Dict[Any, Any]: + """Encrypt a MongoDB command. + + :Parameters: + - `database`: The database for this command. + - `cmd`: A command document. + - `codec_options`: The CodecOptions to use while encoding `cmd`. + + :Returns: + The encrypted command to execute. + """ + self._check_closed() + encoded_cmd = _dict_to_bson(cmd, False, codec_options) + with _wrap_encryption_errors(): + encrypted_cmd = self._auto_encrypter.encrypt(database, encoded_cmd) + # TODO: PYTHON-1922 avoid decoding the encrypted_cmd. + encrypt_cmd = _inflate_bson(encrypted_cmd, DEFAULT_RAW_BSON_OPTIONS) + return encrypt_cmd + + def decrypt(self, response: bytes) -> Optional[bytes]: + """Decrypt a MongoDB command response. + + :Parameters: + - `response`: A MongoDB command response as BSON. + + :Returns: + The decrypted command response. 
+ """ + self._check_closed() + with _wrap_encryption_errors(): + return self._auto_encrypter.decrypt(response) + + def _check_closed(self) -> None: + if self._closed: + raise InvalidOperation("Cannot use MongoClient after close") + + def close(self) -> None: + """Cleanup resources.""" + self._closed = True + self._auto_encrypter.close() + if self._internal_client: + self._internal_client.close() + self._internal_client = None + + +class Algorithm(str, enum.Enum): + """An enum that defines the supported encryption algorithms.""" + + AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + """AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic.""" + AEAD_AES_256_CBC_HMAC_SHA_512_Random = "AEAD_AES_256_CBC_HMAC_SHA_512-Random" + """AEAD_AES_256_CBC_HMAC_SHA_512_Random.""" + INDEXED = "Indexed" + """Indexed. + + .. versionadded:: 4.2 + """ + UNINDEXED = "Unindexed" + """Unindexed. + + .. versionadded:: 4.2 + """ + RANGEPREVIEW = "RangePreview" + """RangePreview. + + .. note:: Support for Range queries is in beta. + Backwards-breaking changes may be made before the final release. + + .. versionadded:: 4.4 + """ + + +class QueryType(str, enum.Enum): + """An enum that defines the supported values for explicit encryption query_type. + + .. versionadded:: 4.2 + """ + + EQUALITY = "equality" + """Used to encrypt a value for an equality query.""" + + RANGEPREVIEW = "rangePreview" + """Used to encrypt a value for a range query. + + .. note:: Support for Range queries is in beta. + Backwards-breaking changes may be made before the final release. +""" + + +class ClientEncryption(Generic[_DocumentType]): + """Explicit client-side field level encryption.""" + + def __init__( + self, + kms_providers: Mapping[str, Any], + key_vault_namespace: str, + key_vault_client: MongoClient, + codec_options: CodecOptions, + kms_tls_options: Optional[Mapping[str, Any]] = None, + ) -> None: + """Explicit client-side field level encryption. + + The ClientEncryption class encapsulates explicit operations on a key + vault collection that cannot be done directly on a MongoClient. Similar + to configuring auto encryption on a MongoClient, it is constructed with + a MongoClient (to a MongoDB cluster containing the key vault + collection), KMS provider configuration, and keyVaultNamespace. It + provides an API for explicitly encrypting and decrypting values, and + creating data keys. It does not provide an API to query keys from the + key vault collection, as this can be done directly on the MongoClient. + + See :ref:`explicit-client-side-encryption` for an example. + + :Parameters: + - `kms_providers`: Map of KMS provider options. The `kms_providers` + map values differ by provider: + + - `aws`: Map with "accessKeyId" and "secretAccessKey" as strings. + These are the AWS access key ID and AWS secret access key used + to generate KMS messages. An optional "sessionToken" may be + included to support temporary AWS credentials. + - `azure`: Map with "tenantId", "clientId", and "clientSecret" as + strings. Additionally, "identityPlatformEndpoint" may also be + specified as a string (defaults to 'login.microsoftonline.com'). + These are the Azure Active Directory credentials used to + generate Azure Key Vault messages. + - `gcp`: Map with "email" as a string and "privateKey" + as `bytes` or a base64 encoded string. + Additionally, "endpoint" may also be specified as a string + (defaults to 'oauth2.googleapis.com'). These are the + credentials used to generate Google Cloud KMS messages. 
+ - `kmip`: Map with "endpoint" as a host with required port. + For example: ``{"endpoint": "example.com:443"}``. + - `local`: Map with "key" as `bytes` (96 bytes in length) or + a base64 encoded string which decodes + to 96 bytes. "key" is the master key used to encrypt/decrypt + data keys. This key should be generated and stored as securely + as possible. + + - `key_vault_namespace`: The namespace for the key vault collection. + The key vault collection contains all data keys used for encryption + and decryption. Data keys are stored as documents in this MongoDB + collection. Data keys are protected with encryption by a KMS + provider. + - `key_vault_client`: A MongoClient connected to a MongoDB cluster + containing the `key_vault_namespace` collection. + - `codec_options`: An instance of + :class:`~bson.codec_options.CodecOptions` to use when encoding a + value for encryption and decoding the decrypted BSON value. This + should be the same CodecOptions instance configured on the + MongoClient, Database, or Collection used to access application + data. + - `kms_tls_options` (optional): A map of KMS provider names to TLS + options to use when creating secure connections to KMS providers. + Accepts the same TLS options as + :class:`pymongo.mongo_client.MongoClient`. For example, to + override the system default CA file:: + + kms_tls_options={'kmip': {'tlsCAFile': certifi.where()}} + + Or to supply a client certificate:: + + kms_tls_options={'kmip': {'tlsCertificateKeyFile': 'client.pem'}} + + .. versionchanged:: 4.0 + Added the `kms_tls_options` parameter and the "kmip" KMS provider. + + .. versionadded:: 3.9 + """ + if not _HAVE_PYMONGOCRYPT: + raise ConfigurationError( + "client-side field level encryption requires the pymongocrypt " + "library: install a compatible version with: " + "python -m pip install 'pymongo[encryption]'" + ) + + if not isinstance(codec_options, CodecOptions): + raise TypeError("codec_options must be an instance of bson.codec_options.CodecOptions") + + self._kms_providers = kms_providers + self._key_vault_namespace = key_vault_namespace + self._key_vault_client = key_vault_client + self._codec_options = codec_options + + db, coll = key_vault_namespace.split(".", 1) + key_vault_coll = key_vault_client[db][coll] + + opts = AutoEncryptionOpts( + kms_providers, key_vault_namespace, kms_tls_options=kms_tls_options + ) + self._io_callbacks: Optional[_EncryptionIO] = _EncryptionIO( + None, key_vault_coll, None, opts + ) + self._encryption = ExplicitEncrypter( + self._io_callbacks, MongoCryptOptions(kms_providers, None) + ) + # Use the same key vault collection as the callback. + self._key_vault_coll = self._io_callbacks.key_vault_coll + + def create_encrypted_collection( + self, + database: Database, + name: str, + encrypted_fields: Mapping[str, Any], + kms_provider: Optional[str] = None, + master_key: Optional[Mapping[str, Any]] = None, + **kwargs: Any, + ) -> Tuple[Collection[_DocumentType], Mapping[str, Any]]: + """Create a collection with encryptedFields. + + .. warning:: + This function does not update the encryptedFieldsMap in the client's + AutoEncryptionOpts, thus the user must create a new client after calling this function with + the encryptedFields returned. + + Normally collection creation is automatic. This method should + only be used to specify options on + creation. :class:`~pymongo.errors.EncryptionError` will be + raised if the collection already exists. 
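+
+        A usage sketch (not from the upstream docs; the database, collection
+        name, and field layout are illustrative, and ``client_encryption`` is
+        assumed to be a configured :class:`ClientEncryption`)::
+
+            encrypted_fields = {
+                "fields": [
+                    {
+                        "path": "ssn",
+                        "keyId": None,  # let the driver create a data key
+                        "bsonType": "string",
+                        "queries": {"queryType": "equality"},
+                    }
+                ]
+            }
+            coll, ef = client_encryption.create_encrypted_collection(
+                client["mydb"], "people", encrypted_fields, kms_provider="local"
+            )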
+ + :Parameters: + - `name`: the name of the collection to create + - `encrypted_fields` (dict): Document that describes the encrypted fields for + Queryable Encryption. For example:: + + { + "escCollection": "enxcol_.encryptedCollection.esc", + "ecocCollection": "enxcol_.encryptedCollection.ecoc", + "fields": [ + { + "path": "firstName", + "keyId": Binary.from_uuid(UUID('00000000-0000-0000-0000-000000000000')), + "bsonType": "string", + "queries": {"queryType": "equality"} + }, + { + "path": "ssn", + "keyId": Binary.from_uuid(UUID('04104104-1041-0410-4104-104104104104')), + "bsonType": "string" + } + ] + } + + The "keyId" may be set to ``None`` to auto-generate the data keys. + - `kms_provider` (optional): the KMS provider to be used + - `master_key` (optional): Identifies a KMS-specific key used to encrypt the + new data key. If the kmsProvider is "local" the `master_key` is + not applicable and may be omitted. + - `**kwargs` (optional): additional keyword arguments are the same as "create_collection". + + All optional `create collection command`_ parameters should be passed + as keyword arguments to this method. + See the documentation for :meth:`~pymongo.database.Database.create_collection` for all valid options. + + :Raises: + - :class:`~pymongo.errors.EncryptedCollectionError`: When either data-key creation or creating the collection fails. + + .. versionadded:: 4.4 + + .. _create collection command: + https://mongodb.com/docs/manual/reference/command/create + + """ + encrypted_fields = deepcopy(encrypted_fields) + for i, field in enumerate(encrypted_fields["fields"]): + if isinstance(field, dict) and field.get("keyId") is None: + try: + encrypted_fields["fields"][i]["keyId"] = self.create_data_key( + kms_provider=kms_provider, # type:ignore[arg-type] + master_key=master_key, + ) + except EncryptionError as exc: + raise EncryptedCollectionError(exc, encrypted_fields) from exc + kwargs["encryptedFields"] = encrypted_fields + kwargs["check_exists"] = False + try: + return ( + database.create_collection(name=name, **kwargs), + encrypted_fields, + ) + except Exception as exc: + raise EncryptedCollectionError(exc, encrypted_fields) from exc + + def create_data_key( + self, + kms_provider: str, + master_key: Optional[Mapping[str, Any]] = None, + key_alt_names: Optional[Sequence[str]] = None, + key_material: Optional[bytes] = None, + ) -> Binary: + """Create and insert a new data key into the key vault collection. + + :Parameters: + - `kms_provider`: The KMS provider to use. Supported values are + "aws", "azure", "gcp", "kmip", and "local". + - `master_key`: Identifies a KMS-specific key used to encrypt the + new data key. If the kmsProvider is "local" the `master_key` is + not applicable and may be omitted. + + If the `kms_provider` is "aws" it is required and has the + following fields:: + + - `region` (string): Required. The AWS region, e.g. "us-east-1". + - `key` (string): Required. The Amazon Resource Name (ARN) to + the AWS customer. + - `endpoint` (string): Optional. An alternate host to send KMS + requests to. May include port number, e.g. + "kms.us-east-1.amazonaws.com:443". + + If the `kms_provider` is "azure" it is required and has the + following fields:: + + - `keyVaultEndpoint` (string): Required. Host with optional + port, e.g. "example.vault.azure.net". + - `keyName` (string): Required. Key name in the key vault. + - `keyVersion` (string): Optional. Version of the key to use. 
+ + If the `kms_provider` is "gcp" it is required and has the + following fields:: + + - `projectId` (string): Required. The Google cloud project ID. + - `location` (string): Required. The GCP location, e.g. "us-east1". + - `keyRing` (string): Required. Name of the key ring that contains + the key to use. + - `keyName` (string): Required. Name of the key to use. + - `keyVersion` (string): Optional. Version of the key to use. + - `endpoint` (string): Optional. Host with optional port. + Defaults to "cloudkms.googleapis.com". + + If the `kms_provider` is "kmip" it is optional and has the + following fields:: + + - `keyId` (string): Optional. `keyId` is the KMIP Unique + Identifier to a 96 byte KMIP Secret Data managed object. If + keyId is omitted, the driver creates a random 96 byte KMIP + Secret Data managed object. + - `endpoint` (string): Optional. Host with optional + port, e.g. "example.vault.azure.net:". + + - `key_alt_names` (optional): An optional list of string alternate + names used to reference a key. If a key is created with alternate + names, then encryption may refer to the key by the unique alternate + name instead of by ``key_id``. The following example shows creating + and referring to a data key by alternate name:: + + client_encryption.create_data_key("local", key_alt_names=["name1"]) + # reference the key with the alternate name + client_encryption.encrypt("457-55-5462", key_alt_name="name1", + algorithm=Algorithm.AEAD_AES_256_CBC_HMAC_SHA_512_Random) + - `key_material` (optional): Sets the custom key material to be used + by the data key for encryption and decryption. + + :Returns: + The ``_id`` of the created data key document as a + :class:`~bson.binary.Binary` with subtype + :data:`~bson.binary.UUID_SUBTYPE`. + + .. versionchanged:: 4.2 + Added the `key_material` parameter. + """ + self._check_closed() + with _wrap_encryption_errors(): + return self._encryption.create_data_key( + kms_provider, + master_key=master_key, + key_alt_names=key_alt_names, + key_material=key_material, + ) + + def _encrypt_helper( + self, + value: Any, + algorithm: str, + key_id: Optional[Binary] = None, + key_alt_name: Optional[str] = None, + query_type: Optional[str] = None, + contention_factor: Optional[int] = None, + range_opts: Optional[RangeOpts] = None, + is_expression: bool = False, + ) -> Any: + self._check_closed() + if key_id is not None and not ( + isinstance(key_id, Binary) and key_id.subtype == UUID_SUBTYPE + ): + raise TypeError("key_id must be a bson.binary.Binary with subtype 4") + + doc = encode( + {"v": value}, + codec_options=self._codec_options, + ) + range_opts_bytes = None + if range_opts: + range_opts_bytes = encode( + range_opts.document, + codec_options=self._codec_options, + ) + with _wrap_encryption_errors(): + encrypted_doc = self._encryption.encrypt( + value=doc, + algorithm=algorithm, + key_id=key_id, + key_alt_name=key_alt_name, + query_type=query_type, + contention_factor=contention_factor, + range_opts=range_opts_bytes, + is_expression=is_expression, + ) + return decode(encrypted_doc)["v"] + + def encrypt( + self, + value: Any, + algorithm: str, + key_id: Optional[Binary] = None, + key_alt_name: Optional[str] = None, + query_type: Optional[str] = None, + contention_factor: Optional[int] = None, + range_opts: Optional[RangeOpts] = None, + ) -> Binary: + """Encrypt a BSON value with a given key and algorithm. + + Note that exactly one of ``key_id`` or ``key_alt_name`` must be + provided. + + :Parameters: + - `value`: The BSON value to encrypt. 
+          - `algorithm` (string): The encryption algorithm to use. See
+            :class:`Algorithm` for some valid options.
+          - `key_id`: Identifies a data key by ``_id`` which must be a
+            :class:`~bson.binary.Binary` with subtype 4 (
+            :attr:`~bson.binary.UUID_SUBTYPE`).
+          - `key_alt_name`: Identifies a key vault document by 'keyAltName'.
+          - `query_type` (str): The query type to execute. See :class:`QueryType` for valid options.
+          - `contention_factor` (int): The contention factor to use
+            when the algorithm is :attr:`Algorithm.INDEXED`. An integer value
+            *must* be given when the :attr:`Algorithm.INDEXED` algorithm is
+            used.
+          - `range_opts`: Experimental only, not intended for public use.
+
+        :Returns:
+          The encrypted value, a :class:`~bson.binary.Binary` with subtype 6.
+
+        .. versionchanged:: 4.2
+           Added the `query_type` and `contention_factor` parameters.
+        """
+        return self._encrypt_helper(
+            value=value,
+            algorithm=algorithm,
+            key_id=key_id,
+            key_alt_name=key_alt_name,
+            query_type=query_type,
+            contention_factor=contention_factor,
+            range_opts=range_opts,
+            is_expression=False,
+        )
+
+    def encrypt_expression(
+        self,
+        expression: Mapping[str, Any],
+        algorithm: str,
+        key_id: Optional[Binary] = None,
+        key_alt_name: Optional[str] = None,
+        query_type: Optional[str] = None,
+        contention_factor: Optional[int] = None,
+        range_opts: Optional[RangeOpts] = None,
+    ) -> RawBSONDocument:
+        """Encrypt a BSON expression with a given key and algorithm.
+
+        Note that exactly one of ``key_id`` or ``key_alt_name`` must be
+        provided.
+
+        :Parameters:
+          - `expression`: The BSON aggregate or match expression to encrypt.
+          - `algorithm` (string): The encryption algorithm to use. See
+            :class:`Algorithm` for some valid options.
+          - `key_id`: Identifies a data key by ``_id`` which must be a
+            :class:`~bson.binary.Binary` with subtype 4 (
+            :attr:`~bson.binary.UUID_SUBTYPE`).
+          - `key_alt_name`: Identifies a key vault document by 'keyAltName'.
+          - `query_type` (str): The query type to execute. See
+            :class:`QueryType` for valid options.
+          - `contention_factor` (int): The contention factor to use
+            when the algorithm is :attr:`Algorithm.INDEXED`. An integer value
+            *must* be given when the :attr:`Algorithm.INDEXED` algorithm is
+            used.
+          - `range_opts`: Experimental only, not intended for public use.
+
+        :Returns:
+          The encrypted expression, a :class:`~bson.RawBSONDocument`.
+
+        .. versionadded:: 4.4
+        """
+        return self._encrypt_helper(
+            value=expression,
+            algorithm=algorithm,
+            key_id=key_id,
+            key_alt_name=key_alt_name,
+            query_type=query_type,
+            contention_factor=contention_factor,
+            range_opts=range_opts,
+            is_expression=True,
+        )
+
+    def decrypt(self, value: Binary) -> Any:
+        """Decrypt an encrypted value.
+
+        :Parameters:
+          - `value` (Binary): The encrypted value, a
+            :class:`~bson.binary.Binary` with subtype 6.
+
+        :Returns:
+          The decrypted BSON value.
+        """
+        self._check_closed()
+        if not (isinstance(value, Binary) and value.subtype == 6):
+            raise TypeError("value to decrypt must be a bson.binary.Binary with subtype 6")
+
+        with _wrap_encryption_errors():
+            doc = encode({"v": value})
+            decrypted_doc = self._encryption.decrypt(doc)
+            return decode(decrypted_doc, codec_options=self._codec_options)["v"]
+
+    def get_key(self, id: Binary) -> Optional[RawBSONDocument]:
+        """Get a data key by id.
+
+        :Parameters:
+          - `id` (Binary): The UUID of a key, which must be a
+            :class:`~bson.binary.Binary` with subtype 4 (
+            :attr:`~bson.binary.UUID_SUBTYPE`).
+
+        :Returns:
+          The key document.
+
+        .. versionadded:: 4.2
+        """
+        self._check_closed()
+        assert self._key_vault_coll is not None
+        return self._key_vault_coll.find_one({"_id": id})
+
+    def get_keys(self) -> Cursor[RawBSONDocument]:
+        """Get all of the data keys.
+
+        :Returns:
+          An instance of :class:`~pymongo.cursor.Cursor` over the data key
+          documents.
+
+        .. versionadded:: 4.2
+        """
+        self._check_closed()
+        assert self._key_vault_coll is not None
+        return self._key_vault_coll.find({})
+
+    def delete_key(self, id: Binary) -> DeleteResult:
+        """Delete a key document in the key vault collection that has the given ``key_id``.
+
+        :Parameters:
+          - `id` (Binary): The UUID of a key, which must be a
+            :class:`~bson.binary.Binary` with subtype 4 (
+            :attr:`~bson.binary.UUID_SUBTYPE`).
+
+        :Returns:
+          The delete result.
+
+        .. versionadded:: 4.2
+        """
+        self._check_closed()
+        assert self._key_vault_coll is not None
+        return self._key_vault_coll.delete_one({"_id": id})
+
+    def add_key_alt_name(self, id: Binary, key_alt_name: str) -> Any:
+        """Add ``key_alt_name`` to the set of alternate names in the key document with UUID ``id``.
+
+        :Parameters:
+          - ``id``: The UUID of a key, which must be a
+            :class:`~bson.binary.Binary` with subtype 4 (
+            :attr:`~bson.binary.UUID_SUBTYPE`).
+          - ``key_alt_name``: The key alternate name to add.
+
+        :Returns:
+          The previous version of the key document.
+
+        .. versionadded:: 4.2
+        """
+        self._check_closed()
+        update = {"$addToSet": {"keyAltNames": key_alt_name}}
+        assert self._key_vault_coll is not None
+        return self._key_vault_coll.find_one_and_update({"_id": id}, update)
+
+    def get_key_by_alt_name(self, key_alt_name: str) -> Optional[RawBSONDocument]:
+        """Get a key document in the key vault collection that has the given ``key_alt_name``.
+
+        :Parameters:
+          - `key_alt_name` (str): The key alternate name of the key to get.
+
+        :Returns:
+          The key document.
+
+        .. versionadded:: 4.2
+        """
+        self._check_closed()
+        assert self._key_vault_coll is not None
+        return self._key_vault_coll.find_one({"keyAltNames": key_alt_name})
+
+    def remove_key_alt_name(self, id: Binary, key_alt_name: str) -> Optional[RawBSONDocument]:
+        """Remove ``key_alt_name`` from the set of keyAltNames in the key document with UUID ``id``.
+
+        Also removes the ``keyAltNames`` field from the key document if it would otherwise be empty.
+
+        :Parameters:
+          - ``id``: The UUID of a key, which must be a
+            :class:`~bson.binary.Binary` with subtype 4 (
+            :attr:`~bson.binary.UUID_SUBTYPE`).
+          - ``key_alt_name``: The key alternate name to remove.
+
+        :Returns:
+          The previous version of the key document.
+
+        .. versionadded:: 4.2
+        """
+        self._check_closed()
+        pipeline = [
+            {
+                "$set": {
+                    "keyAltNames": {
+                        "$cond": [
+                            {"$eq": ["$keyAltNames", [key_alt_name]]},
+                            "$$REMOVE",
+                            {
+                                "$filter": {
+                                    "input": "$keyAltNames",
+                                    "cond": {"$ne": ["$$this", key_alt_name]},
+                                }
+                            },
+                        ]
+                    }
+                }
+            }
+        ]
+        assert self._key_vault_coll is not None
+        return self._key_vault_coll.find_one_and_update({"_id": id}, pipeline)
+
+    def rewrap_many_data_key(
+        self,
+        filter: Mapping[str, Any],
+        provider: Optional[str] = None,
+        master_key: Optional[Mapping[str, Any]] = None,
+    ) -> RewrapManyDataKeyResult:
+        """Decrypts and encrypts all matching data keys in the key vault with a possibly new `master_key` value.
+
+        :Parameters:
+          - `filter`: A document used to filter the data keys.
+          - `provider`: The new KMS provider to use to encrypt the data keys,
+            or ``None`` to use the current KMS provider(s).
+ - ``master_key``: The master key fields corresponding to the new KMS + provider when ``provider`` is not ``None``. + + :Returns: + A :class:`RewrapManyDataKeyResult`. + + This method allows you to re-encrypt all of your data-keys with a new CMK, or master key. + Note that this does *not* require re-encrypting any of the data in your encrypted collections, + but rather refreshes the key that protects the keys that encrypt the data: + + .. code-block:: python + + client_encryption.rewrap_many_data_key( + filter={"keyAltNames": "optional filter for which keys you want to update"}, + master_key={ + "provider": "azure", # replace with your cloud provider + "master_key": { + # put the rest of your master_key options here + "key": "<your new key>" + }, + }, + ) + + .. versionadded:: 4.2 + """ + if master_key is not None and provider is None: + raise ConfigurationError("A provider must be given if a master_key is given") + self._check_closed() + with _wrap_encryption_errors(): + raw_result = self._encryption.rewrap_many_data_key(filter, provider, master_key) + if raw_result is None: + return RewrapManyDataKeyResult() + + raw_doc = RawBSONDocument(raw_result, DEFAULT_RAW_BSON_OPTIONS) + replacements = [] + for key in raw_doc["v"]: + update_model = { + "$set": {"keyMaterial": key["keyMaterial"], "masterKey": key["masterKey"]}, + "$currentDate": {"updateDate": True}, + } + op = UpdateOne({"_id": key["_id"]}, update_model) + replacements.append(op) + if not replacements: + return RewrapManyDataKeyResult() + assert self._key_vault_coll is not None + result = self._key_vault_coll.bulk_write(replacements) + return RewrapManyDataKeyResult(result) + + def __enter__(self) -> "ClientEncryption": + return self + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: + self.close() + + def _check_closed(self) -> None: + if self._encryption is None: + raise InvalidOperation("Cannot use closed ClientEncryption") + + def close(self) -> None: + """Release resources. + + Note that using this class in a with-statement will automatically call + :meth:`close`:: + + with ClientEncryption(...) as client_encryption: + encrypted = client_encryption.encrypt(value, ...) + decrypted = client_encryption.decrypt(encrypted) + + """ + if self._io_callbacks: + self._io_callbacks.close() + self._encryption.close() + self._io_callbacks = None + self._encryption = None diff --git a/backend/test/lib/python3.8/site-packages/pymongo/encryption_options.py b/backend/test/lib/python3.8/site-packages/pymongo/encryption_options.py new file mode 100644 index 0000000000000000000000000000000000000000..b4ffd92a8c56cbbc9d817ca9c4bcb59cbd8d4565 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/encryption_options.py @@ -0,0 +1,258 @@ +# Copyright 2019-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
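+
+# (Editorial sketch, not part of the upstream source: AutoEncryptionOpts is
+# typically passed to MongoClient as below; the key vault namespace and the
+# local master key are illustrative.)
+#
+#     opts = AutoEncryptionOpts(
+#         kms_providers={"local": {"key": local_master_key}},
+#         key_vault_namespace="keyvault.datakeys",
+#     )
+#     client = MongoClient(auto_encryption_opts=opts)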
+
+"""Support for automatic client-side field level encryption."""
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional
+
+try:
+    import pymongocrypt  # noqa: F401
+
+    _HAVE_PYMONGOCRYPT = True
+except ImportError:
+    _HAVE_PYMONGOCRYPT = False
+from bson import int64
+from pymongo.common import validate_is_mapping
+from pymongo.errors import ConfigurationError
+from pymongo.uri_parser import _parse_kms_tls_options
+
+if TYPE_CHECKING:
+    from pymongo.mongo_client import MongoClient
+
+
+class AutoEncryptionOpts:
+    """Options to configure automatic client-side field level encryption."""
+
+    def __init__(
+        self,
+        kms_providers: Mapping[str, Any],
+        key_vault_namespace: str,
+        key_vault_client: Optional[MongoClient] = None,
+        schema_map: Optional[Mapping[str, Any]] = None,
+        bypass_auto_encryption: bool = False,
+        mongocryptd_uri: str = "mongodb://localhost:27020",
+        mongocryptd_bypass_spawn: bool = False,
+        mongocryptd_spawn_path: str = "mongocryptd",
+        mongocryptd_spawn_args: Optional[List[str]] = None,
+        kms_tls_options: Optional[Mapping[str, Any]] = None,
+        crypt_shared_lib_path: Optional[str] = None,
+        crypt_shared_lib_required: bool = False,
+        bypass_query_analysis: bool = False,
+        encrypted_fields_map: Optional[Mapping] = None,
+    ) -> None:
+        """Options to configure automatic client-side field level encryption.
+
+        Automatic client-side field level encryption requires MongoDB 4.2
+        enterprise or a MongoDB 4.2 Atlas cluster. Automatic encryption is not
+        supported for operations on a database or view and will result in an
+        error.
+
+        Although automatic encryption requires MongoDB 4.2 enterprise or a
+        MongoDB 4.2 Atlas cluster, automatic *decryption* is supported for all
+        users. To configure automatic *decryption* without automatic
+        *encryption* set ``bypass_auto_encryption=True``. Explicit
+        encryption and explicit decryption are also supported for all users
+        with the :class:`~pymongo.encryption.ClientEncryption` class.
+
+        See :ref:`automatic-client-side-encryption` for an example.
+
+        :Parameters:
+          - `kms_providers`: Map of KMS provider options. The `kms_providers`
+            map values differ by provider:
+
+            - `aws`: Map with "accessKeyId" and "secretAccessKey" as strings.
+              These are the AWS access key ID and AWS secret access key used
+              to generate KMS messages. An optional "sessionToken" may be
+              included to support temporary AWS credentials.
+            - `azure`: Map with "tenantId", "clientId", and "clientSecret" as
+              strings. Additionally, "identityPlatformEndpoint" may also be
+              specified as a string (defaults to 'login.microsoftonline.com').
+              These are the Azure Active Directory credentials used to
+              generate Azure Key Vault messages.
+            - `gcp`: Map with "email" as a string and "privateKey"
+              as `bytes` or a base64 encoded string.
+              Additionally, "endpoint" may also be specified as a string
+              (defaults to 'oauth2.googleapis.com'). These are the
+              credentials used to generate Google Cloud KMS messages.
+            - `kmip`: Map with "endpoint" as a host with required port.
+              For example: ``{"endpoint": "example.com:443"}``.
+            - `local`: Map with "key" as `bytes` (96 bytes in length) or
+              a base64 encoded string which decodes
+              to 96 bytes. "key" is the master key used to encrypt/decrypt
+              data keys. This key should be generated and stored as securely
+              as possible.
+
+          - `key_vault_namespace`: The namespace for the key vault collection.
+            The key vault collection contains all data keys used for encryption
+            and decryption.
Data keys are stored as documents in this MongoDB
+            collection. Data keys are protected with encryption by a KMS
+            provider.
+          - `key_vault_client` (optional): By default the key vault collection
+            is assumed to reside in the same MongoDB cluster as the encrypted
+            MongoClient. Use this option to route data key queries to a
+            separate MongoDB cluster.
+          - `schema_map` (optional): Map of collection namespace ("db.coll") to
+            JSON Schema. By default, a collection's JSONSchema is periodically
+            polled with the listCollections command. But a JSONSchema may be
+            specified locally with the schemaMap option.
+
+            **Supplying a `schema_map` provides more security than relying on
+            JSON Schemas obtained from the server. It protects against a
+            malicious server advertising a false JSON Schema, which could trick
+            the client into sending unencrypted data that should be
+            encrypted.**
+
+            Schemas supplied in the schemaMap only apply to configuring
+            automatic encryption for client side encryption. Other validation
+            rules in the JSON schema will not be enforced by the driver and
+            will result in an error.
+          - `bypass_auto_encryption` (optional): If ``True``, automatic
+            encryption will be disabled but automatic decryption will still be
+            enabled. Defaults to ``False``.
+          - `mongocryptd_uri` (optional): The MongoDB URI used to connect
+            to the *local* mongocryptd process. Defaults to
+            ``'mongodb://localhost:27020'``.
+          - `mongocryptd_bypass_spawn` (optional): If ``True``, the encrypted
+            MongoClient will not attempt to spawn the mongocryptd process.
+            Defaults to ``False``.
+          - `mongocryptd_spawn_path` (optional): Used for spawning the
+            mongocryptd process. Defaults to ``'mongocryptd'`` and spawns
+            mongocryptd from the system path.
+          - `mongocryptd_spawn_args` (optional): A list of string arguments to
+            use when spawning the mongocryptd process. Defaults to
+            ``['--idleShutdownTimeoutSecs=60']``. If the list does not include
+            the ``idleShutdownTimeoutSecs`` option then
+            ``'--idleShutdownTimeoutSecs=60'`` will be added.
+          - `kms_tls_options` (optional): A map of KMS provider names to TLS
+            options to use when creating secure connections to KMS providers.
+            Accepts the same TLS options as
+            :class:`pymongo.mongo_client.MongoClient`. For example, to
+            override the system default CA file::
+
+              kms_tls_options={'kmip': {'tlsCAFile': certifi.where()}}
+
+            Or to supply a client certificate::
+
+              kms_tls_options={'kmip': {'tlsCertificateKeyFile': 'client.pem'}}
+          - `crypt_shared_lib_path` (optional): Override the path to load the crypt_shared library.
+          - `crypt_shared_lib_required` (optional): If True, raise an error if libmongocrypt is
+            unable to load the crypt_shared library.
+          - `bypass_query_analysis` (optional): If ``True``, disable automatic analysis
+            of outgoing commands. Set `bypass_query_analysis` to use explicit
+            encryption on indexed fields without the MongoDB Enterprise Advanced
+            licensed crypt_shared library.
+          - `encrypted_fields_map`: Map of collection namespace ("db.coll") to documents
+            that describe the encrypted fields for Queryable Encryption.
For example::
+
+              {
+                  "db.encryptedCollection": {
+                      "escCollection": "enxcol_.encryptedCollection.esc",
+                      "ecocCollection": "enxcol_.encryptedCollection.ecoc",
+                      "fields": [
+                          {
+                              "path": "firstName",
+                              "keyId": Binary.from_uuid(UUID('00000000-0000-0000-0000-000000000000')),
+                              "bsonType": "string",
+                              "queries": {"queryType": "equality"}
+                          },
+                          {
+                              "path": "ssn",
+                              "keyId": Binary.from_uuid(UUID('04104104-1041-0410-4104-104104104104')),
+                              "bsonType": "string"
+                          }
+                      ]
+                  }
+              }
+
+        .. versionchanged:: 4.2
+           Added the `encrypted_fields_map`, `crypt_shared_lib_path`,
+           `crypt_shared_lib_required`, and `bypass_query_analysis` parameters.
+
+        .. versionchanged:: 4.0
+           Added the `kms_tls_options` parameter and the "kmip" KMS provider.
+
+        .. versionadded:: 3.9
+        """
+        if not _HAVE_PYMONGOCRYPT:
+            raise ConfigurationError(
+                "client side encryption requires the pymongocrypt library: "
+                "install a compatible version with: "
+                "python -m pip install 'pymongo[encryption]'"
+            )
+        if encrypted_fields_map:
+            validate_is_mapping("encrypted_fields_map", encrypted_fields_map)
+        self._encrypted_fields_map = encrypted_fields_map
+        self._bypass_query_analysis = bypass_query_analysis
+        self._crypt_shared_lib_path = crypt_shared_lib_path
+        self._crypt_shared_lib_required = crypt_shared_lib_required
+        self._kms_providers = kms_providers
+        self._key_vault_namespace = key_vault_namespace
+        self._key_vault_client = key_vault_client
+        self._schema_map = schema_map
+        self._bypass_auto_encryption = bypass_auto_encryption
+        self._mongocryptd_uri = mongocryptd_uri
+        self._mongocryptd_bypass_spawn = mongocryptd_bypass_spawn
+        self._mongocryptd_spawn_path = mongocryptd_spawn_path
+        if mongocryptd_spawn_args is None:
+            mongocryptd_spawn_args = ["--idleShutdownTimeoutSecs=60"]
+        self._mongocryptd_spawn_args = mongocryptd_spawn_args
+        if not isinstance(self._mongocryptd_spawn_args, list):
+            raise TypeError("mongocryptd_spawn_args must be a list")
+        if not any("idleShutdownTimeoutSecs" in s for s in self._mongocryptd_spawn_args):
+            self._mongocryptd_spawn_args.append("--idleShutdownTimeoutSecs=60")
+        # Maps KMS provider name to an SSLContext.
+        self._kms_ssl_contexts = _parse_kms_tls_options(kms_tls_options)
+
+
+class RangeOpts:
+    """Options to configure encrypted queries using the rangePreview algorithm."""
+
+    def __init__(
+        self,
+        sparsity: int,
+        min: Optional[Any] = None,
+        max: Optional[Any] = None,
+        precision: Optional[int] = None,
+    ) -> None:
+        """Options to configure encrypted queries using the rangePreview algorithm.
+
+        .. note:: This feature is experimental only, and not intended for public use.
+
+        :Parameters:
+          - `sparsity`: An integer.
+          - `min`: A BSON scalar value corresponding to the type being queried.
+          - `max`: A BSON scalar value corresponding to the type being queried.
+          - `precision`: An integer, may only be set for double or decimal128 types.
+
+        ..
versionadded:: 4.4 + """ + self.min = min + self.max = max + self.sparsity = sparsity + self.precision = precision + + @property + def document(self) -> Dict[str, Any]: + doc = {} + for k, v in [ + ("sparsity", int64.Int64(self.sparsity)), + ("precision", self.precision), + ("min", self.min), + ("max", self.max), + ]: + if v is not None: + doc[k] = v + return doc diff --git a/backend/test/lib/python3.8/site-packages/pymongo/errors.py b/backend/test/lib/python3.8/site-packages/pymongo/errors.py new file mode 100644 index 0000000000000000000000000000000000000000..c2cc6bbb6cfd3a089a67b124546d61f84d6451bf --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/errors.py @@ -0,0 +1,394 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Exceptions raised by PyMongo.""" +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Any, + Iterable, + List, + Mapping, + Optional, + Sequence, + Tuple, + Union, +) + +from bson.errors import InvalidDocument + +if TYPE_CHECKING: + from pymongo.typings import _DocumentOut + +try: + # CPython 3.7+ + from ssl import SSLCertVerificationError as _CertificateError +except ImportError: + try: + from ssl import CertificateError as _CertificateError + except ImportError: + + class _CertificateError(ValueError): # type: ignore + pass + + +class PyMongoError(Exception): + """Base class for all PyMongo exceptions.""" + + def __init__(self, message: str = "", error_labels: Optional[Iterable[str]] = None) -> None: + super().__init__(message) + self._message = message + self._error_labels = set(error_labels or []) + + def has_error_label(self, label: str) -> bool: + """Return True if this error contains the given label. + + .. versionadded:: 3.7 + """ + return label in self._error_labels + + def _add_error_label(self, label: str) -> None: + """Add the given label to this error.""" + self._error_labels.add(label) + + def _remove_error_label(self, label: str) -> None: + """Remove the given label from this error.""" + self._error_labels.discard(label) + + @property + def timeout(self) -> bool: + """True if this error was caused by a timeout. + + .. versionadded:: 4.2 + """ + return False + + +class ProtocolError(PyMongoError): + """Raised for failures related to the wire protocol.""" + + +class ConnectionFailure(PyMongoError): + """Raised when a connection to the database cannot be made or is lost.""" + + +class WaitQueueTimeoutError(ConnectionFailure): + """Raised when an operation times out waiting to checkout a connection from the pool. + + Subclass of :exc:`~pymongo.errors.ConnectionFailure`. + + .. versionadded:: 4.2 + """ + + @property + def timeout(self) -> bool: + return True + + +class AutoReconnect(ConnectionFailure): + """Raised when a connection to the database is lost and an attempt to + auto-reconnect will be made. + + In order to auto-reconnect you must handle this exception, recognizing that + the operation which caused it has not necessarily succeeded. 
Future + operations will attempt to open a new connection to the database (and + will continue to raise this exception until the first successful + connection is made). + + Subclass of :exc:`~pymongo.errors.ConnectionFailure`. + """ + + errors: Union[Mapping[str, Any], Sequence] + details: Union[Mapping[str, Any], Sequence] + + def __init__( + self, message: str = "", errors: Optional[Union[Mapping[str, Any], Sequence]] = None + ) -> None: + error_labels = None + if errors is not None: + if isinstance(errors, dict): + error_labels = errors.get("errorLabels") + super().__init__(message, error_labels) + self.errors = self.details = errors or [] + + +class NetworkTimeout(AutoReconnect): + """An operation on an open connection exceeded socketTimeoutMS. + + The remaining connections in the pool stay open. In the case of a write + operation, you cannot know whether it succeeded or failed. + + Subclass of :exc:`~pymongo.errors.AutoReconnect`. + """ + + @property + def timeout(self) -> bool: + return True + + +def _format_detailed_error(message: str, details: Optional[Union[Mapping[str, Any], List]]) -> str: + if details is not None: + message = f"{message}, full error: {details}" + return message + + +class NotPrimaryError(AutoReconnect): + """The server responded "not primary" or "node is recovering". + + These errors result from a query, write, or command. The operation failed + because the client thought it was using the primary but the primary has + stepped down, or the client thought it was using a healthy secondary but + the secondary is stale and trying to recover. + + The client launches a refresh operation on a background thread, to update + its view of the server as soon as possible after throwing this exception. + + Subclass of :exc:`~pymongo.errors.AutoReconnect`. + + .. versionadded:: 3.12 + """ + + def __init__( + self, message: str = "", errors: Optional[Union[Mapping[str, Any], List]] = None + ) -> None: + super().__init__(_format_detailed_error(message, errors), errors=errors) + + +class ServerSelectionTimeoutError(AutoReconnect): + """Thrown when no MongoDB server is available for an operation + + If there is no suitable server for an operation PyMongo tries for + ``serverSelectionTimeoutMS`` (default 30 seconds) to find one, then + throws this exception. For example, it is thrown after attempting an + operation when PyMongo cannot connect to any server, or if you attempt + an insert into a replica set that has no primary and does not elect one + within the timeout window, or if you attempt to query with a Read + Preference that the replica set cannot satisfy. + """ + + @property + def timeout(self) -> bool: + return True + + +class ConfigurationError(PyMongoError): + """Raised when something is incorrectly configured.""" + + +class OperationFailure(PyMongoError): + """Raised when a database operation fails. + + .. versionadded:: 2.7 + The :attr:`details` attribute. 
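+
+    A sketch of inspecting these attributes, assuming a connected ``db``
+    database handle (the command and names here are illustrative only)::
+
+        try:
+            db.command("collMod", "missingCollection")
+        except OperationFailure as exc:
+            print(exc.code)     # numeric server error code, if any
+            print(exc.details)  # complete server error document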
+    """
+
+    def __init__(
+        self,
+        error: str,
+        code: Optional[int] = None,
+        details: Optional[Mapping[str, Any]] = None,
+        max_wire_version: Optional[int] = None,
+    ) -> None:
+        error_labels = None
+        if details is not None:
+            error_labels = details.get("errorLabels")
+        super().__init__(_format_detailed_error(error, details), error_labels=error_labels)
+        self.__code = code
+        self.__details = details
+        self.__max_wire_version = max_wire_version
+
+    @property
+    def _max_wire_version(self) -> Optional[int]:
+        return self.__max_wire_version
+
+    @property
+    def code(self) -> Optional[int]:
+        """The error code returned by the server, if any."""
+        return self.__code
+
+    @property
+    def details(self) -> Optional[Mapping[str, Any]]:
+        """The complete error document returned by the server.
+
+        Depending on the error that occurred, the error document
+        may include useful information beyond just the error
+        message. When connected to a mongos the error document
+        may contain one or more subdocuments if errors occurred
+        on multiple shards.
+        """
+        return self.__details
+
+    @property
+    def timeout(self) -> bool:
+        return self.__code in (50,)
+
+
+class CursorNotFound(OperationFailure):
+    """Raised while iterating query results if the cursor is
+    invalidated on the server.
+
+    .. versionadded:: 2.7
+    """
+
+
+class ExecutionTimeout(OperationFailure):
+    """Raised when a database operation times out, exceeding the $maxTimeMS
+    set in the query or command option.
+
+    .. note:: Requires server version **>= 2.6.0**
+
+    .. versionadded:: 2.7
+    """
+
+    @property
+    def timeout(self) -> bool:
+        return True
+
+
+class WriteConcernError(OperationFailure):
+    """Base exception type for errors raised due to write concern.
+
+    .. versionadded:: 3.0
+    """
+
+
+class WriteError(OperationFailure):
+    """Base exception type for errors raised during write operations.
+
+    .. versionadded:: 3.0
+    """
+
+
+class WTimeoutError(WriteConcernError):
+    """Raised when a database operation times out (i.e. wtimeout expires)
+    before replication completes.
+
+    With newer versions of MongoDB the `details` attribute may include
+    write concern fields like 'n', 'updatedExisting', or 'writtenTo'.
+
+    .. versionadded:: 2.7
+    """
+
+    @property
+    def timeout(self) -> bool:
+        return True
+
+
+class DuplicateKeyError(WriteError):
+    """Raised when an insert or update fails due to a duplicate key error."""
+
+
+def _wtimeout_error(error: Any) -> bool:
+    """Return True if this writeConcernError doc is caused by a timeout."""
+    return error.get("code") == 50 or ("errInfo" in error and error["errInfo"].get("wtimeout"))
+
+
+class BulkWriteError(OperationFailure):
+    """Exception class for bulk write errors.
+
+    .. versionadded:: 2.7
+    """
+
+    details: _DocumentOut
+
+    def __init__(self, results: _DocumentOut) -> None:
+        super().__init__("batch op errors occurred", 65, results)
+
+    def __reduce__(self) -> Tuple[Any, Any]:
+        return self.__class__, (self.details,)
+
+    @property
+    def timeout(self) -> bool:
+        # Check the last writeConcernError and last writeError to determine if this
+        # BulkWriteError was caused by a timeout.
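+        # (A write error with code 50 is the server's MaxTimeMSExpired error;
+        # write concern timeouts are recognized via _wtimeout_error above,
+        # which also inspects errInfo.wtimeout.)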
+ wces = self.details.get("writeConcernErrors", []) + if wces and _wtimeout_error(wces[-1]): + return True + + werrs = self.details.get("writeErrors", []) + if werrs and werrs[-1].get("code") == 50: + return True + return False + + +class InvalidOperation(PyMongoError): + """Raised when a client attempts to perform an invalid operation.""" + + +class InvalidName(PyMongoError): + """Raised when an invalid name is used.""" + + +class CollectionInvalid(PyMongoError): + """Raised when collection validation fails.""" + + +class InvalidURI(ConfigurationError): + """Raised when trying to parse an invalid mongodb URI.""" + + +class DocumentTooLarge(InvalidDocument): + """Raised when an encoded document is too large for the connected server.""" + + +class EncryptionError(PyMongoError): + """Raised when encryption or decryption fails. + + This error always wraps another exception which can be retrieved via the + :attr:`cause` property. + + .. versionadded:: 3.9 + """ + + def __init__(self, cause: Exception) -> None: + super().__init__(str(cause)) + self.__cause = cause + + @property + def cause(self) -> Exception: + """The exception that caused this encryption or decryption error.""" + return self.__cause + + @property + def timeout(self) -> bool: + if isinstance(self.__cause, PyMongoError): + return self.__cause.timeout + return False + + +class EncryptedCollectionError(EncryptionError): + """Raised when creating a collection with encrypted_fields fails. + + .. versionadded:: 4.4 + """ + + def __init__(self, cause: Exception, encrypted_fields: Mapping[str, Any]) -> None: + super().__init__(cause) + self.__encrypted_fields = encrypted_fields + + @property + def encrypted_fields(self) -> Mapping[str, Any]: + """The encrypted_fields document that allows inferring which data keys are *known* to be created. + + Note that the returned document is not guaranteed to contain information about *all* of the data keys that + were created, for example in the case of an indefinite error like a timeout. Use the `cause` property to + determine whether a definite or indefinite error caused this error, and only rely on the accuracy of the + encrypted_fields if the error is definite. + """ + return self.__encrypted_fields + + +class _OperationCancelled(AutoReconnect): + """Internal error raised when a socket operation is cancelled.""" diff --git a/backend/test/lib/python3.8/site-packages/pymongo/event_loggers.py b/backend/test/lib/python3.8/site-packages/pymongo/event_loggers.py new file mode 100644 index 0000000000000000000000000000000000000000..70c386ab04f0a68786a6f396642b315f79dbee28 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/event_loggers.py @@ -0,0 +1,221 @@ +# Copyright 2020-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +"""Example event logger classes. + +.. versionadded:: 3.11 + +These loggers can be registered using :func:`register` or +:class:`~pymongo.mongo_client.MongoClient`. 
+ +``monitoring.register(CommandLogger())`` + +or + +``MongoClient(event_listeners=[CommandLogger()])`` +""" +import logging + +from pymongo import monitoring + + +class CommandLogger(monitoring.CommandListener): + """A simple listener that logs command events. + + Listens for :class:`~pymongo.monitoring.CommandStartedEvent`, + :class:`~pymongo.monitoring.CommandSucceededEvent` and + :class:`~pymongo.monitoring.CommandFailedEvent` events and + logs them at the `INFO` severity level using :mod:`logging`. + .. versionadded:: 3.11 + """ + + def started(self, event: monitoring.CommandStartedEvent) -> None: + logging.info( + f"Command {event.command_name} with request id " + f"{event.request_id} started on server " + f"{event.connection_id}" + ) + + def succeeded(self, event: monitoring.CommandSucceededEvent) -> None: + logging.info( + f"Command {event.command_name} with request id " + f"{event.request_id} on server {event.connection_id} " + f"succeeded in {event.duration_micros} " + "microseconds" + ) + + def failed(self, event: monitoring.CommandFailedEvent) -> None: + logging.info( + f"Command {event.command_name} with request id " + f"{event.request_id} on server {event.connection_id} " + f"failed in {event.duration_micros} " + "microseconds" + ) + + +class ServerLogger(monitoring.ServerListener): + """A simple listener that logs server discovery events. + + Listens for :class:`~pymongo.monitoring.ServerOpeningEvent`, + :class:`~pymongo.monitoring.ServerDescriptionChangedEvent`, + and :class:`~pymongo.monitoring.ServerClosedEvent` + events and logs them at the `INFO` severity level using :mod:`logging`. + + .. versionadded:: 3.11 + """ + + def opened(self, event: monitoring.ServerOpeningEvent) -> None: + logging.info(f"Server {event.server_address} added to topology {event.topology_id}") + + def description_changed(self, event: monitoring.ServerDescriptionChangedEvent) -> None: + previous_server_type = event.previous_description.server_type + new_server_type = event.new_description.server_type + if new_server_type != previous_server_type: + # server_type_name was added in PyMongo 3.4 + logging.info( + f"Server {event.server_address} changed type from " + f"{event.previous_description.server_type_name} to " + f"{event.new_description.server_type_name}" + ) + + def closed(self, event: monitoring.ServerClosedEvent) -> None: + logging.warning(f"Server {event.server_address} removed from topology {event.topology_id}") + + +class HeartbeatLogger(monitoring.ServerHeartbeatListener): + """A simple listener that logs server heartbeat events. + + Listens for :class:`~pymongo.monitoring.ServerHeartbeatStartedEvent`, + :class:`~pymongo.monitoring.ServerHeartbeatSucceededEvent`, + and :class:`~pymongo.monitoring.ServerHeartbeatFailedEvent` + events and logs them at the `INFO` severity level using :mod:`logging`. + + .. versionadded:: 3.11 + """ + + def started(self, event: monitoring.ServerHeartbeatStartedEvent) -> None: + logging.info(f"Heartbeat sent to server {event.connection_id}") + + def succeeded(self, event: monitoring.ServerHeartbeatSucceededEvent) -> None: + # The reply.document attribute was added in PyMongo 3.4. 
+        logging.info(
+            f"Heartbeat to server {event.connection_id} "
+            "succeeded with reply "
+            f"{event.reply.document}"
+        )
+
+    def failed(self, event: monitoring.ServerHeartbeatFailedEvent) -> None:
+        logging.warning(
+            f"Heartbeat to server {event.connection_id} failed with error {event.reply}"
+        )
+
+
+class TopologyLogger(monitoring.TopologyListener):
+    """A simple listener that logs server topology events.
+
+    Listens for :class:`~pymongo.monitoring.TopologyOpenedEvent`,
+    :class:`~pymongo.monitoring.TopologyDescriptionChangedEvent`,
+    and :class:`~pymongo.monitoring.TopologyClosedEvent`
+    events and logs them at the `INFO` severity level using :mod:`logging`.
+
+    .. versionadded:: 3.11
+    """
+
+    def opened(self, event: monitoring.TopologyOpenedEvent) -> None:
+        logging.info(f"Topology with id {event.topology_id} opened")
+
+    def description_changed(self, event: monitoring.TopologyDescriptionChangedEvent) -> None:
+        logging.info(f"Topology description updated for topology id {event.topology_id}")
+        previous_topology_type = event.previous_description.topology_type
+        new_topology_type = event.new_description.topology_type
+        if new_topology_type != previous_topology_type:
+            # topology_type_name was added in PyMongo 3.4
+            logging.info(
+                f"Topology {event.topology_id} changed type from "
+                f"{event.previous_description.topology_type_name} to "
+                f"{event.new_description.topology_type_name}"
+            )
+        # The has_writable_server and has_readable_server methods
+        # were added in PyMongo 3.4.
+        if not event.new_description.has_writable_server():
+            logging.warning("No writable servers available.")
+        if not event.new_description.has_readable_server():
+            logging.warning("No readable servers available.")
+
+    def closed(self, event: monitoring.TopologyClosedEvent) -> None:
+        logging.info(f"Topology with id {event.topology_id} closed")
+
+
+class ConnectionPoolLogger(monitoring.ConnectionPoolListener):
+    """A simple listener that logs server connection pool events.
+
+    Listens for :class:`~pymongo.monitoring.PoolCreatedEvent`,
+    :class:`~pymongo.monitoring.PoolClearedEvent`,
+    :class:`~pymongo.monitoring.PoolClosedEvent`,
+    :class:`~pymongo.monitoring.ConnectionCreatedEvent`,
+    :class:`~pymongo.monitoring.ConnectionReadyEvent`,
+    :class:`~pymongo.monitoring.ConnectionClosedEvent`,
+    :class:`~pymongo.monitoring.ConnectionCheckOutStartedEvent`,
+    :class:`~pymongo.monitoring.ConnectionCheckOutFailedEvent`,
+    :class:`~pymongo.monitoring.ConnectionCheckedOutEvent`,
+    and :class:`~pymongo.monitoring.ConnectionCheckedInEvent`
+    events and logs them at the `INFO` severity level using :mod:`logging`.
+
+    ..
versionadded:: 3.11 + """ + + def pool_created(self, event: monitoring.PoolCreatedEvent) -> None: + logging.info(f"[pool {event.address}] pool created") + + def pool_ready(self, event: monitoring.PoolReadyEvent) -> None: + logging.info(f"[pool {event.address}] pool ready") + + def pool_cleared(self, event: monitoring.PoolClearedEvent) -> None: + logging.info(f"[pool {event.address}] pool cleared") + + def pool_closed(self, event: monitoring.PoolClosedEvent) -> None: + logging.info(f"[pool {event.address}] pool closed") + + def connection_created(self, event: monitoring.ConnectionCreatedEvent) -> None: + logging.info(f"[pool {event.address}][conn #{event.connection_id}] connection created") + + def connection_ready(self, event: monitoring.ConnectionReadyEvent) -> None: + logging.info( + f"[pool {event.address}][conn #{event.connection_id}] connection setup succeeded" + ) + + def connection_closed(self, event: monitoring.ConnectionClosedEvent) -> None: + logging.info( + f"[pool {event.address}][conn #{event.connection_id}] " + f'connection closed, reason: "{event.reason}"' + ) + + def connection_check_out_started( + self, event: monitoring.ConnectionCheckOutStartedEvent + ) -> None: + logging.info(f"[pool {event.address}] connection check out started") + + def connection_check_out_failed(self, event: monitoring.ConnectionCheckOutFailedEvent) -> None: + logging.info(f"[pool {event.address}] connection check out failed, reason: {event.reason}") + + def connection_checked_out(self, event: monitoring.ConnectionCheckedOutEvent) -> None: + logging.info( + f"[pool {event.address}][conn #{event.connection_id}] connection checked out of pool" + ) + + def connection_checked_in(self, event: monitoring.ConnectionCheckedInEvent) -> None: + logging.info( + f"[pool {event.address}][conn #{event.connection_id}] connection checked into pool" + ) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/hello.py b/backend/test/lib/python3.8/site-packages/pymongo/hello.py new file mode 100644 index 0000000000000000000000000000000000000000..1715beb5cfdef6fa6f4fc6220e941ef4576ad1e1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/hello.py @@ -0,0 +1,219 @@ +# Copyright 2021-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
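The event logger classes above are plain listeners, so wiring one up is a one-liner, either globally or per client. A brief sketch (the URI is a placeholder):

.. code-block:: python

    from pymongo import MongoClient, monitoring
    from pymongo.event_loggers import CommandLogger, ServerLogger

    monitoring.register(ServerLogger())  # applies to every client created afterwards
    client = MongoClient(
        "mongodb://localhost:27017",
        event_listeners=[CommandLogger()],  # applies to this client only
    )
    client.admin.command("ping")  # logs command started/succeeded events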
+ +"""Helpers for the 'hello' and legacy hello commands.""" + +import copy +import datetime +import itertools +from typing import Any, Generic, List, Mapping, Optional, Set, Tuple + +from bson.objectid import ObjectId +from pymongo import common +from pymongo.server_type import SERVER_TYPE +from pymongo.typings import ClusterTime, _DocumentType + + +class HelloCompat: + CMD = "hello" + LEGACY_CMD = "ismaster" + PRIMARY = "isWritablePrimary" + LEGACY_PRIMARY = "ismaster" + LEGACY_ERROR = "not master" + + +def _get_server_type(doc: Mapping[str, Any]) -> int: + """Determine the server type from a hello response.""" + if not doc.get("ok"): + return SERVER_TYPE.Unknown + + if doc.get("serviceId"): + return SERVER_TYPE.LoadBalancer + elif doc.get("isreplicaset"): + return SERVER_TYPE.RSGhost + elif doc.get("setName"): + if doc.get("hidden"): + return SERVER_TYPE.RSOther + elif doc.get(HelloCompat.PRIMARY): + return SERVER_TYPE.RSPrimary + elif doc.get(HelloCompat.LEGACY_PRIMARY): + return SERVER_TYPE.RSPrimary + elif doc.get("secondary"): + return SERVER_TYPE.RSSecondary + elif doc.get("arbiterOnly"): + return SERVER_TYPE.RSArbiter + else: + return SERVER_TYPE.RSOther + elif doc.get("msg") == "isdbgrid": + return SERVER_TYPE.Mongos + else: + return SERVER_TYPE.Standalone + + +class Hello(Generic[_DocumentType]): + """Parse a hello response from the server. + + .. versionadded:: 3.12 + """ + + __slots__ = ("_doc", "_server_type", "_is_writable", "_is_readable", "_awaitable") + + def __init__(self, doc: _DocumentType, awaitable: bool = False) -> None: + self._server_type = _get_server_type(doc) + self._doc: _DocumentType = doc + self._is_writable = self._server_type in ( + SERVER_TYPE.RSPrimary, + SERVER_TYPE.Standalone, + SERVER_TYPE.Mongos, + SERVER_TYPE.LoadBalancer, + ) + + self._is_readable = self.server_type == SERVER_TYPE.RSSecondary or self._is_writable + self._awaitable = awaitable + + @property + def document(self) -> _DocumentType: + """The complete hello command response document. + + .. 
versionadded:: 3.4 + """ + return copy.copy(self._doc) + + @property + def server_type(self) -> int: + return self._server_type + + @property + def all_hosts(self) -> Set[Tuple[str, int]]: + """List of hosts, passives, and arbiters known to this server.""" + return set( + map( + common.clean_node, + itertools.chain( + self._doc.get("hosts", []), + self._doc.get("passives", []), + self._doc.get("arbiters", []), + ), + ) + ) + + @property + def tags(self) -> Mapping[str, Any]: + """Replica set member tags or empty dict.""" + return self._doc.get("tags", {}) + + @property + def primary(self) -> Optional[Tuple[str, int]]: + """This server's opinion about who the primary is, or None.""" + if self._doc.get("primary"): + return common.partition_node(self._doc["primary"]) + else: + return None + + @property + def replica_set_name(self) -> Optional[str]: + """Replica set name or None.""" + return self._doc.get("setName") + + @property + def max_bson_size(self) -> int: + return self._doc.get("maxBsonObjectSize", common.MAX_BSON_SIZE) + + @property + def max_message_size(self) -> int: + return self._doc.get("maxMessageSizeBytes", 2 * self.max_bson_size) + + @property + def max_write_batch_size(self) -> int: + return self._doc.get("maxWriteBatchSize", common.MAX_WRITE_BATCH_SIZE) + + @property + def min_wire_version(self) -> int: + return self._doc.get("minWireVersion", common.MIN_WIRE_VERSION) + + @property + def max_wire_version(self) -> int: + return self._doc.get("maxWireVersion", common.MAX_WIRE_VERSION) + + @property + def set_version(self) -> Optional[int]: + return self._doc.get("setVersion") + + @property + def election_id(self) -> Optional[ObjectId]: + return self._doc.get("electionId") + + @property + def cluster_time(self) -> Optional[ClusterTime]: + return self._doc.get("$clusterTime") + + @property + def logical_session_timeout_minutes(self) -> Optional[int]: + return self._doc.get("logicalSessionTimeoutMinutes") + + @property + def is_writable(self) -> bool: + return self._is_writable + + @property + def is_readable(self) -> bool: + return self._is_readable + + @property + def me(self) -> Optional[Tuple[str, int]]: + me = self._doc.get("me") + if me: + return common.clean_node(me) + return None + + @property + def last_write_date(self) -> Optional[datetime.datetime]: + return self._doc.get("lastWrite", {}).get("lastWriteDate") + + @property + def compressors(self) -> Optional[List[str]]: + return self._doc.get("compression") + + @property + def sasl_supported_mechs(self) -> List[str]: + """Supported authentication mechanisms for the current user. 
+ + For example:: + + >>> hello.sasl_supported_mechs + ["SCRAM-SHA-1", "SCRAM-SHA-256"] + + """ + return self._doc.get("saslSupportedMechs", []) + + @property + def speculative_authenticate(self) -> Optional[Mapping[str, Any]]: + """The speculativeAuthenticate field.""" + return self._doc.get("speculativeAuthenticate") + + @property + def topology_version(self) -> Optional[Mapping[str, Any]]: + return self._doc.get("topologyVersion") + + @property + def awaitable(self) -> bool: + return self._awaitable + + @property + def service_id(self) -> Optional[ObjectId]: + return self._doc.get("serviceId") + + @property + def hello_ok(self) -> bool: + return self._doc.get("helloOk", False) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/helpers.py b/backend/test/lib/python3.8/site-packages/pymongo/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..afab67e4ea81a5f25c8cd4aa34a5ed0420e55ecb --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/helpers.py @@ -0,0 +1,338 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Bits and pieces used by the driver that don't really fit elsewhere.""" +from __future__ import annotations + +import sys +import traceback +from collections import abc +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Container, + Iterable, + List, + Mapping, + NoReturn, + Optional, + Sequence, + Tuple, + TypeVar, + Union, + cast, +) + +from bson.son import SON +from pymongo import ASCENDING +from pymongo.errors import ( + CursorNotFound, + DuplicateKeyError, + ExecutionTimeout, + NotPrimaryError, + OperationFailure, + WriteConcernError, + WriteError, + WTimeoutError, + _wtimeout_error, +) +from pymongo.hello import HelloCompat + +if TYPE_CHECKING: + from pymongo.cursor import _Hint + from pymongo.operations import _IndexList + from pymongo.typings import _DocumentOut + +# From the SDAM spec, the "node is shutting down" codes. +_SHUTDOWN_CODES: frozenset = frozenset( + [ + 11600, # InterruptedAtShutdown + 91, # ShutdownInProgress + ] +) +# From the SDAM spec, the "not primary" error codes are combined with the +# "node is recovering" error codes (of which the "node is shutting down" +# errors are a subset). +_NOT_PRIMARY_CODES: frozenset = ( + frozenset( + [ + 10058, # LegacyNotPrimary <=3.2 "not primary" error code + 10107, # NotWritablePrimary + 13435, # NotPrimaryNoSecondaryOk + 11602, # InterruptedDueToReplStateChange + 13436, # NotPrimaryOrSecondary + 189, # PrimarySteppedDown + ] + ) + | _SHUTDOWN_CODES +) +# From the retryable writes spec. 
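+# Together with the "not primary" codes these identify transient failures
+# where a single retry of the write is safe; any other error fails fast.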
+_RETRYABLE_ERROR_CODES: frozenset = _NOT_PRIMARY_CODES | frozenset( + [ + 7, # HostNotFound + 6, # HostUnreachable + 89, # NetworkTimeout + 9001, # SocketException + 262, # ExceededTimeLimit + ] +) + +# Server code raised when re-authentication is required +_REAUTHENTICATION_REQUIRED_CODE: int = 391 + + +def _gen_index_name(keys: _IndexList) -> str: + """Generate an index name from the set of fields it is over.""" + return "_".join(["{}_{}".format(*item) for item in keys]) + + +def _index_list( + key_or_list: _Hint, direction: Optional[Union[int, str]] = None +) -> Sequence[Tuple[str, Union[int, str, Mapping[str, Any]]]]: + """Helper to generate a list of (key, direction) pairs. + + Takes such a list, or a single key, or a single key and direction. + """ + if direction is not None: + if not isinstance(key_or_list, str): + raise TypeError("Expected a string and a direction") + return [(key_or_list, direction)] + else: + if isinstance(key_or_list, str): + return [(key_or_list, ASCENDING)] + if isinstance(key_or_list, abc.ItemsView): + return list(key_or_list) + elif not isinstance(key_or_list, (list, tuple)): + raise TypeError("if no direction is specified, key_or_list must be an instance of list") + values: List[Tuple[str, int]] = [] + for item in key_or_list: + if isinstance(item, str): + item = (item, ASCENDING) + values.append(item) + return values + + +def _index_document(index_list: _IndexList) -> SON[str, Any]: + """Helper to generate an index specifying document. + + Takes a list of (key, direction) pairs. + """ + if isinstance(index_list, abc.Mapping): + raise TypeError( + "passing a dict to sort/create_index/hint is not " + "allowed - use a list of tuples instead. did you " + "mean %r?" % list(index_list.items()) + ) + elif not isinstance(index_list, (list, tuple)): + raise TypeError("must use a list of (key, direction) pairs, not: " + repr(index_list)) + if not len(index_list): + raise ValueError("key_or_list must not be the empty list") + + index: SON[str, Any] = SON() + for item in index_list: + if isinstance(item, str): + item = (item, ASCENDING) + key, value = item + if not isinstance(key, str): + raise TypeError("first item in each key pair must be an instance of str") + if not isinstance(value, (str, int, abc.Mapping)): + raise TypeError( + "second item in each key pair must be 1, -1, " + "'2d', or another valid MongoDB index specifier." + ) + index[key] = value + return index + + +def _check_command_response( + response: _DocumentOut, + max_wire_version: Optional[int], + allowable_errors: Optional[Container[Union[int, str]]] = None, + parse_write_concern_error: bool = False, +) -> None: + """Check the response to a command for errors.""" + if "ok" not in response: + # Server didn't recognize our message as a command. + raise OperationFailure( + response.get("$err"), # type: ignore[arg-type] + response.get("code"), + response, + max_wire_version, + ) + + if parse_write_concern_error and "writeConcernError" in response: + _error = response["writeConcernError"] + _labels = response.get("errorLabels") + if _labels: + _error.update({"errorLabels": _labels}) + _raise_write_concern_error(_error) + + if response["ok"]: + return + + details = response + # Mongos returns the error details in a 'raw' object + # for some errors. + if "raw" in response: + for shard in response["raw"].values(): + # Grab the first non-empty raw error from a shard. 
+ if shard.get("errmsg") and not shard.get("ok"): + details = shard + break + + errmsg = details["errmsg"] + code = details.get("code") + + # For allowable errors, only check for error messages when the code is not + # included. + if allowable_errors: + if code is not None: + if code in allowable_errors: + return + elif errmsg in allowable_errors: + return + + # Server is "not primary" or "recovering" + if code is not None: + if code in _NOT_PRIMARY_CODES: + raise NotPrimaryError(errmsg, response) + elif HelloCompat.LEGACY_ERROR in errmsg or "node is recovering" in errmsg: + raise NotPrimaryError(errmsg, response) + + # Other errors + # findAndModify with upsert can raise duplicate key error + if code in (11000, 11001, 12582): + raise DuplicateKeyError(errmsg, code, response, max_wire_version) + elif code == 50: + raise ExecutionTimeout(errmsg, code, response, max_wire_version) + elif code == 43: + raise CursorNotFound(errmsg, code, response, max_wire_version) + + raise OperationFailure(errmsg, code, response, max_wire_version) + + +def _raise_last_write_error(write_errors: List[Any]) -> NoReturn: + # If the last batch had multiple errors only report + # the last error to emulate continue_on_error. + error = write_errors[-1] + if error.get("code") == 11000: + raise DuplicateKeyError(error.get("errmsg"), 11000, error) + raise WriteError(error.get("errmsg"), error.get("code"), error) + + +def _raise_write_concern_error(error: Any) -> NoReturn: + if _wtimeout_error(error): + # Make sure we raise WTimeoutError + raise WTimeoutError(error.get("errmsg"), error.get("code"), error) + raise WriteConcernError(error.get("errmsg"), error.get("code"), error) + + +def _get_wce_doc(result: Mapping[str, Any]) -> Optional[Mapping[str, Any]]: + """Return the writeConcernError or None.""" + wce = result.get("writeConcernError") + if wce: + # The server reports errorLabels at the top level but it's more + # convenient to attach it to the writeConcernError doc itself. + error_labels = result.get("errorLabels") + if error_labels: + wce["errorLabels"] = error_labels + return wce + + +def _check_write_command_response(result: Mapping[str, Any]) -> None: + """Backward compatibility helper for write command error handling.""" + # Prefer write errors over write concern errors + write_errors = result.get("writeErrors") + if write_errors: + _raise_last_write_error(write_errors) + + wce = _get_wce_doc(result) + if wce: + _raise_write_concern_error(wce) + + +def _fields_list_to_dict( + fields: Union[Mapping[str, Any], Iterable[str]], option_name: str +) -> Mapping[str, Any]: + """Takes a sequence of field names and returns a matching dictionary. + + ["a", "b"] becomes {"a": 1, "b": 1} + + and + + ["a.b.c", "d", "a.c"] becomes {"a.b.c": 1, "d": 1, "a.c": 1} + """ + if isinstance(fields, abc.Mapping): + return fields + + if isinstance(fields, (abc.Sequence, abc.Set)): + if not all(isinstance(field, str) for field in fields): + raise TypeError(f"{option_name} must be a list of key names, each an instance of str") + return dict.fromkeys(fields, 1) + + raise TypeError(f"{option_name} must be a mapping or list of key names") + + +def _handle_exception() -> None: + """Print exceptions raised by subscribers to stderr.""" + # Heavily influenced by logging.Handler.handleError. 
+ + # See note here: + # https://docs.python.org/3.4/library/sys.html#sys.__stderr__ + if sys.stderr: + einfo = sys.exc_info() + try: + traceback.print_exception(einfo[0], einfo[1], einfo[2], None, sys.stderr) + except OSError: + pass + finally: + del einfo + + +# See https://mypy.readthedocs.io/en/stable/generics.html?#decorator-factories +F = TypeVar("F", bound=Callable[..., Any]) + + +def _handle_reauth(func: F) -> F: + def inner(*args: Any, **kwargs: Any) -> Any: + no_reauth = kwargs.pop("no_reauth", False) + from pymongo.message import _BulkWriteContext + from pymongo.pool import Connection + + try: + return func(*args, **kwargs) + except OperationFailure as exc: + if no_reauth: + raise + if exc.code == _REAUTHENTICATION_REQUIRED_CODE: + # Look for an argument that either is a Connection + # or has a connection attribute, so we can trigger + # a reauth. + conn = None + for arg in args: + if isinstance(arg, Connection): + conn = arg + break + if isinstance(arg, _BulkWriteContext): + conn = arg.conn + break + if conn: + conn.authenticate(reauthenticate=True) + else: + raise + return func(*args, **kwargs) + raise + + return cast(F, inner) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/lock.py b/backend/test/lib/python3.8/site-packages/pymongo/lock.py new file mode 100644 index 0000000000000000000000000000000000000000..741876afcbeebc9663acf96e0f52aa9b4b33209a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/lock.py @@ -0,0 +1,39 @@ +# Copyright 2022-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import threading +import weakref + +_HAS_REGISTER_AT_FORK = hasattr(os, "register_at_fork") + +# References to instances of _create_lock +_forkable_locks: weakref.WeakSet = weakref.WeakSet() + + +def _create_lock() -> threading.Lock: + """Represents a lock that is tracked upon instantiation using a WeakSet and + reset by pymongo upon forking. + """ + lock = threading.Lock() + if _HAS_REGISTER_AT_FORK: + _forkable_locks.add(lock) + return lock + + +def _release_locks() -> None: + # Completed the fork, reset all the locks in the child. + for lock in _forkable_locks: + if lock.locked(): + lock.release() diff --git a/backend/test/lib/python3.8/site-packages/pymongo/max_staleness_selectors.py b/backend/test/lib/python3.8/site-packages/pymongo/max_staleness_selectors.py new file mode 100644 index 0000000000000000000000000000000000000000..10c136a43e0560a21172ede0493e888002a6939a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/max_staleness_selectors.py @@ -0,0 +1,122 @@ +# Copyright 2016 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. 
You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""Criteria to select ServerDescriptions based on maxStalenessSeconds. + +The Max Staleness Spec says: When there is a known primary P, +a secondary S's staleness is estimated with this formula: + + (S.lastUpdateTime - S.lastWriteDate) - (P.lastUpdateTime - P.lastWriteDate) + + heartbeatFrequencyMS + +When there is no known primary, a secondary S's staleness is estimated with: + + SMax.lastWriteDate - S.lastWriteDate + heartbeatFrequencyMS + +where "SMax" is the secondary with the greatest lastWriteDate. +""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +from pymongo.errors import ConfigurationError +from pymongo.server_type import SERVER_TYPE + +if TYPE_CHECKING: + from pymongo.server_selectors import Selection +# Constant defined in Max Staleness Spec: An idle primary writes a no-op every +# 10 seconds to refresh secondaries' lastWriteDate values. +IDLE_WRITE_PERIOD = 10 +SMALLEST_MAX_STALENESS = 90 + + +def _validate_max_staleness(max_staleness: int, heartbeat_frequency: int) -> None: + # We checked for max staleness -1 before this, it must be positive here. + if max_staleness < heartbeat_frequency + IDLE_WRITE_PERIOD: + raise ConfigurationError( + "maxStalenessSeconds must be at least heartbeatFrequencyMS +" + " %d seconds. maxStalenessSeconds is set to %d," + " heartbeatFrequencyMS is set to %d." + % (IDLE_WRITE_PERIOD, max_staleness, heartbeat_frequency * 1000) + ) + + if max_staleness < SMALLEST_MAX_STALENESS: + raise ConfigurationError( + "maxStalenessSeconds must be at least %d. " + "maxStalenessSeconds is set to %d." % (SMALLEST_MAX_STALENESS, max_staleness) + ) + + +def _with_primary(max_staleness: int, selection: Selection) -> Selection: + """Apply max_staleness, in seconds, to a Selection with a known primary.""" + primary = selection.primary + assert primary + sds = [] + + for s in selection.server_descriptions: + if s.server_type == SERVER_TYPE.RSSecondary: + # See max-staleness.rst for explanation of this formula. + assert s.last_write_date and primary.last_write_date + staleness = ( + (s.last_update_time - s.last_write_date) + - (primary.last_update_time - primary.last_write_date) + + selection.heartbeat_frequency + ) + + if staleness <= max_staleness: + sds.append(s) + else: + sds.append(s) + + return selection.with_server_descriptions(sds) + + +def _no_primary(max_staleness: int, selection: Selection) -> Selection: + """Apply max_staleness, in seconds, to a Selection with no known primary.""" + # Secondary that's replicated the most recent writes. + smax = selection.secondary_with_max_last_write_date() + if not smax: + # No secondaries and no primary, short-circuit out of here. + return selection.with_server_descriptions([]) + + sds = [] + + for s in selection.server_descriptions: + if s.server_type == SERVER_TYPE.RSSecondary: + # See max-staleness.rst for explanation of this formula. 
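+                # i.e. staleness = SMax.lastWriteDate - S.lastWriteDate
+                #      + heartbeat_frequency (the no-primary formula from the
+                #      module docstring, all values in seconds).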
+ assert smax.last_write_date and s.last_write_date + staleness = smax.last_write_date - s.last_write_date + selection.heartbeat_frequency + + if staleness <= max_staleness: + sds.append(s) + else: + sds.append(s) + + return selection.with_server_descriptions(sds) + + +def select(max_staleness: int, selection: Selection) -> Selection: + """Apply max_staleness, in seconds, to a Selection.""" + if max_staleness == -1: + return selection + + # Server Selection Spec: If the TopologyType is ReplicaSetWithPrimary or + # ReplicaSetNoPrimary, a client MUST raise an error if maxStaleness < + # heartbeatFrequency + IDLE_WRITE_PERIOD, or if maxStaleness < 90. + _validate_max_staleness(max_staleness, selection.heartbeat_frequency) + + if selection.primary: + return _with_primary(max_staleness, selection) + else: + return _no_primary(max_staleness, selection) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/message.py b/backend/test/lib/python3.8/site-packages/pymongo/message.py new file mode 100644 index 0000000000000000000000000000000000000000..5a4b1753f97cf20dea4baf234967436021ab964f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/message.py @@ -0,0 +1,1658 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for creating `messages +<https://www.mongodb.com/docs/manual/reference/mongodb-wire-protocol/>`_ to be sent to +MongoDB. + +.. note:: This module is for internal use and is generally not needed by + application developers. 
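+
+As a rough orientation (a sketch against internal helpers that may change
+without notice), the ``_gen_find_command`` helper defined later in this
+module turns a legacy query spec into a ``find`` command document::
+
+    _gen_find_command("coll", {"x": 1}, None, 0, -1, None, None, ReadConcern())
+    # -> SON([("find", "coll"), ("filter", {"x": 1}),
+    #         ("limit", 1), ("singleBatch", True)])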
+""" +from __future__ import annotations + +import datetime +import random +import struct +from io import BytesIO as _BytesIO +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterable, + List, + Mapping, + MutableMapping, + NoReturn, + Optional, + Tuple, + Union, + cast, +) + +import bson +from bson import CodecOptions, _decode_selective, _dict_to_bson, _make_c_string, encode +from bson.int64 import Int64 +from bson.raw_bson import ( + _RAW_ARRAY_BSON_OPTIONS, + DEFAULT_RAW_BSON_OPTIONS, + RawBSONDocument, + _inflate_bson, +) +from bson.son import SON + +try: + from pymongo import _cmessage # type: ignore[attr-defined] + + _use_c = True +except ImportError: + _use_c = False +from pymongo.errors import ( + ConfigurationError, + CursorNotFound, + DocumentTooLarge, + ExecutionTimeout, + InvalidOperation, + NotPrimaryError, + OperationFailure, + ProtocolError, +) +from pymongo.hello import HelloCompat +from pymongo.helpers import _handle_reauth +from pymongo.read_preferences import ReadPreference +from pymongo.write_concern import WriteConcern + +if TYPE_CHECKING: + from datetime import timedelta + + from pymongo.client_session import ClientSession + from pymongo.compression_support import SnappyContext, ZlibContext, ZstdContext + from pymongo.mongo_client import MongoClient + from pymongo.monitoring import _EventListeners + from pymongo.pool import Connection + from pymongo.read_concern import ReadConcern + from pymongo.read_preferences import _ServerMode + from pymongo.typings import _Address, _DocumentOut + +MAX_INT32 = 2147483647 +MIN_INT32 = -2147483648 + +# Overhead allowed for encoded command documents. +_COMMAND_OVERHEAD = 16382 + +_INSERT = 0 +_UPDATE = 1 +_DELETE = 2 + +_EMPTY = b"" +_BSONOBJ = b"\x03" +_ZERO_8 = b"\x00" +_ZERO_16 = b"\x00\x00" +_ZERO_32 = b"\x00\x00\x00\x00" +_ZERO_64 = b"\x00\x00\x00\x00\x00\x00\x00\x00" +_SKIPLIM = b"\x00\x00\x00\x00\xff\xff\xff\xff" +_OP_MAP = { + _INSERT: b"\x04documents\x00\x00\x00\x00\x00", + _UPDATE: b"\x04updates\x00\x00\x00\x00\x00", + _DELETE: b"\x04deletes\x00\x00\x00\x00\x00", +} +_FIELD_MAP = {"insert": "documents", "update": "updates", "delete": "deletes"} + +_UNICODE_REPLACE_CODEC_OPTIONS: "CodecOptions[Mapping[str, Any]]" = CodecOptions( + unicode_decode_error_handler="replace" +) + + +def _randint() -> int: + """Generate a pseudo random 32 bit integer.""" + return random.randint(MIN_INT32, MAX_INT32) + + +def _maybe_add_read_preference( + spec: MutableMapping[str, Any], read_preference: _ServerMode +) -> MutableMapping[str, Any]: + """Add $readPreference to spec when appropriate.""" + mode = read_preference.mode + document = read_preference.document + # Only add $readPreference if it's something other than primary to avoid + # problems with mongos versions that don't support read preferences. Also, + # for maximum backwards compatibility, don't add $readPreference for + # secondaryPreferred unless tags or maxStalenessSeconds are in use (setting + # the secondaryOkay bit has the same effect). 
+ if mode and (mode != ReadPreference.SECONDARY_PREFERRED.mode or len(document) > 1): + if "$query" not in spec: + spec = SON([("$query", spec)]) + spec["$readPreference"] = document + return spec + + +def _convert_exception(exception: Exception) -> Dict[str, Any]: + """Convert an Exception into a failure document for publishing.""" + return {"errmsg": str(exception), "errtype": exception.__class__.__name__} + + +def _convert_write_result( + operation: str, command: Mapping[str, Any], result: Mapping[str, Any] +) -> Dict[str, Any]: + """Convert a legacy write result to write command format.""" + # Based on _merge_legacy from bulk.py + affected = result.get("n", 0) + res = {"ok": 1, "n": affected} + errmsg = result.get("errmsg", result.get("err", "")) + if errmsg: + # The write was successful on at least the primary so don't return. + if result.get("wtimeout"): + res["writeConcernError"] = {"errmsg": errmsg, "code": 64, "errInfo": {"wtimeout": True}} + else: + # The write failed. + error = {"index": 0, "code": result.get("code", 8), "errmsg": errmsg} + if "errInfo" in result: + error["errInfo"] = result["errInfo"] + res["writeErrors"] = [error] + return res + if operation == "insert": + # GLE result for insert is always 0 in most MongoDB versions. + res["n"] = len(command["documents"]) + elif operation == "update": + if "upserted" in result: + res["upserted"] = [{"index": 0, "_id": result["upserted"]}] + # Versions of MongoDB before 2.6 don't return the _id for an + # upsert if _id is not an ObjectId. + elif result.get("updatedExisting") is False and affected == 1: + # If _id is in both the update document *and* the query spec + # the update document _id takes precedence. + update = command["updates"][0] + _id = update["u"].get("_id", update["q"].get("_id")) + res["upserted"] = [{"index": 0, "_id": _id}] + return res + + +_OPTIONS = SON( + [ + ("tailable", 2), + ("oplogReplay", 8), + ("noCursorTimeout", 16), + ("awaitData", 32), + ("allowPartialResults", 128), + ] +) + + +_MODIFIERS = SON( + [ + ("$query", "filter"), + ("$orderby", "sort"), + ("$hint", "hint"), + ("$comment", "comment"), + ("$maxScan", "maxScan"), + ("$maxTimeMS", "maxTimeMS"), + ("$max", "max"), + ("$min", "min"), + ("$returnKey", "returnKey"), + ("$showRecordId", "showRecordId"), + ("$showDiskLoc", "showRecordId"), # <= MongoDb 3.0 + ("$snapshot", "snapshot"), + ] +) + + +def _gen_find_command( + coll: str, + spec: Mapping[str, Any], + projection: Optional[Union[Mapping[str, Any], Iterable[str]]], + skip: int, + limit: int, + batch_size: Optional[int], + options: Optional[int], + read_concern: ReadConcern, + collation: Optional[Mapping[str, Any]] = None, + session: Optional[ClientSession] = None, + allow_disk_use: Optional[bool] = None, +) -> SON[str, Any]: + """Generate a find command document.""" + cmd: SON[str, Any] = SON([("find", coll)]) + if "$query" in spec: + cmd.update( + [ + (_MODIFIERS[key], val) if key in _MODIFIERS else (key, val) + for key, val in spec.items() + ] + ) + if "$explain" in cmd: + cmd.pop("$explain") + if "$readPreference" in cmd: + cmd.pop("$readPreference") + else: + cmd["filter"] = spec + + if projection: + cmd["projection"] = projection + if skip: + cmd["skip"] = skip + if limit: + cmd["limit"] = abs(limit) + if limit < 0: + cmd["singleBatch"] = True + if batch_size: + cmd["batchSize"] = batch_size + if read_concern.level and not (session and session.in_transaction): + cmd["readConcern"] = read_concern.document + if collation: + cmd["collation"] = collation + if allow_disk_use is not None: + 
cmd["allowDiskUse"] = allow_disk_use + if options: + cmd.update([(opt, True) for opt, val in _OPTIONS.items() if options & val]) + + return cmd + + +def _gen_get_more_command( + cursor_id: Optional[int], + coll: str, + batch_size: Optional[int], + max_await_time_ms: Optional[int], + comment: Optional[Any], + conn: Connection, +) -> SON[str, Any]: + """Generate a getMore command document.""" + cmd: SON[str, Any] = SON([("getMore", cursor_id), ("collection", coll)]) + if batch_size: + cmd["batchSize"] = batch_size + if max_await_time_ms is not None: + cmd["maxTimeMS"] = max_await_time_ms + if comment is not None and conn.max_wire_version >= 9: + cmd["comment"] = comment + return cmd + + +class _Query: + """A query operation.""" + + __slots__ = ( + "flags", + "db", + "coll", + "ntoskip", + "spec", + "fields", + "codec_options", + "read_preference", + "limit", + "batch_size", + "name", + "read_concern", + "collation", + "session", + "client", + "allow_disk_use", + "_as_command", + "exhaust", + ) + + # For compatibility with the _GetMore class. + conn_mgr = None + cursor_id = None + + def __init__( + self, + flags: int, + db: str, + coll: str, + ntoskip: int, + spec: Mapping[str, Any], + fields: Optional[Mapping[str, Any]], + codec_options: CodecOptions, + read_preference: _ServerMode, + limit: int, + batch_size: int, + read_concern: ReadConcern, + collation: Optional[Mapping[str, Any]], + session: Optional[ClientSession], + client: MongoClient, + allow_disk_use: Optional[bool], + exhaust: bool, + ): + self.flags = flags + self.db = db + self.coll = coll + self.ntoskip = ntoskip + self.spec = spec + self.fields = fields + self.codec_options = codec_options + self.read_preference = read_preference + self.read_concern = read_concern + self.limit = limit + self.batch_size = batch_size + self.collation = collation + self.session = session + self.client = client + self.allow_disk_use = allow_disk_use + self.name = "find" + self._as_command: Optional[Tuple[SON[str, Any], str]] = None + self.exhaust = exhaust + + def reset(self) -> None: + self._as_command = None + + def namespace(self) -> str: + return f"{self.db}.{self.coll}" + + def use_command(self, conn: Connection) -> bool: + use_find_cmd = False + if not self.exhaust: + use_find_cmd = True + elif conn.max_wire_version >= 8: + # OP_MSG supports exhaust on MongoDB 4.2+ + use_find_cmd = True + elif not self.read_concern.ok_for_legacy: + raise ConfigurationError( + "read concern level of %s is not valid " + "with a max wire version of %d." % (self.read_concern.level, conn.max_wire_version) + ) + + conn.validate_session(self.client, self.session) + return use_find_cmd + + def as_command( + self, conn: Connection, apply_timeout: bool = False + ) -> Tuple[SON[str, Any], str]: + """Return a find command document for this query.""" + # We use the command twice: on the wire and for command monitoring. + # Generate it once, for speed and to avoid repeating side-effects. + if self._as_command is not None: + return self._as_command + + explain = "$explain" in self.spec + cmd: SON[str, Any] = _gen_find_command( + self.coll, + self.spec, + self.fields, + self.ntoskip, + self.limit, + self.batch_size, + self.flags, + self.read_concern, + self.collation, + self.session, + self.allow_disk_use, + ) + if explain: + self.name = "explain" + cmd = SON([("explain", cmd)]) + session = self.session + conn.add_server_api(cmd) + if session: + session._apply_to(cmd, False, self.read_preference, conn) + # Explain does not support readConcern. 
+ if not explain and not session.in_transaction: + session._update_read_concern(cmd, conn) + conn.send_cluster_time(cmd, session, self.client) + # Support auto encryption + client = self.client + if client._encrypter and not client._encrypter._bypass_auto_encryption: + cmd = cast(SON[str, Any], client._encrypter.encrypt(self.db, cmd, self.codec_options)) + # Support CSOT + if apply_timeout: + conn.apply_timeout(client, cmd) + self._as_command = cmd, self.db + return self._as_command + + def get_message( + self, read_preference: _ServerMode, conn: Connection, use_cmd: bool = False + ) -> Tuple[int, bytes, int]: + """Get a query message, possibly setting the secondaryOk bit.""" + # Use the read_preference decided by _socket_from_server. + self.read_preference = read_preference + if read_preference.mode: + # Set the secondaryOk bit. + flags = self.flags | 4 + else: + flags = self.flags + + ns = self.namespace() + spec = self.spec + + if use_cmd: + spec = self.as_command(conn, apply_timeout=True)[0] + request_id, msg, size, _ = _op_msg( + 0, + spec, + self.db, + read_preference, + self.codec_options, + ctx=conn.compression_context, + ) + return request_id, msg, size + + # OP_QUERY treats ntoreturn of -1 and 1 the same, return + # one document and close the cursor. We have to use 2 for + # batch size if 1 is specified. + ntoreturn = self.batch_size == 1 and 2 or self.batch_size + if self.limit: + if ntoreturn: + ntoreturn = min(self.limit, ntoreturn) + else: + ntoreturn = self.limit + + if conn.is_mongos: + assert isinstance(spec, MutableMapping) + spec = _maybe_add_read_preference(spec, read_preference) + + return _query( + flags, + ns, + self.ntoskip, + ntoreturn, + spec, + None if use_cmd else self.fields, + self.codec_options, + ctx=conn.compression_context, + ) + + +class _GetMore: + """A getmore operation.""" + + __slots__ = ( + "db", + "coll", + "ntoreturn", + "cursor_id", + "max_await_time_ms", + "codec_options", + "read_preference", + "session", + "client", + "conn_mgr", + "_as_command", + "exhaust", + "comment", + ) + + name = "getMore" + + def __init__( + self, + db: str, + coll: str, + ntoreturn: int, + cursor_id: int, + codec_options: CodecOptions, + read_preference: _ServerMode, + session: Optional[ClientSession], + client: MongoClient, + max_await_time_ms: Optional[int], + conn_mgr: Any, + exhaust: bool, + comment: Any, + ): + self.db = db + self.coll = coll + self.ntoreturn = ntoreturn + self.cursor_id = cursor_id + self.codec_options = codec_options + self.read_preference = read_preference + self.session = session + self.client = client + self.max_await_time_ms = max_await_time_ms + self.conn_mgr = conn_mgr + self._as_command: Optional[Tuple[SON[str, Any], str]] = None + self.exhaust = exhaust + self.comment = comment + + def reset(self) -> None: + self._as_command = None + + def namespace(self) -> str: + return f"{self.db}.{self.coll}" + + def use_command(self, conn: Connection) -> bool: + use_cmd = False + if not self.exhaust: + use_cmd = True + elif conn.max_wire_version >= 8: + # OP_MSG supports exhaust on MongoDB 4.2+ + use_cmd = True + + conn.validate_session(self.client, self.session) + return use_cmd + + def as_command( + self, conn: Connection, apply_timeout: bool = False + ) -> Tuple[SON[str, Any], str]: + """Return a getMore command document for this query.""" + # See _Query.as_command for an explanation of this caching. 
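+        # Rough shape of the command built below (values illustrative):
+        #     SON([("getMore", <cursor id>), ("collection", "coll"),
+        #          ("batchSize", 100), ("maxTimeMS", 500),
+        #          ("comment", ...)])  # comment only on wire version 9+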
+ if self._as_command is not None: + return self._as_command + + cmd: SON[str, Any] = _gen_get_more_command( + self.cursor_id, + self.coll, + self.ntoreturn, + self.max_await_time_ms, + self.comment, + conn, + ) + if self.session: + self.session._apply_to(cmd, False, self.read_preference, conn) + conn.add_server_api(cmd) + conn.send_cluster_time(cmd, self.session, self.client) + # Support auto encryption + client = self.client + if client._encrypter and not client._encrypter._bypass_auto_encryption: + cmd = cast(SON[str, Any], client._encrypter.encrypt(self.db, cmd, self.codec_options)) + # Support CSOT + if apply_timeout: + conn.apply_timeout(client, cmd=None) + self._as_command = cmd, self.db + return self._as_command + + def get_message( + self, dummy0: Any, conn: Connection, use_cmd: bool = False + ) -> Union[Tuple[int, bytes, int], Tuple[int, bytes]]: + """Get a getmore message.""" + ns = self.namespace() + ctx = conn.compression_context + + if use_cmd: + spec = self.as_command(conn, apply_timeout=True)[0] + if self.conn_mgr: + flags = _OpMsg.EXHAUST_ALLOWED + else: + flags = 0 + request_id, msg, size, _ = _op_msg( + flags, spec, self.db, None, self.codec_options, ctx=conn.compression_context + ) + return request_id, msg, size + + return _get_more(ns, self.ntoreturn, self.cursor_id, ctx) + + +class _RawBatchQuery(_Query): + def use_command(self, conn: Connection) -> bool: + # Compatibility checks. + super().use_command(conn) + if conn.max_wire_version >= 8: + # MongoDB 4.2+ supports exhaust over OP_MSG + return True + elif not self.exhaust: + return True + return False + + +class _RawBatchGetMore(_GetMore): + def use_command(self, conn: Connection) -> bool: + # Compatibility checks. + super().use_command(conn) + if conn.max_wire_version >= 8: + # MongoDB 4.2+ supports exhaust over OP_MSG + return True + elif not self.exhaust: + return True + return False + + +class _CursorAddress(tuple): + """The server address (host, port) of a cursor, with namespace property.""" + + __namespace: Any + + def __new__(cls, address: _Address, namespace: str) -> _CursorAddress: + self = tuple.__new__(cls, address) + self.__namespace = namespace + return self + + @property + def namespace(self) -> str: + """The namespace this cursor.""" + return self.__namespace + + def __hash__(self) -> int: + # Two _CursorAddress instances with different namespaces + # must not hash the same. 
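+        # (For example, _CursorAddress(("h", 27017), "db.a") and
+        # _CursorAddress(("h", 27017), "db.b") compare equal as bare
+        # tuples, so the namespace is mixed into the hash computed here.)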
+ return ((*self, self.__namespace)).__hash__() + + def __eq__(self, other: object) -> bool: + if isinstance(other, _CursorAddress): + return tuple(self) == tuple(other) and self.namespace == other.namespace + return NotImplemented + + def __ne__(self, other: object) -> bool: + return not self == other + + +_pack_compression_header = struct.Struct("<iiiiiiB").pack +_COMPRESSION_HEADER_SIZE = 25 + + +def _compress( + operation: int, data: bytes, ctx: Union[SnappyContext, ZlibContext, ZstdContext] +) -> Tuple[int, bytes]: + """Takes message data, compresses it, and adds an OP_COMPRESSED header.""" + compressed = ctx.compress(data) + request_id = _randint() + + header = _pack_compression_header( + _COMPRESSION_HEADER_SIZE + len(compressed), # Total message length + request_id, # Request id + 0, # responseTo + 2012, # operation id + operation, # original operation id + len(data), # uncompressed message length + ctx.compressor_id, + ) # compressor id + return request_id, header + compressed + + +_pack_header = struct.Struct("<iiii").pack + + +def __pack_message(operation: int, data: bytes) -> Tuple[int, bytes]: + """Takes message data and adds a message header based on the operation. + + Returns the resultant message string. + """ + rid = _randint() + message = _pack_header(16 + len(data), rid, 0, operation) + return rid, message + data + + +_pack_int = struct.Struct("<i").pack +_pack_op_msg_flags_type = struct.Struct("<IB").pack +_pack_byte = struct.Struct("<B").pack + + +def _op_msg_no_header( + flags: int, + command: Mapping[str, Any], + identifier: str, + docs: Optional[List[Mapping[str, Any]]], + opts: CodecOptions, +) -> Tuple[bytes, int, int]: + """Get a OP_MSG message. + + Note: this method handles multiple documents in a type one payload but + it does not perform batch splitting and the total message size is + only checked *after* generating the entire message. + """ + # Encode the command document in payload 0 without checking keys. 
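+    # Resulting OP_MSG body layout (sketch; the 16-byte message header is
+    # added afterwards by __pack_message, or folded into the 25-byte
+    # OP_COMPRESSED header when compressing):
+    #     <flagBits int32><kind 0x00><BSON command>               payload 0
+    #     [<kind 0x01><int32 size><identifier cstring><BSON>...]  payload 1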
+ encoded = _dict_to_bson(command, False, opts) + flags_type = _pack_op_msg_flags_type(flags, 0) + total_size = len(encoded) + max_doc_size = 0 + if identifier and docs is not None: + type_one = _pack_byte(1) + cstring = _make_c_string(identifier) + encoded_docs = [_dict_to_bson(doc, False, opts) for doc in docs] + size = len(cstring) + sum(len(doc) for doc in encoded_docs) + 4 + encoded_size = _pack_int(size) + total_size += size + max_doc_size = max(len(doc) for doc in encoded_docs) + data = [flags_type, encoded, type_one, encoded_size, cstring, *encoded_docs] + else: + data = [flags_type, encoded] + return b"".join(data), total_size, max_doc_size + + +def _op_msg_compressed( + flags: int, + command: Mapping[str, Any], + identifier: str, + docs: Optional[List[Mapping[str, Any]]], + opts: CodecOptions, + ctx: Union[SnappyContext, ZlibContext, ZstdContext], +) -> Tuple[int, bytes, int, int]: + """Internal OP_MSG message helper.""" + msg, total_size, max_bson_size = _op_msg_no_header(flags, command, identifier, docs, opts) + rid, msg = _compress(2013, msg, ctx) + return rid, msg, total_size, max_bson_size + + +def _op_msg_uncompressed( + flags: int, + command: Mapping[str, Any], + identifier: str, + docs: Optional[List[Mapping[str, Any]]], + opts: CodecOptions, +) -> Tuple[int, bytes, int, int]: + """Internal compressed OP_MSG message helper.""" + data, total_size, max_bson_size = _op_msg_no_header(flags, command, identifier, docs, opts) + request_id, op_message = __pack_message(2013, data) + return request_id, op_message, total_size, max_bson_size + + +if _use_c: + _op_msg_uncompressed = _cmessage._op_msg # noqa: F811 + + +def _op_msg( + flags: int, + command: MutableMapping[str, Any], + dbname: str, + read_preference: Optional[_ServerMode], + opts: CodecOptions, + ctx: Union[SnappyContext, ZlibContext, ZstdContext, None] = None, +) -> Tuple[int, bytes, int, int]: + """Get a OP_MSG message.""" + command["$db"] = dbname + # getMore commands do not send $readPreference. + if read_preference is not None and "$readPreference" not in command: + # Only send $readPreference if it's not primary (the default). + if read_preference.mode: + command["$readPreference"] = read_preference.document + name = next(iter(command)) + try: + identifier = _FIELD_MAP[name] + docs = command.pop(identifier) + except KeyError: + identifier = "" + docs = None + try: + if ctx: + return _op_msg_compressed(flags, command, identifier, docs, opts, ctx) + return _op_msg_uncompressed(flags, command, identifier, docs, opts) + finally: + # Add the field back to the command. 
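+        # (For example, an "insert" command had its "documents" list
+        # popped above so the documents could ride in the type-1 payload;
+        # restoring it here leaves the caller's mapping unmodified.)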
+ if identifier: + command[identifier] = docs + + +def _query_impl( + options: int, + collection_name: str, + num_to_skip: int, + num_to_return: int, + query: Mapping[str, Any], + field_selector: Optional[Mapping[str, Any]], + opts: CodecOptions, +) -> Tuple[bytes, int]: + """Get an OP_QUERY message.""" + encoded = _dict_to_bson(query, False, opts) + if field_selector: + efs = _dict_to_bson(field_selector, False, opts) + else: + efs = b"" + max_bson_size = max(len(encoded), len(efs)) + return ( + b"".join( + [ + _pack_int(options), + _make_c_string(collection_name), + _pack_int(num_to_skip), + _pack_int(num_to_return), + encoded, + efs, + ] + ), + max_bson_size, + ) + + +def _query_compressed( + options: int, + collection_name: str, + num_to_skip: int, + num_to_return: int, + query: Mapping[str, Any], + field_selector: Optional[Mapping[str, Any]], + opts: CodecOptions, + ctx: Union[SnappyContext, ZlibContext, ZstdContext], +) -> Tuple[int, bytes, int]: + """Internal compressed query message helper.""" + op_query, max_bson_size = _query_impl( + options, collection_name, num_to_skip, num_to_return, query, field_selector, opts + ) + rid, msg = _compress(2004, op_query, ctx) + return rid, msg, max_bson_size + + +def _query_uncompressed( + options: int, + collection_name: str, + num_to_skip: int, + num_to_return: int, + query: Mapping[str, Any], + field_selector: Optional[Mapping[str, Any]], + opts: CodecOptions, +) -> Tuple[int, bytes, int]: + """Internal query message helper.""" + op_query, max_bson_size = _query_impl( + options, collection_name, num_to_skip, num_to_return, query, field_selector, opts + ) + rid, msg = __pack_message(2004, op_query) + return rid, msg, max_bson_size + + +if _use_c: + _query_uncompressed = _cmessage._query_message # noqa: F811 + + +def _query( + options: int, + collection_name: str, + num_to_skip: int, + num_to_return: int, + query: Mapping[str, Any], + field_selector: Optional[Mapping[str, Any]], + opts: CodecOptions, + ctx: Union[SnappyContext, ZlibContext, ZstdContext, None] = None, +) -> Tuple[int, bytes, int]: + """Get a **query** message.""" + if ctx: + return _query_compressed( + options, collection_name, num_to_skip, num_to_return, query, field_selector, opts, ctx + ) + return _query_uncompressed( + options, collection_name, num_to_skip, num_to_return, query, field_selector, opts + ) + + +_pack_long_long = struct.Struct("<q").pack + + +def _get_more_impl(collection_name: str, num_to_return: int, cursor_id: int) -> bytes: + """Get an OP_GET_MORE message.""" + return b"".join( + [ + _ZERO_32, + _make_c_string(collection_name), + _pack_int(num_to_return), + _pack_long_long(cursor_id), + ] + ) + + +def _get_more_compressed( + collection_name: str, + num_to_return: int, + cursor_id: int, + ctx: Union[SnappyContext, ZlibContext, ZstdContext], +) -> Tuple[int, bytes]: + """Internal compressed getMore message helper.""" + return _compress(2005, _get_more_impl(collection_name, num_to_return, cursor_id), ctx) + + +def _get_more_uncompressed( + collection_name: str, num_to_return: int, cursor_id: int +) -> Tuple[int, bytes]: + """Internal getMore message helper.""" + return __pack_message(2005, _get_more_impl(collection_name, num_to_return, cursor_id)) + + +if _use_c: + _get_more_uncompressed = _cmessage._get_more_message # noqa: F811 + + +def _get_more( + collection_name: str, + num_to_return: int, + cursor_id: int, + ctx: Union[SnappyContext, ZlibContext, ZstdContext, None] = None, +) -> Tuple[int, bytes]: + """Get a **getMore** message.""" + if ctx: + return 
_get_more_compressed(collection_name, num_to_return, cursor_id, ctx) + return _get_more_uncompressed(collection_name, num_to_return, cursor_id) + + +class _BulkWriteContext: + """A wrapper around Connection for use with write splitting functions.""" + + __slots__ = ( + "db_name", + "conn", + "op_id", + "name", + "field", + "publish", + "start_time", + "listeners", + "session", + "compress", + "op_type", + "codec", + ) + + def __init__( + self, + database_name: str, + cmd_name: str, + conn: Connection, + operation_id: int, + listeners: _EventListeners, + session: ClientSession, + op_type: int, + codec: CodecOptions, + ): + self.db_name = database_name + self.conn = conn + self.op_id = operation_id + self.listeners = listeners + self.publish = listeners.enabled_for_commands + self.name = cmd_name + self.field = _FIELD_MAP[self.name] + self.start_time = datetime.datetime.now() if self.publish else None + self.session = session + self.compress = True if conn.compression_context else False + self.op_type = op_type + self.codec = codec + + def __batch_command( + self, cmd: MutableMapping[str, Any], docs: List[Mapping[str, Any]] + ) -> Tuple[int, bytes, List[Mapping[str, Any]]]: + namespace = self.db_name + ".$cmd" + request_id, msg, to_send = _do_batched_op_msg( + namespace, self.op_type, cmd, docs, self.codec, self + ) + if not to_send: + raise InvalidOperation("cannot do an empty bulk write") + return request_id, msg, to_send + + def execute( + self, cmd: MutableMapping[str, Any], docs: List[Mapping[str, Any]], client: MongoClient + ) -> Tuple[Mapping[str, Any], List[Mapping[str, Any]]]: + request_id, msg, to_send = self.__batch_command(cmd, docs) + result = self.write_command(cmd, request_id, msg, to_send) + client._process_response(result, self.session) + return result, to_send + + def execute_unack( + self, cmd: MutableMapping[str, Any], docs: List[Mapping[str, Any]], client: MongoClient + ) -> List[Mapping[str, Any]]: + request_id, msg, to_send = self.__batch_command(cmd, docs) + # Though this isn't strictly a "legacy" write, the helper + # handles publishing commands and sending our message + # without receiving a result. Send 0 for max_doc_size + # to disable size checking. Size checking is handled while + # the documents are encoded to BSON. + self.unack_write(cmd, request_id, msg, 0, to_send) + return to_send + + @property + def max_bson_size(self) -> int: + """A proxy for SockInfo.max_bson_size.""" + return self.conn.max_bson_size + + @property + def max_message_size(self) -> int: + """A proxy for SockInfo.max_message_size.""" + if self.compress: + # Subtract 16 bytes for the message header. 
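+            # (The buffer whose size is checked while batching omits that
+            # header in the compressed path; _compress prepends its own
+            # 25-byte OP_COMPRESSED header afterwards, so the limit is
+            # tightened here to compensate.)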
+ return self.conn.max_message_size - 16 + return self.conn.max_message_size + + @property + def max_write_batch_size(self) -> int: + """A proxy for SockInfo.max_write_batch_size.""" + return self.conn.max_write_batch_size + + @property + def max_split_size(self) -> int: + """The maximum size of a BSON command before batch splitting.""" + return self.max_bson_size + + def unack_write( + self, + cmd: MutableMapping[str, Any], + request_id: int, + msg: bytes, + max_doc_size: int, + docs: List[Mapping[str, Any]], + ) -> Optional[Mapping[str, Any]]: + """A proxy for Connection.unack_write that handles event publishing.""" + if self.publish: + assert self.start_time is not None + duration = datetime.datetime.now() - self.start_time + cmd = self._start(cmd, request_id, docs) + start = datetime.datetime.now() + try: + result = self.conn.unack_write(msg, max_doc_size) # type: ignore[func-returns-value] + if self.publish: + duration = (datetime.datetime.now() - start) + duration + if result is not None: + reply = _convert_write_result(self.name, cmd, result) + else: + # Comply with APM spec. + reply = {"ok": 1} + self._succeed(request_id, reply, duration) + except Exception as exc: + if self.publish: + assert self.start_time is not None + duration = (datetime.datetime.now() - start) + duration + if isinstance(exc, OperationFailure): + failure: _DocumentOut = _convert_write_result(self.name, cmd, exc.details) # type: ignore[arg-type] + elif isinstance(exc, NotPrimaryError): + failure = exc.details # type: ignore[assignment] + else: + failure = _convert_exception(exc) + self._fail(request_id, failure, duration) + raise + finally: + self.start_time = datetime.datetime.now() + return result + + @_handle_reauth + def write_command( + self, + cmd: MutableMapping[str, Any], + request_id: int, + msg: bytes, + docs: List[Mapping[str, Any]], + ) -> Dict[str, Any]: + """A proxy for SocketInfo.write_command that handles event publishing.""" + if self.publish: + assert self.start_time is not None + duration = datetime.datetime.now() - self.start_time + self._start(cmd, request_id, docs) + start = datetime.datetime.now() + try: + reply = self.conn.write_command(request_id, msg, self.codec) + if self.publish: + duration = (datetime.datetime.now() - start) + duration + self._succeed(request_id, reply, duration) + except Exception as exc: + if self.publish: + duration = (datetime.datetime.now() - start) + duration + if isinstance(exc, (NotPrimaryError, OperationFailure)): + failure: _DocumentOut = exc.details # type: ignore[assignment] + else: + failure = _convert_exception(exc) + self._fail(request_id, failure, duration) + raise + finally: + self.start_time = datetime.datetime.now() + return reply + + def _start( + self, cmd: MutableMapping[str, Any], request_id: int, docs: List[Mapping[str, Any]] + ) -> MutableMapping[str, Any]: + """Publish a CommandStartedEvent.""" + cmd[self.field] = docs + self.listeners.publish_command_start( + cmd, + self.db_name, + request_id, + self.conn.address, + self.op_id, + self.conn.service_id, + ) + return cmd + + def _succeed(self, request_id: int, reply: _DocumentOut, duration: timedelta) -> None: + """Publish a CommandSucceededEvent.""" + self.listeners.publish_command_success( + duration, + reply, + self.name, + request_id, + self.conn.address, + self.op_id, + self.conn.service_id, + ) + + def _fail(self, request_id: int, failure: _DocumentOut, duration: timedelta) -> None: + """Publish a CommandFailedEvent.""" + self.listeners.publish_command_failure( + duration, + failure, + 
self.name, + request_id, + self.conn.address, + self.op_id, + self.conn.service_id, + ) + + +# From the Client Side Encryption spec: +# Because automatic encryption increases the size of commands, the driver +# MUST split bulk writes at a reduced size limit before undergoing automatic +# encryption. The write payload MUST be split at 2MiB (2097152). +_MAX_SPLIT_SIZE_ENC = 2097152 + + +class _EncryptedBulkWriteContext(_BulkWriteContext): + __slots__ = () + + def __batch_command( + self, cmd: MutableMapping[str, Any], docs: List[Mapping[str, Any]] + ) -> Tuple[Dict[str, Any], List[Mapping[str, Any]]]: + namespace = self.db_name + ".$cmd" + msg, to_send = _encode_batched_write_command( + namespace, self.op_type, cmd, docs, self.codec, self + ) + if not to_send: + raise InvalidOperation("cannot do an empty bulk write") + + # Chop off the OP_QUERY header to get a properly batched write command. + cmd_start = msg.index(b"\x00", 4) + 9 + cmd = _inflate_bson(memoryview(msg)[cmd_start:], DEFAULT_RAW_BSON_OPTIONS) + return cmd, to_send + + def execute( + self, cmd: MutableMapping[str, Any], docs: List[Mapping[str, Any]], client: MongoClient + ) -> Tuple[Mapping[str, Any], List[Mapping[str, Any]]]: + batched_cmd, to_send = self.__batch_command(cmd, docs) + result: Mapping[str, Any] = self.conn.command( + self.db_name, batched_cmd, codec_options=self.codec, session=self.session, client=client + ) + return result, to_send + + def execute_unack( + self, cmd: MutableMapping[str, Any], docs: List[Mapping[str, Any]], client: MongoClient + ) -> List[Mapping[str, Any]]: + batched_cmd, to_send = self.__batch_command(cmd, docs) + self.conn.command( + self.db_name, + batched_cmd, + write_concern=WriteConcern(w=0), + session=self.session, + client=client, + ) + return to_send + + @property + def max_split_size(self) -> int: + """Reduce the batch splitting size.""" + return _MAX_SPLIT_SIZE_ENC + + +def _raise_document_too_large(operation: str, doc_size: int, max_size: int) -> NoReturn: + """Internal helper for raising DocumentTooLarge.""" + if operation == "insert": + raise DocumentTooLarge( + "BSON document too large (%d bytes)" + " - the connected server supports" + " BSON document sizes up to %d" + " bytes." 
% (doc_size, max_size) + ) + else: + # There's nothing intelligent we can say + # about size for update and delete + raise DocumentTooLarge(f"{operation!r} command document too large") + + +# OP_MSG ------------------------------------------------------------- + + +_OP_MSG_MAP = { + _INSERT: b"documents\x00", + _UPDATE: b"updates\x00", + _DELETE: b"deletes\x00", +} + + +def _batched_op_msg_impl( + operation: int, + command: Mapping[str, Any], + docs: List[Mapping[str, Any]], + ack: bool, + opts: CodecOptions, + ctx: _BulkWriteContext, + buf: _BytesIO, +) -> Tuple[List[Mapping[str, Any]], int]: + """Create a batched OP_MSG write.""" + max_bson_size = ctx.max_bson_size + max_write_batch_size = ctx.max_write_batch_size + max_message_size = ctx.max_message_size + + flags = b"\x00\x00\x00\x00" if ack else b"\x02\x00\x00\x00" + # Flags + buf.write(flags) + + # Type 0 Section + buf.write(b"\x00") + buf.write(_dict_to_bson(command, False, opts)) + + # Type 1 Section + buf.write(b"\x01") + size_location = buf.tell() + # Save space for size + buf.write(b"\x00\x00\x00\x00") + try: + buf.write(_OP_MSG_MAP[operation]) + except KeyError: + raise InvalidOperation("Unknown command") + + to_send = [] + idx = 0 + for doc in docs: + # Encode the current operation + value = _dict_to_bson(doc, False, opts) + doc_length = len(value) + new_message_size = buf.tell() + doc_length + # Does first document exceed max_message_size? + doc_too_large = idx == 0 and (new_message_size > max_message_size) + # When OP_MSG is used unacknowledged we have to check + # document size client side or applications won't be notified. + # Otherwise we let the server deal with documents that are too large + # since ordered=False causes those documents to be skipped instead of + # halting the bulk write operation. + unacked_doc_too_large = not ack and (doc_length > max_bson_size) + if doc_too_large or unacked_doc_too_large: + write_op = list(_FIELD_MAP.keys())[operation] + _raise_document_too_large(write_op, len(value), max_bson_size) + # We have enough data, return this batch. + if new_message_size > max_message_size: + break + buf.write(value) + to_send.append(doc) + idx += 1 + # We have enough documents, return this batch. + if idx == max_write_batch_size: + break + + # Write type 1 section size + length = buf.tell() + buf.seek(size_location) + buf.write(_pack_int(length - size_location)) + + return to_send, length + + +def _encode_batched_op_msg( + operation: int, + command: Mapping[str, Any], + docs: List[Mapping[str, Any]], + ack: bool, + opts: CodecOptions, + ctx: _BulkWriteContext, +) -> Tuple[bytes, List[Mapping[str, Any]]]: + """Encode the next batched insert, update, or delete operation + as OP_MSG. + """ + buf = _BytesIO() + + to_send, _ = _batched_op_msg_impl(operation, command, docs, ack, opts, ctx, buf) + return buf.getvalue(), to_send + + +if _use_c: + _encode_batched_op_msg = _cmessage._encode_batched_op_msg # noqa: F811 + + +def _batched_op_msg_compressed( + operation: int, + command: Mapping[str, Any], + docs: List[Mapping[str, Any]], + ack: bool, + opts: CodecOptions, + ctx: _BulkWriteContext, +) -> Tuple[int, bytes, List[Mapping[str, Any]]]: + """Create the next batched insert, update, or delete operation + with OP_MSG, compressed. 
+ """ + data, to_send = _encode_batched_op_msg(operation, command, docs, ack, opts, ctx) + + assert ctx.conn.compression_context is not None + request_id, msg = _compress(2013, data, ctx.conn.compression_context) + return request_id, msg, to_send + + +def _batched_op_msg( + operation: int, + command: Mapping[str, Any], + docs: List[Mapping[str, Any]], + ack: bool, + opts: CodecOptions, + ctx: _BulkWriteContext, +) -> Tuple[int, bytes, List[Mapping[str, Any]]]: + """OP_MSG implementation entry point.""" + buf = _BytesIO() + + # Save space for message length and request id + buf.write(_ZERO_64) + # responseTo, opCode + buf.write(b"\x00\x00\x00\x00\xdd\x07\x00\x00") + + to_send, length = _batched_op_msg_impl(operation, command, docs, ack, opts, ctx, buf) + + # Header - request id and message length + buf.seek(4) + request_id = _randint() + buf.write(_pack_int(request_id)) + buf.seek(0) + buf.write(_pack_int(length)) + + return request_id, buf.getvalue(), to_send + + +if _use_c: + _batched_op_msg = _cmessage._batched_op_msg # noqa: F811 + + +def _do_batched_op_msg( + namespace: str, + operation: int, + command: MutableMapping[str, Any], + docs: List[Mapping[str, Any]], + opts: CodecOptions, + ctx: _BulkWriteContext, +) -> Tuple[int, bytes, List[Mapping[str, Any]]]: + """Create the next batched insert, update, or delete operation + using OP_MSG. + """ + command["$db"] = namespace.split(".", 1)[0] + if "writeConcern" in command: + ack = bool(command["writeConcern"].get("w", 1)) + else: + ack = True + if ctx.conn.compression_context: + return _batched_op_msg_compressed(operation, command, docs, ack, opts, ctx) + return _batched_op_msg(operation, command, docs, ack, opts, ctx) + + +# End OP_MSG ----------------------------------------------------- + + +def _encode_batched_write_command( + namespace: str, + operation: int, + command: MutableMapping[str, Any], + docs: List[Mapping[str, Any]], + opts: CodecOptions, + ctx: _BulkWriteContext, +) -> Tuple[bytes, List[Mapping[str, Any]]]: + """Encode the next batched insert, update, or delete command.""" + buf = _BytesIO() + + to_send, _ = _batched_write_command_impl(namespace, operation, command, docs, opts, ctx, buf) + return buf.getvalue(), to_send + + +if _use_c: + _encode_batched_write_command = _cmessage._encode_batched_write_command # noqa: F811 + + +def _batched_write_command_impl( + namespace: str, + operation: int, + command: MutableMapping[str, Any], + docs: List[Mapping[str, Any]], + opts: CodecOptions, + ctx: _BulkWriteContext, + buf: _BytesIO, +) -> Tuple[List[Mapping[str, Any]], int]: + """Create a batched OP_QUERY write command.""" + max_bson_size = ctx.max_bson_size + max_write_batch_size = ctx.max_write_batch_size + # Max BSON object size + 16k - 2 bytes for ending NUL bytes. + # Server guarantees there is enough room: SERVER-10643. + max_cmd_size = max_bson_size + _COMMAND_OVERHEAD + max_split_size = ctx.max_split_size + + # No options + buf.write(_ZERO_32) + # Namespace as C string + buf.write(namespace.encode("utf8")) + buf.write(_ZERO_8) + # Skip: 0, Limit: -1 + buf.write(_SKIPLIM) + + # Where to write command document length + command_start = buf.tell() + buf.write(encode(command)) + + # Start of payload + buf.seek(-1, 2) + # Work around some Jython weirdness. 
+ buf.truncate() + try: + buf.write(_OP_MAP[operation]) + except KeyError: + raise InvalidOperation("Unknown command") + + # Where to write list document length + list_start = buf.tell() - 4 + to_send = [] + idx = 0 + for doc in docs: + # Encode the current operation + key = str(idx).encode("utf8") + value = _dict_to_bson(doc, False, opts) + # Is there enough room to add this document? max_cmd_size accounts for + # the two trailing null bytes. + doc_too_large = len(value) > max_cmd_size + if doc_too_large: + write_op = list(_FIELD_MAP.keys())[operation] + _raise_document_too_large(write_op, len(value), max_bson_size) + enough_data = idx >= 1 and (buf.tell() + len(key) + len(value)) >= max_split_size + enough_documents = idx >= max_write_batch_size + if enough_data or enough_documents: + break + buf.write(_BSONOBJ) + buf.write(key) + buf.write(_ZERO_8) + buf.write(value) + to_send.append(doc) + idx += 1 + + # Finalize the current OP_QUERY message. + # Close list and command documents + buf.write(_ZERO_16) + + # Write document lengths and request id + length = buf.tell() + buf.seek(list_start) + buf.write(_pack_int(length - list_start - 1)) + buf.seek(command_start) + buf.write(_pack_int(length - command_start)) + + return to_send, length + + +class _OpReply: + """A MongoDB OP_REPLY response message.""" + + __slots__ = ("flags", "cursor_id", "number_returned", "documents") + + UNPACK_FROM = struct.Struct("<iqii").unpack_from + OP_CODE = 1 + + def __init__(self, flags: int, cursor_id: int, number_returned: int, documents: bytes): + self.flags = flags + self.cursor_id = Int64(cursor_id) + self.number_returned = number_returned + self.documents = documents + + def raw_response( + self, cursor_id: Optional[int] = None, user_fields: Optional[Mapping[str, Any]] = None + ) -> List[bytes]: + """Check the response header from the database, without decoding BSON. + + Check the response for errors and unpack. + + Can raise CursorNotFound, NotPrimaryError, ExecutionTimeout, or + OperationFailure. + + :Parameters: + - `cursor_id` (optional): cursor_id we sent to get this response - + used for raising an informative exception when we get cursor id not + valid at server response. + """ + if self.flags & 1: + # Shouldn't get this response if we aren't doing a getMore + if cursor_id is None: + raise ProtocolError("No cursor id for getMore operation") + + # Fake a getMore command response. OP_GET_MORE provides no + # document. + msg = "Cursor not found, cursor id: %d" % (cursor_id,) + errobj = {"ok": 0, "errmsg": msg, "code": 43} + raise CursorNotFound(msg, 43, errobj) + elif self.flags & 2: + error_object: dict = bson.BSON(self.documents).decode() + # Fake the ok field if it doesn't exist. 
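+            # (A QueryFailure reply carries only fields such as $err and
+            # code, so ok=0 is supplied to make the document look like a
+            # modern command error before it is raised below.)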
+ error_object.setdefault("ok", 0) + if error_object["$err"].startswith(HelloCompat.LEGACY_ERROR): + raise NotPrimaryError(error_object["$err"], error_object) + elif error_object.get("code") == 50: + default_msg = "operation exceeded time limit" + raise ExecutionTimeout( + error_object.get("$err", default_msg), error_object.get("code"), error_object + ) + raise OperationFailure( + "database error: %s" % error_object.get("$err"), + error_object.get("code"), + error_object, + ) + if self.documents: + return [self.documents] + return [] + + def unpack_response( + self, + cursor_id: Optional[int] = None, + codec_options: CodecOptions = _UNICODE_REPLACE_CODEC_OPTIONS, + user_fields: Optional[Mapping[str, Any]] = None, + legacy_response: bool = False, + ) -> List[Dict[str, Any]]: + """Unpack a response from the database and decode the BSON document(s). + + Check the response for errors and unpack, returning a dictionary + containing the response data. + + Can raise CursorNotFound, NotPrimaryError, ExecutionTimeout, or + OperationFailure. + + :Parameters: + - `cursor_id` (optional): cursor_id we sent to get this response - + used for raising an informative exception when we get cursor id not + valid at server response + - `codec_options` (optional): an instance of + :class:`~bson.codec_options.CodecOptions` + - `user_fields` (optional): Response fields that should be decoded + using the TypeDecoders from codec_options, passed to + bson._decode_all_selective. + """ + self.raw_response(cursor_id) + if legacy_response: + return bson.decode_all(self.documents, codec_options) + return bson._decode_all_selective(self.documents, codec_options, user_fields) + + def command_response(self, codec_options: CodecOptions) -> Dict[str, Any]: + """Unpack a command response.""" + docs = self.unpack_response(codec_options=codec_options) + assert self.number_returned == 1 + return docs[0] + + def raw_command_response(self) -> NoReturn: + """Return the bytes of the command response.""" + # This should never be called on _OpReply. + raise NotImplementedError + + @property + def more_to_come(self) -> bool: + """Is the moreToCome bit set on this response?""" + return False + + @classmethod + def unpack(cls, msg: bytes) -> _OpReply: + """Construct an _OpReply from raw bytes.""" + # PYTHON-945: ignore starting_from field. + flags, cursor_id, _, number_returned = cls.UNPACK_FROM(msg) + + documents = msg[20:] + return cls(flags, cursor_id, number_returned, documents) + + +class _OpMsg: + """A MongoDB OP_MSG response message.""" + + __slots__ = ("flags", "cursor_id", "number_returned", "payload_document") + + UNPACK_FROM = struct.Struct("<IBi").unpack_from + OP_CODE = 2013 + + # Flag bits. + CHECKSUM_PRESENT = 1 + MORE_TO_COME = 1 << 1 + EXHAUST_ALLOWED = 1 << 16 # Only present on requests. 
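+    # Flag-bit sketch (illustrative): a server reply streaming an exhaust
+    # cursor sets MORE_TO_COME, e.g.
+    #     flags = _OpMsg.MORE_TO_COME           # 0x2
+    #     bool(flags & _OpMsg.MORE_TO_COME)     # True: further replies
+    #                                           # arrive without new requests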
+ + def __init__(self, flags: int, payload_document: bytes): + self.flags = flags + self.payload_document = payload_document + + def raw_response( + self, + cursor_id: Optional[int] = None, + user_fields: Optional[Mapping[str, Any]] = {}, # noqa: B006 + ) -> List[Mapping[str, Any]]: + """ + cursor_id is ignored + user_fields is used to determine which fields must not be decoded + """ + inflated_response = _decode_selective( + RawBSONDocument(self.payload_document), user_fields, _RAW_ARRAY_BSON_OPTIONS + ) + return [inflated_response] + + def unpack_response( + self, + cursor_id: Optional[int] = None, + codec_options: CodecOptions = _UNICODE_REPLACE_CODEC_OPTIONS, + user_fields: Optional[Mapping[str, Any]] = None, + legacy_response: bool = False, + ) -> List[Dict[str, Any]]: + """Unpack a OP_MSG command response. + + :Parameters: + - `cursor_id` (optional): Ignored, for compatibility with _OpReply. + - `codec_options` (optional): an instance of + :class:`~bson.codec_options.CodecOptions` + - `user_fields` (optional): Response fields that should be decoded + using the TypeDecoders from codec_options, passed to + bson._decode_all_selective. + """ + # If _OpMsg is in-use, this cannot be a legacy response. + assert not legacy_response + return bson._decode_all_selective(self.payload_document, codec_options, user_fields) + + def command_response(self, codec_options: CodecOptions) -> Dict[str, Any]: + """Unpack a command response.""" + return self.unpack_response(codec_options=codec_options)[0] + + def raw_command_response(self) -> bytes: + """Return the bytes of the command response.""" + return self.payload_document + + @property + def more_to_come(self) -> bool: + """Is the moreToCome bit set on this response?""" + return bool(self.flags & self.MORE_TO_COME) + + @classmethod + def unpack(cls, msg: bytes) -> _OpMsg: + """Construct an _OpMsg from raw bytes.""" + flags, first_payload_type, first_payload_size = cls.UNPACK_FROM(msg) + if flags != 0: + if flags & cls.CHECKSUM_PRESENT: + raise ProtocolError(f"Unsupported OP_MSG flag checksumPresent: 0x{flags:x}") + + if flags ^ cls.MORE_TO_COME: + raise ProtocolError(f"Unsupported OP_MSG flags: 0x{flags:x}") + if first_payload_type != 0: + raise ProtocolError(f"Unsupported OP_MSG payload type: 0x{first_payload_type:x}") + + if len(msg) != first_payload_size + 5: + raise ProtocolError("Unsupported OP_MSG reply: >1 section") + + payload_document = msg[5:] + return cls(flags, payload_document) + + +_UNPACK_REPLY: Dict[int, Callable[[bytes], Union[_OpReply, _OpMsg]]] = { + _OpReply.OP_CODE: _OpReply.unpack, + _OpMsg.OP_CODE: _OpMsg.unpack, +} diff --git a/backend/test/lib/python3.8/site-packages/pymongo/mongo_client.py b/backend/test/lib/python3.8/site-packages/pymongo/mongo_client.py new file mode 100644 index 0000000000000000000000000000000000000000..5699c3db8b7aaa97b83f6f42b4b55159069012fc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/mongo_client.py @@ -0,0 +1,2326 @@ +# Copyright 2009-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
See the License for the specific language governing +# permissions and limitations under the License. + +"""Tools for connecting to MongoDB. + +.. seealso:: :doc:`/examples/high_availability` for examples of connecting + to replica sets or sets of mongos servers. + +To get a :class:`~pymongo.database.Database` instance from a +:class:`MongoClient` use either dictionary-style or attribute-style +access: + +.. doctest:: + + >>> from pymongo import MongoClient + >>> c = MongoClient() + >>> c.test_database + Database(MongoClient(host=['localhost:27017'], document_class=dict, tz_aware=False, connect=True), 'test_database') + >>> c["test-database"] + Database(MongoClient(host=['localhost:27017'], document_class=dict, tz_aware=False, connect=True), 'test-database') +""" +from __future__ import annotations + +import contextlib +import os +import weakref +from collections import defaultdict +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ContextManager, + Dict, + FrozenSet, + Generic, + Iterator, + List, + Mapping, + MutableMapping, + NoReturn, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) + +import bson +from bson.codec_options import DEFAULT_CODEC_OPTIONS, TypeRegistry +from bson.son import SON +from bson.timestamp import Timestamp +from pymongo import ( + _csot, + client_session, + common, + database, + helpers, + message, + periodic_executor, + uri_parser, +) +from pymongo.change_stream import ChangeStream, ClusterChangeStream +from pymongo.client_options import ClientOptions +from pymongo.client_session import _EmptyServerSession +from pymongo.command_cursor import CommandCursor +from pymongo.errors import ( + AutoReconnect, + BulkWriteError, + ConfigurationError, + ConnectionFailure, + InvalidOperation, + NotPrimaryError, + OperationFailure, + PyMongoError, + ServerSelectionTimeoutError, + WaitQueueTimeoutError, +) +from pymongo.lock import _HAS_REGISTER_AT_FORK, _create_lock, _release_locks +from pymongo.pool import ConnectionClosedReason +from pymongo.read_preferences import ReadPreference, _ServerMode +from pymongo.server_selectors import writable_server_selector +from pymongo.server_type import SERVER_TYPE +from pymongo.settings import TopologySettings +from pymongo.topology import Topology, _ErrorContext +from pymongo.topology_description import TOPOLOGY_TYPE, TopologyDescription +from pymongo.typings import ( + ClusterTime, + _Address, + _CollationIn, + _DocumentType, + _DocumentTypeArg, + _Pipeline, +) +from pymongo.uri_parser import ( + _check_options, + _handle_option_deprecations, + _handle_security_options, + _normalize_options, +) +from pymongo.write_concern import DEFAULT_WRITE_CONCERN, WriteConcern + +if TYPE_CHECKING: + import sys + from types import TracebackType + + from bson.objectid import ObjectId + from pymongo.bulk import _Bulk + from pymongo.client_session import ClientSession, _ServerSession + from pymongo.cursor import _ConnectionManager + from pymongo.database import Database + from pymongo.message import _CursorAddress, _GetMore, _Query + from pymongo.pool import Connection + from pymongo.read_concern import ReadConcern + from pymongo.response import Response + from pymongo.server import Server + from pymongo.server_selectors import Selection + + if sys.version_info[:2] >= (3, 9): + from collections.abc import Generator + else: + # Deprecated since version 3.9: collections.abc.Generator now supports []. 
+ from typing import Generator + +T = TypeVar("T") + + +class MongoClient(common.BaseObject, Generic[_DocumentType]): + """ + A client-side representation of a MongoDB cluster. + + Instances can represent either a standalone MongoDB server, a replica + set, or a sharded cluster. Instances of this class are responsible for + maintaining up-to-date state of the cluster, and possibly cache + resources related to this, including background threads for monitoring, + and connection pools. + """ + + HOST = "localhost" + PORT = 27017 + # Define order to retrieve options from ClientOptions for __repr__. + # No host/port; these are retrieved from TopologySettings. + _constructor_args = ("document_class", "tz_aware", "connect") + _clients: weakref.WeakValueDictionary = weakref.WeakValueDictionary() + + def __init__( + self, + host: Optional[Union[str, Sequence[str]]] = None, + port: Optional[int] = None, + document_class: Optional[Type[_DocumentType]] = None, + tz_aware: Optional[bool] = None, + connect: Optional[bool] = None, + type_registry: Optional[TypeRegistry] = None, + **kwargs: Any, + ) -> None: + """Client for a MongoDB instance, a replica set, or a set of mongoses. + + .. warning:: Starting in PyMongo 4.0, ``directConnection`` now has a default value of + False instead of None. + For more details, see the relevant section of the PyMongo 4.x migration guide: + :ref:`pymongo4-migration-direct-connection`. + + The client object is thread-safe and has connection-pooling built in. + If an operation fails because of a network error, + :class:`~pymongo.errors.ConnectionFailure` is raised and the client + reconnects in the background. Application code should handle this + exception (recognizing that the operation failed) and then continue to + execute. + + The `host` parameter can be a full `mongodb URI + <http://dochub.mongodb.org/core/connections>`_, in addition to + a simple hostname. It can also be a list of hostnames but no more + than one URI. Any port specified in the host string(s) will override + the `port` parameter. For username and + passwords reserved characters like ':', '/', '+' and '@' must be + percent encoded following RFC 2396:: + + from urllib.parse import quote_plus + + uri = "mongodb://%s:%s@%s" % ( + quote_plus(user), quote_plus(password), host) + client = MongoClient(uri) + + Unix domain sockets are also supported. The socket path must be percent + encoded in the URI:: + + uri = "mongodb://%s:%s@%s" % ( + quote_plus(user), quote_plus(password), quote_plus(socket_path)) + client = MongoClient(uri) + + But not when passed as a simple hostname:: + + client = MongoClient('/tmp/mongodb-27017.sock') + + Starting with version 3.6, PyMongo supports mongodb+srv:// URIs. The + URI must include one, and only one, hostname. The hostname will be + resolved to one or more DNS `SRV records + <https://en.wikipedia.org/wiki/SRV_record>`_ which will be used + as the seed list for connecting to the MongoDB deployment. When using + SRV URIs, the `authSource` and `replicaSet` configuration options can + be specified using `TXT records + <https://en.wikipedia.org/wiki/TXT_record>`_. See the + `Initial DNS Seedlist Discovery spec + <https://github.com/mongodb/specifications/blob/master/source/ + initial-dns-seedlist-discovery/initial-dns-seedlist-discovery.rst>`_ + for more details. Note that the use of SRV URIs implicitly enables + TLS support. Pass tls=false in the URI to override. + + .. note:: MongoClient creation will block waiting for answers from + DNS when mongodb+srv:// URIs are used. 
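+
+       Resolving such URIs requires the ``dnspython`` package. A short
+       illustrative example (the hostname is a placeholder, not a real
+       cluster)::
+
+           client = MongoClient(
+               "mongodb+srv://cluster0.example.net/?retryWrites=true")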
+ + .. note:: Starting with version 3.0 the :class:`MongoClient` + constructor no longer blocks while connecting to the server or + servers, and it no longer raises + :class:`~pymongo.errors.ConnectionFailure` if they are + unavailable, nor :class:`~pymongo.errors.ConfigurationError` + if the user's credentials are wrong. Instead, the constructor + returns immediately and launches the connection process on + background threads. You can check if the server is available + like this:: + + from pymongo.errors import ConnectionFailure + client = MongoClient() + try: + # The ping command is cheap and does not require auth. + client.admin.command('ping') + except ConnectionFailure: + print("Server not available") + + .. warning:: When using PyMongo in a multiprocessing context, please + read :ref:`multiprocessing` first. + + .. note:: Many of the following options can be passed using a MongoDB + URI or keyword parameters. If the same option is passed in a URI and + as a keyword parameter the keyword parameter takes precedence. + + :Parameters: + - `host` (optional): hostname or IP address or Unix domain socket + path of a single mongod or mongos instance to connect to, or a + mongodb URI, or a list of hostnames (but no more than one mongodb + URI). If `host` is an IPv6 literal it must be enclosed in '[' + and ']' characters + following the RFC2732 URL syntax (e.g. '[::1]' for localhost). + Multihomed and round robin DNS addresses are **not** supported. + - `port` (optional): port number on which to connect + - `document_class` (optional): default class to use for + documents returned from queries on this client + - `tz_aware` (optional): if ``True``, + :class:`~datetime.datetime` instances returned as values + in a document by this :class:`MongoClient` will be timezone + aware (otherwise they will be naive) + - `connect` (optional): if ``True`` (the default), immediately + begin connecting to MongoDB in the background. Otherwise connect + on the first operation. + - `type_registry` (optional): instance of + :class:`~bson.codec_options.TypeRegistry` to enable encoding + and decoding of custom types. + - `datetime_conversion`: Specifies how UTC datetimes should be decoded + within BSON. Valid options include 'datetime_ms' to return as a + DatetimeMS, 'datetime' to return as a datetime.datetime and + raising a ValueError for out-of-range values, 'datetime_auto' to + return DatetimeMS objects when the underlying datetime is + out-of-range and 'datetime_clamp' to clamp to the minimum and + maximum possible datetimes. Defaults to 'datetime'. See + :ref:`handling-out-of-range-datetimes` for details. + + | **Other optional parameters can be passed as keyword arguments:** + + - `directConnection` (optional): if ``True``, forces this client to + connect directly to the specified MongoDB host as a standalone. + If ``false``, the client connects to the entire replica set of + which the given MongoDB host(s) is a part. If this is ``True`` + and a mongodb+srv:// URI or a URI containing multiple seeds is + provided, an exception will be raised. + - `maxPoolSize` (optional): The maximum allowable number of + concurrent connections to each connected server. Requests to a + server will block if there are `maxPoolSize` outstanding + connections to the requested server. Defaults to 100. Can be + either 0 or None, in which case there is no limit on the number + of concurrent connections. 
+ - `minPoolSize` (optional): The minimum required number of concurrent + connections that the pool will maintain to each connected server. + Default is 0. + - `maxIdleTimeMS` (optional): The maximum number of milliseconds that + a connection can remain idle in the pool before being removed and + replaced. Defaults to `None` (no limit). + - `maxConnecting` (optional): The maximum number of connections that + each pool can establish concurrently. Defaults to `2`. + - `timeoutMS`: (integer or None) Controls how long (in + milliseconds) the driver will wait when executing an operation + (including retry attempts) before raising a timeout error. + ``0`` or ``None`` means no timeout. + - `socketTimeoutMS`: (integer or None) Controls how long (in + milliseconds) the driver will wait for a response after sending an + ordinary (non-monitoring) database operation before concluding that + a network error has occurred. ``0`` or ``None`` means no timeout. + Defaults to ``None`` (no timeout). + - `connectTimeoutMS`: (integer or None) Controls how long (in + milliseconds) the driver will wait during server monitoring when + connecting a new socket to a server before concluding the server + is unavailable. ``0`` or ``None`` means no timeout. + Defaults to ``20000`` (20 seconds). + - `server_selector`: (callable or None) Optional, user-provided + function that augments server selection rules. The function should + accept as an argument a list of + :class:`~pymongo.server_description.ServerDescription` objects and + return a list of server descriptions that should be considered + suitable for the desired operation. + - `serverSelectionTimeoutMS`: (integer) Controls how long (in + milliseconds) the driver will wait to find an available, + appropriate server to carry out a database operation; while it is + waiting, multiple server monitoring operations may be carried out, + each controlled by `connectTimeoutMS`. Defaults to ``30000`` (30 + seconds). + - `waitQueueTimeoutMS`: (integer or None) How long (in milliseconds) + a thread will wait for a socket from the pool if the pool has no + free sockets. Defaults to ``None`` (no timeout). + - `heartbeatFrequencyMS`: (optional) The number of milliseconds + between periodic server checks, or None to accept the default + frequency of 10 seconds. + - `appname`: (string or None) The name of the application that + created this MongoClient instance. The server will log this value + upon establishing each connection. It is also recorded in the slow + query log and profile collections. + - `driver`: (pair or None) A driver implemented on top of PyMongo can + pass a :class:`~pymongo.driver_info.DriverInfo` to add its name, + version, and platform to the message printed in the server log when + establishing a connection. + - `event_listeners`: a list or tuple of event listeners. See + :mod:`~pymongo.monitoring` for details. + - `retryWrites`: (boolean) Whether supported write operations + executed within this MongoClient will be retried once after a + network error. Defaults to ``True``. + The supported write operations are: + + - :meth:`~pymongo.collection.Collection.bulk_write`, as long as + :class:`~pymongo.operations.UpdateMany` or + :class:`~pymongo.operations.DeleteMany` are not included. 
+            - :meth:`~pymongo.collection.Collection.delete_one`
+            - :meth:`~pymongo.collection.Collection.insert_one`
+            - :meth:`~pymongo.collection.Collection.insert_many`
+            - :meth:`~pymongo.collection.Collection.replace_one`
+            - :meth:`~pymongo.collection.Collection.update_one`
+            - :meth:`~pymongo.collection.Collection.find_one_and_delete`
+            - :meth:`~pymongo.collection.Collection.find_one_and_replace`
+            - :meth:`~pymongo.collection.Collection.find_one_and_update`
+
+            Unsupported write operations include, but are not limited to,
+            :meth:`~pymongo.collection.Collection.aggregate` using the ``$out``
+            pipeline operator and any operation with an unacknowledged write
+            concern (e.g. {w: 0}). See
+            https://github.com/mongodb/specifications/blob/master/source/retryable-writes/retryable-writes.rst
+          - `retryReads`: (boolean) Whether supported read operations
+            executed within this MongoClient will be retried once after a
+            network error. Defaults to ``True``.
+            The supported read operations are:
+            :meth:`~pymongo.collection.Collection.find`,
+            :meth:`~pymongo.collection.Collection.find_one`,
+            :meth:`~pymongo.collection.Collection.aggregate` without ``$out``,
+            :meth:`~pymongo.collection.Collection.distinct`,
+            :meth:`~pymongo.collection.Collection.count`,
+            :meth:`~pymongo.collection.Collection.estimated_document_count`,
+            :meth:`~pymongo.collection.Collection.count_documents`,
+            :meth:`~pymongo.collection.Collection.watch`,
+            :meth:`~pymongo.collection.Collection.list_indexes`,
+            :meth:`~pymongo.database.Database.watch`,
+            :meth:`~pymongo.database.Database.list_collections`,
+            :meth:`~pymongo.mongo_client.MongoClient.watch`,
+            and :meth:`~pymongo.mongo_client.MongoClient.list_databases`.
+
+            Unsupported read operations include, but are not limited to,
+            :meth:`~pymongo.database.Database.command` and any getMore
+            operation on a cursor.
+
+            Enabling retryable reads makes applications more resilient to
+            transient errors such as network failures, database upgrades, and
+            replica set failovers. For an exact definition of which errors
+            trigger a retry, see the `retryable reads specification
+            <https://github.com/mongodb/specifications/blob/master/source/retryable-reads/retryable-reads.rst>`_.
+
+          - `compressors`: Comma-separated list of compressors for wire
+            protocol compression. The list is used to negotiate a compressor
+            with the server. Currently supported options are "snappy", "zlib"
+            and "zstd". Support for snappy requires the
+            `python-snappy <https://pypi.org/project/python-snappy/>`_ package.
+            zlib support requires the Python standard library zlib module. zstd
+            requires the `zstandard <https://pypi.org/project/zstandard/>`_
+            package. By default no compression is used. Compression support
+            must also be enabled on the server. MongoDB 3.6+ supports snappy
+            and zlib compression. MongoDB 4.2+ adds support for zstd.
+          - `zlibCompressionLevel`: (int) The zlib compression level to use
+            when zlib is used as the wire protocol compressor. Supported values
+            are -1 through 9. -1 tells the zlib library to use its default
+            compression level (usually 6). 0 means no compression. 1 is best
+            speed. 9 is best compression. Defaults to -1.
+          - `uuidRepresentation`: The BSON representation to use when encoding
+            from and decoding to instances of :class:`~uuid.UUID`. Valid
+            values are the strings: "standard", "pythonLegacy", "javaLegacy",
+            "csharpLegacy", and "unspecified" (the default). New applications
+            should consider setting this to "standard" for cross-language
+            compatibility.
See :ref:`handling-uuid-data-example` for details. + - `unicode_decode_error_handler`: The error handler to apply when + a Unicode-related error occurs during BSON decoding that would + otherwise raise :exc:`UnicodeDecodeError`. Valid options include + 'strict', 'replace', 'backslashreplace', 'surrogateescape', and + 'ignore'. Defaults to 'strict'. + - `srvServiceName`: (string) The SRV service name to use for + "mongodb+srv://" URIs. Defaults to "mongodb". Use it like so:: + + MongoClient("mongodb+srv://example.com/?srvServiceName=customname") + - `srvMaxHosts`: (int) limits the number of mongos-like hosts a client will + connect to. More specifically, when a "mongodb+srv://" connection string + resolves to more than srvMaxHosts number of hosts, the client will randomly + choose an srvMaxHosts sized subset of hosts. + + + | **Write Concern options:** + | (Only set if passed. No default values.) + + - `w`: (integer or string) If this is a replica set, write operations + will block until they have been replicated to the specified number + or tagged set of servers. `w=<int>` always includes the replica set + primary (e.g. w=3 means write to the primary and wait until + replicated to **two** secondaries). Passing w=0 **disables write + acknowledgement** and all other write concern options. + - `wTimeoutMS`: (integer) Used in conjunction with `w`. Specify a value + in milliseconds to control how long to wait for write propagation + to complete. If replication does not complete in the given + timeframe, a timeout exception is raised. Passing wTimeoutMS=0 + will cause **write operations to wait indefinitely**. + - `journal`: If ``True`` block until write operations have been + committed to the journal. Cannot be used in combination with + `fsync`. Write operations will fail with an exception if this + option is used when the server is running without journaling. + - `fsync`: If ``True`` and the server is running without journaling, + blocks until the server has synced all data files to disk. If the + server is running with journaling, this acts the same as the `j` + option, blocking until write operations have been committed to the + journal. Cannot be used in combination with `j`. + + | **Replica set keyword arguments for connecting with a replica set + - either directly or via a mongos:** + + - `replicaSet`: (string or None) The name of the replica set to + connect to. The driver will verify that all servers it connects to + match this name. Implies that the hosts specified are a seed list + and the driver should attempt to find all members of the set. + Defaults to ``None``. + + | **Read Preference:** + + - `readPreference`: The replica set read preference for this client. + One of ``primary``, ``primaryPreferred``, ``secondary``, + ``secondaryPreferred``, or ``nearest``. Defaults to ``primary``. + - `readPreferenceTags`: Specifies a tag set as a comma-separated list + of colon-separated key-value pairs. For example ``dc:ny,rack:1``. + Defaults to ``None``. + - `maxStalenessSeconds`: (integer) The maximum estimated + length of time a replica set secondary can fall behind the primary + in replication before it will no longer be selected for operations. + Defaults to ``-1``, meaning no maximum. If maxStalenessSeconds + is set, it must be a positive integer greater than or equal to + 90 seconds. + + .. seealso:: :doc:`/examples/server_selection` + + | **Authentication:** + + - `username`: A string. + - `password`: A string. 
+
+          Although username and password must be percent-escaped in a MongoDB
+          URI, they must not be percent-escaped when passed as parameters. In
+          this example, both the space and slash special characters are passed
+          as-is::
+
+            MongoClient(username="user name", password="pass/word")
+
+          - `authSource`: The database to authenticate on. Defaults to the
+            database specified in the URI, if provided, or to "admin".
+          - `authMechanism`: See :data:`~pymongo.auth.MECHANISMS` for options.
+            If no mechanism is specified, PyMongo automatically uses
+            SCRAM-SHA-1 when connected to MongoDB 3.6 and negotiates the
+            mechanism to use (SCRAM-SHA-1 or SCRAM-SHA-256) when connected to
+            MongoDB 4.0+.
+          - `authMechanismProperties`: Used to specify authentication
+            mechanism specific options. To specify the service name for GSSAPI
+            authentication pass
+            ``authMechanismProperties='SERVICE_NAME:<service name>'``.
+            To specify the session token for MONGODB-AWS authentication pass
+            ``authMechanismProperties='AWS_SESSION_TOKEN:<session token>'``.
+
+          .. seealso:: :doc:`/examples/authentication`
+
+          | **TLS/SSL configuration:**
+
+          - `tls`: (boolean) If ``True``, create the connection to the server
+            using transport layer security. Defaults to ``False``.
+          - `tlsInsecure`: (boolean) Specify whether TLS constraints should be
+            relaxed as much as possible. Setting ``tlsInsecure=True`` implies
+            ``tlsAllowInvalidCertificates=True`` and
+            ``tlsAllowInvalidHostnames=True``. Defaults to ``False``. Think
+            very carefully before setting this to ``True`` as it dramatically
+            reduces the security of TLS.
+          - `tlsAllowInvalidCertificates`: (boolean) If ``True``, continues
+            the TLS handshake regardless of the outcome of the certificate
+            verification process. If this is ``False``, and a value is not
+            provided for ``tlsCAFile``, PyMongo will attempt to load system
+            provided CA certificates. If the Python version in use does not
+            support loading system CA certificates then the ``tlsCAFile``
+            parameter must point to a file of CA certificates.
+            ``tlsAllowInvalidCertificates=False`` implies ``tls=True``.
+            Defaults to ``False``. Think very carefully before setting this
+            to ``True`` as that could make your application vulnerable to
+            on-path attackers.
+          - `tlsAllowInvalidHostnames`: (boolean) If ``True``, disables TLS
+            hostname verification. ``tlsAllowInvalidHostnames=False`` implies
+            ``tls=True``. Defaults to ``False``. Think very carefully before
+            setting this to ``True`` as that could make your application
+            vulnerable to on-path attackers.
+          - `tlsCAFile`: A file containing a single or a bundle of
+            "certification authority" certificates, which are used to validate
+            certificates passed from the other end of the connection.
+            Implies ``tls=True``. Defaults to ``None``.
+          - `tlsCertificateKeyFile`: A file containing the client certificate
+            and private key. Implies ``tls=True``. Defaults to ``None``.
+          - `tlsCRLFile`: A file containing a PEM or DER formatted
+            certificate revocation list. Implies ``tls=True``. Defaults to
+            ``None``.
+          - `tlsCertificateKeyFilePassword`: The password or passphrase for
+            decrypting the private key in ``tlsCertificateKeyFile``. Only
+            necessary if the private key is encrypted. Defaults to ``None``.
+          - `tlsDisableOCSPEndpointCheck`: (boolean) If ``True``, disables
+            certificate revocation status checking via the OCSP responder
+            specified on the server certificate.
+            ``tlsDisableOCSPEndpointCheck=False`` implies ``tls=True``.
+            Defaults to ``False``.
+          - `ssl`: (boolean) Alias for ``tls``.
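+
+          As a brief illustration (a sketch only; the CA file path below is a
+          placeholder, not a default of this API), a client that validates
+          the server certificate against a custom CA bundle can combine the
+          options above like so::
+
+            MongoClient("mongodb://db.example.com",
+                        tls=True,
+                        tlsCAFile="/path/to/ca.pem")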
+
+          | **Read Concern options:**
+          | (If not set explicitly, this will use the server default)
+
+          - `readConcernLevel`: (string) The read concern level specifies the
+            level of isolation for read operations. For example, a read
+            operation using a read concern level of ``majority`` will only
+            return data that has been written to a majority of nodes. If the
+            level is left unspecified, the server default will be used.
+
+          | **Client side encryption options:**
+          | (If not set explicitly, client side encryption will not be
+            enabled.)
+
+          - `auto_encryption_opts`: A
+            :class:`~pymongo.encryption_options.AutoEncryptionOpts` which
+            configures this client to automatically encrypt collection commands
+            and automatically decrypt results. See
+            :ref:`automatic-client-side-encryption` for an example.
+            If a :class:`MongoClient` is configured with
+            ``auto_encryption_opts`` and a non-None ``maxPoolSize``, a
+            separate internal ``MongoClient`` is created if any of the
+            following are true:
+
+            - A ``key_vault_client`` is not passed to
+              :class:`~pymongo.encryption_options.AutoEncryptionOpts`
+            - ``bypass_auto_encryption=False`` is passed to
+              :class:`~pymongo.encryption_options.AutoEncryptionOpts`
+
+          | **Stable API options:**
+          | (If not set explicitly, Stable API will not be enabled.)
+
+          - `server_api`: A
+            :class:`~pymongo.server_api.ServerApi` which configures this
+            client to use Stable API. See :ref:`versioned-api-ref` for
+            details.
+
+        .. seealso:: The MongoDB documentation on `connections <https://dochub.mongodb.org/core/connections>`_.
+
+        .. versionchanged:: 4.2
+           Added the ``timeoutMS`` keyword argument.
+
+        .. versionchanged:: 4.0
+
+           - Removed the fsync, unlock, is_locked, database_names, and
+             close_cursor methods.
+             See the :ref:`pymongo4-migration-guide`.
+           - Removed the ``waitQueueMultiple`` and ``socketKeepAlive``
+             keyword arguments.
+           - The default for `uuidRepresentation` was changed from
+             ``pythonLegacy`` to ``unspecified``.
+           - Added the ``srvServiceName``, ``maxConnecting``, and
+             ``srvMaxHosts`` URI and keyword arguments.
+
+        .. versionchanged:: 3.12
+           Added the ``server_api`` keyword argument.
+           The following keyword arguments were deprecated:
+
+           - ``ssl_certfile`` and ``ssl_keyfile`` were deprecated in favor
+             of ``tlsCertificateKeyFile``.
+
+        .. versionchanged:: 3.11
+           Added the following keyword arguments and URI options:
+
+           - ``tlsDisableOCSPEndpointCheck``
+           - ``directConnection``
+
+        .. versionchanged:: 3.9
+           Added the ``retryReads`` keyword argument and URI option.
+           Added the ``tlsInsecure`` keyword argument and URI option.
+           The following keyword arguments and URI options were deprecated:
+
+           - ``wTimeout`` was deprecated in favor of ``wTimeoutMS``.
+           - ``j`` was deprecated in favor of ``journal``.
+           - ``ssl_cert_reqs`` was deprecated in favor of
+             ``tlsAllowInvalidCertificates``.
+           - ``ssl_match_hostname`` was deprecated in favor of
+             ``tlsAllowInvalidHostnames``.
+           - ``ssl_ca_certs`` was deprecated in favor of ``tlsCAFile``.
+           - ``ssl_certfile`` was deprecated in favor of
+             ``tlsCertificateKeyFile``.
+           - ``ssl_crlfile`` was deprecated in favor of ``tlsCRLFile``.
+           - ``ssl_pem_passphrase`` was deprecated in favor of
+             ``tlsCertificateKeyFilePassword``.
+
+        .. versionchanged:: 3.9
+           ``retryWrites`` now defaults to ``True``.
+
+        .. versionchanged:: 3.8
+           Added the ``server_selector`` keyword argument.
+           Added the ``type_registry`` keyword argument.
+
+        .. versionchanged:: 3.7
+           Added the ``driver`` keyword argument.
+
+        .. versionchanged:: 3.6
+           Added support for mongodb+srv:// URIs.
+           Added the ``retryWrites`` keyword argument and URI option.
+
+        .. versionchanged:: 3.5
+           Added ``username`` and ``password`` options. Documented the
+           ``authSource``, ``authMechanism``, and ``authMechanismProperties``
+           options.
+           Deprecated the ``socketKeepAlive`` keyword argument and URI option.
+           ``socketKeepAlive`` now defaults to ``True``.
+
+        .. versionchanged:: 3.0
+           :class:`~pymongo.mongo_client.MongoClient` is now the one and only
+           client class for a standalone server, mongos, or replica set.
+           It includes the functionality that had been split into
+           :class:`~pymongo.mongo_client.MongoReplicaSetClient`: it can connect
+           to a replica set, discover all its members, and monitor the set for
+           stepdowns, elections, and reconfigs.
+
+           The :class:`~pymongo.mongo_client.MongoClient` constructor no
+           longer blocks while connecting to the server or servers, and it no
+           longer raises :class:`~pymongo.errors.ConnectionFailure` if they
+           are unavailable, nor :class:`~pymongo.errors.ConfigurationError`
+           if the user's credentials are wrong. Instead, the constructor
+           returns immediately and launches the connection process on
+           background threads.
+
+           Therefore the ``alive`` method is removed since it no longer
+           provides meaningful information; even if the client is disconnected,
+           it may discover a server in time to fulfill the next operation.
+
+           In PyMongo 2.x, :class:`~pymongo.MongoClient` accepted a list of
+           standalone MongoDB servers and used the first it could connect to::
+
+               MongoClient(['host1.com:27017', 'host2.com:27017'])
+
+           A list of multiple standalones is no longer supported; if multiple
+           servers are listed they must be members of the same replica set, or
+           mongoses in the same sharded cluster.
+
+           The behavior for a list of mongoses is changed from "high
+           availability" to "load balancing". Before, the client connected to
+           the lowest-latency mongos in the list, and used it until a network
+           error prompted it to re-evaluate all mongoses' latencies and
+           reconnect to one of them. In PyMongo 3, the client monitors its
+           network latency to all the mongoses continuously, and distributes
+           operations evenly among those with the lowest latency. See
+           :ref:`mongos-load-balancing` for more information.
+
+           The ``connect`` option is added.
+
+           The ``start_request``, ``in_request``, and ``end_request`` methods
+           are removed, as well as the ``auto_start_request`` option.
+
+           The ``copy_database`` method is removed; see the
+           :doc:`copy_database examples </examples/copydb>` for alternatives.
+
+           The :meth:`MongoClient.disconnect` method is removed; it was a
+           synonym for :meth:`~pymongo.MongoClient.close`.
+
+           :class:`~pymongo.mongo_client.MongoClient` no longer returns an
+           instance of :class:`~pymongo.database.Database` for attribute names
+           with leading underscores. You must use dict-style lookups instead::
+
+               client['__my_database__']
+
+           Not::
+
+               client.__my_database__
+        """
+        doc_class = document_class or dict
+        self.__init_kwargs: Dict[str, Any] = {
+            "host": host,
+            "port": port,
+            "document_class": doc_class,
+            "tz_aware": tz_aware,
+            "connect": connect,
+            "type_registry": type_registry,
+            **kwargs,
+        }
+
+        if host is None:
+            host = self.HOST
+        if isinstance(host, str):
+            host = [host]
+        if port is None:
+            port = self.PORT
+        if not isinstance(port, int):
+            raise TypeError("port must be an instance of int")
+
+        # _pool_class, _monitor_class, and _condition_class are for deep
+        # customization of PyMongo, e.g. Motor.
+ pool_class = kwargs.pop("_pool_class", None) + monitor_class = kwargs.pop("_monitor_class", None) + condition_class = kwargs.pop("_condition_class", None) + + # Parse options passed as kwargs. + keyword_opts = common._CaseInsensitiveDictionary(kwargs) + keyword_opts["document_class"] = doc_class + + seeds = set() + username = None + password = None + dbase = None + opts = common._CaseInsensitiveDictionary() + fqdn = None + srv_service_name = keyword_opts.get("srvservicename") + srv_max_hosts = keyword_opts.get("srvmaxhosts") + if len([h for h in host if "/" in h]) > 1: + raise ConfigurationError("host must not contain multiple MongoDB URIs") + for entity in host: + # A hostname can only include a-z, 0-9, '-' and '.'. If we find a '/' + # it must be a URI, + # https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names + if "/" in entity: + # Determine connection timeout from kwargs. + timeout = keyword_opts.get("connecttimeoutms") + if timeout is not None: + timeout = common.validate_timeout_or_none_or_zero( + keyword_opts.cased_key("connecttimeoutms"), timeout + ) + res = uri_parser.parse_uri( + entity, + port, + validate=True, + warn=True, + normalize=False, + connect_timeout=timeout, + srv_service_name=srv_service_name, + srv_max_hosts=srv_max_hosts, + ) + seeds.update(res["nodelist"]) + username = res["username"] or username + password = res["password"] or password + dbase = res["database"] or dbase + opts = res["options"] + fqdn = res["fqdn"] + else: + seeds.update(uri_parser.split_hosts(entity, port)) + if not seeds: + raise ConfigurationError("need to specify at least one host") + + # Add options with named keyword arguments to the parsed kwarg options. + if type_registry is not None: + keyword_opts["type_registry"] = type_registry + if tz_aware is None: + tz_aware = opts.get("tz_aware", False) + if connect is None: + connect = opts.get("connect", True) + keyword_opts["tz_aware"] = tz_aware + keyword_opts["connect"] = connect + + # Handle deprecated options in kwarg options. + keyword_opts = _handle_option_deprecations(keyword_opts) + # Validate kwarg options. + keyword_opts = common._CaseInsensitiveDictionary( + dict(common.validate(keyword_opts.cased_key(k), v) for k, v in keyword_opts.items()) + ) + + # Override connection string options with kwarg options. + opts.update(keyword_opts) + + if srv_service_name is None: + srv_service_name = opts.get("srvServiceName", common.SRV_SERVICE_NAME) + + srv_max_hosts = srv_max_hosts or opts.get("srvmaxhosts") + # Handle security-option conflicts in combined options. + opts = _handle_security_options(opts) + # Normalize combined options. + opts = _normalize_options(opts) + _check_options(seeds, opts) + + # Username and password passed as kwargs override user info in URI. 
+ username = opts.get("username", username) + password = opts.get("password", password) + self.__options = options = ClientOptions(username, password, dbase, opts) + + self.__default_database_name = dbase + self.__lock = _create_lock() + self.__kill_cursors_queue: List = [] + + self._event_listeners = options.pool_options._event_listeners + super().__init__( + options.codec_options, + options.read_preference, + options.write_concern, + options.read_concern, + ) + + self._topology_settings = TopologySettings( + seeds=seeds, + replica_set_name=options.replica_set_name, + pool_class=pool_class, + pool_options=options.pool_options, + monitor_class=monitor_class, + condition_class=condition_class, + local_threshold_ms=options.local_threshold_ms, + server_selection_timeout=options.server_selection_timeout, + server_selector=options.server_selector, + heartbeat_frequency=options.heartbeat_frequency, + fqdn=fqdn, + direct_connection=options.direct_connection, + load_balanced=options.load_balanced, + srv_service_name=srv_service_name, + srv_max_hosts=srv_max_hosts, + ) + + self._init_background() + + if connect: + self._get_topology() + + self._encrypter = None + if self.__options.auto_encryption_opts: + from pymongo.encryption import _Encrypter + + self._encrypter = _Encrypter(self, self.__options.auto_encryption_opts) + self._timeout = self.__options.timeout + + if _HAS_REGISTER_AT_FORK: + # Add this client to the list of weakly referenced items. + # This will be used later if we fork. + MongoClient._clients[self._topology._topology_id] = self + + def _init_background(self) -> None: + self._topology = Topology(self._topology_settings) + + def target() -> bool: + client = self_ref() + if client is None: + return False # Stop the executor. + MongoClient._process_periodic_tasks(client) + return True + + executor = periodic_executor.PeriodicExecutor( + interval=common.KILL_CURSOR_FREQUENCY, + min_interval=common.MIN_HEARTBEAT_INTERVAL, + target=target, + name="pymongo_kill_cursors_thread", + ) + + # We strongly reference the executor and it weakly references us via + # this closure. When the client is freed, stop the executor soon. + self_ref: Any = weakref.ref(self, executor.close) + self._kill_cursors_executor = executor + + def _after_fork(self) -> None: + """Resets topology in a child after successfully forking.""" + self._init_background() + + def _duplicate(self, **kwargs: Any) -> MongoClient: + args = self.__init_kwargs.copy() + args.update(kwargs) + return MongoClient(**args) + + def _server_property(self, attr_name: str) -> Any: + """An attribute of the current server's description. + + If the client is not connected, this will block until a connection is + established or raise ServerSelectionTimeoutError if no server is + available. + + Not threadsafe if used multiple times in a single method, since + the server may change. In such cases, store a local reference to a + ServerDescription first, then use its properties. 
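+
+        A sketch of that pattern (``writable_server_selector`` is the same
+        selector this method itself uses)::
+
+            description = self._topology.select_server(
+                writable_server_selector).description
+            address = description.address
+            server_type = description.server_type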
+        """
+        server = self._topology.select_server(writable_server_selector)
+
+        return getattr(server.description, attr_name)
+
+    def watch(
+        self,
+        pipeline: Optional[_Pipeline] = None,
+        full_document: Optional[str] = None,
+        resume_after: Optional[Mapping[str, Any]] = None,
+        max_await_time_ms: Optional[int] = None,
+        batch_size: Optional[int] = None,
+        collation: Optional[_CollationIn] = None,
+        start_at_operation_time: Optional[Timestamp] = None,
+        session: Optional[client_session.ClientSession] = None,
+        start_after: Optional[Mapping[str, Any]] = None,
+        comment: Optional[Any] = None,
+        full_document_before_change: Optional[str] = None,
+        show_expanded_events: Optional[bool] = None,
+    ) -> ChangeStream[_DocumentType]:
+        """Watch changes on this cluster.
+
+        Performs an aggregation with an implicit initial ``$changeStream``
+        stage and returns a
+        :class:`~pymongo.change_stream.ClusterChangeStream` cursor which
+        iterates over changes on all databases on this cluster.
+
+        Introduced in MongoDB 4.0.
+
+        .. code-block:: python
+
+            with client.watch() as stream:
+                for change in stream:
+                    print(change)
+
+        The :class:`~pymongo.change_stream.ClusterChangeStream` iterable
+        blocks until the next change document is returned or an error is
+        raised. If the
+        :meth:`~pymongo.change_stream.ClusterChangeStream.next` method
+        encounters a network error when retrieving a batch from the server,
+        it will automatically attempt to recreate the cursor such that no
+        change events are missed. Any error encountered during the resume
+        attempt indicates there may be an outage and will be raised.
+
+        .. code-block:: python
+
+            try:
+                with client.watch([{"$match": {"operationType": "insert"}}]) as stream:
+                    for insert_change in stream:
+                        print(insert_change)
+            except pymongo.errors.PyMongoError:
+                # The ChangeStream encountered an unrecoverable error or the
+                # resume attempt failed to recreate the cursor.
+                logging.error("...")
+
+        For a precise description of the resume process, see the
+        `change streams specification`_.
+
+        :Parameters:
+          - `pipeline` (optional): A list of aggregation pipeline stages to
+            append to an initial ``$changeStream`` stage. Not all
+            pipeline stages are valid after a ``$changeStream`` stage; see the
+            MongoDB documentation on change streams for the supported stages.
+          - `full_document` (optional): The fullDocument to pass as an option
+            to the ``$changeStream`` stage. Allowed values: 'updateLookup',
+            'whenAvailable', 'required'. When set to 'updateLookup', the
+            change notification for partial updates will include both a delta
+            describing the changes to the document and a copy of the entire
+            document that was changed, from some time after the change
+            occurred.
+          - `full_document_before_change`: Allowed values: 'whenAvailable'
+            and 'required'. Change events may now result in a
+            'fullDocumentBeforeChange' response field.
+          - `resume_after` (optional): A resume token. If provided, the
+            change stream will start returning changes that occur directly
+            after the operation specified in the resume token. A resume token
+            is the _id value of a change document.
+          - `max_await_time_ms` (optional): The maximum time in milliseconds
+            for the server to wait for changes before responding to a getMore
+            operation.
+          - `batch_size` (optional): The maximum number of documents to return
+            per batch.
+          - `collation` (optional): The :class:`~pymongo.collation.Collation`
+            to use for the aggregation.
+ - `start_at_operation_time` (optional): If provided, the resulting + change stream will only return changes that occurred at or after + the specified :class:`~bson.timestamp.Timestamp`. Requires + MongoDB >= 4.0. + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `start_after` (optional): The same as `resume_after` except that + `start_after` can resume notifications after an invalidate event. + This option and `resume_after` are mutually exclusive. + - `comment` (optional): A user-provided comment to attach to this + command. + - `show_expanded_events` (optional): Include expanded events such as DDL events like `dropIndexes`. + + :Returns: + A :class:`~pymongo.change_stream.ClusterChangeStream` cursor. + + .. versionchanged:: 4.3 + Added `show_expanded_events` parameter. + + .. versionchanged:: 4.2 + Added ``full_document_before_change`` parameter. + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + + .. versionchanged:: 3.9 + Added the ``start_after`` parameter. + + .. versionadded:: 3.7 + + .. seealso:: The MongoDB documentation on `changeStreams <https://mongodb.com/docs/manual/changeStreams/>`_. + + .. _change streams specification: + https://github.com/mongodb/specifications/blob/master/source/change-streams/change-streams.rst + """ + return ClusterChangeStream( + self.admin, + pipeline, + full_document, + resume_after, + max_await_time_ms, + batch_size, + collation, + start_at_operation_time, + session, + start_after, + comment, + full_document_before_change, + show_expanded_events=show_expanded_events, + ) + + @property + def topology_description(self) -> TopologyDescription: + """The description of the connected MongoDB deployment. + + >>> client.topology_description + <TopologyDescription id: 605a7b04e76489833a7c6113, topology_type: ReplicaSetWithPrimary, servers: [<ServerDescription ('localhost', 27017) server_type: RSPrimary, rtt: 0.0007973677999995488>, <ServerDescription ('localhost', 27018) server_type: RSSecondary, rtt: 0.0005540556000003249>, <ServerDescription ('localhost', 27019) server_type: RSSecondary, rtt: 0.0010367483999999649>]> + >>> client.topology_description.topology_type_name + 'ReplicaSetWithPrimary' + + Note that the description is periodically updated in the background + but the returned object itself is immutable. Access this property again + to get a more recent + :class:`~pymongo.topology_description.TopologyDescription`. + + :Returns: + An instance of + :class:`~pymongo.topology_description.TopologyDescription`. + + .. versionadded:: 4.0 + """ + return self._topology.description + + @property + def address(self) -> Optional[Tuple[str, int]]: + """(host, port) of the current standalone, primary, or mongos, or None. + + Accessing :attr:`address` raises :exc:`~.errors.InvalidOperation` if + the client is load-balancing among mongoses, since there is no single + address. Use :attr:`nodes` instead. + + If the client is not connected, this will block until a connection is + established or raise ServerSelectionTimeoutError if no server is + available. + + .. versionadded:: 3.0 + """ + topology_type = self._topology._description.topology_type + if ( + topology_type == TOPOLOGY_TYPE.Sharded + and len(self.topology_description.server_descriptions()) > 1 + ): + raise InvalidOperation( + 'Cannot use "address" property when load balancing among' + ' mongoses, use "nodes" instead.' 
+ ) + if topology_type not in ( + TOPOLOGY_TYPE.ReplicaSetWithPrimary, + TOPOLOGY_TYPE.Single, + TOPOLOGY_TYPE.LoadBalanced, + TOPOLOGY_TYPE.Sharded, + ): + return None + return self._server_property("address") + + @property + def primary(self) -> Optional[Tuple[str, int]]: + """The (host, port) of the current primary of the replica set. + + Returns ``None`` if this client is not connected to a replica set, + there is no primary, or this client was created without the + `replicaSet` option. + + .. versionadded:: 3.0 + MongoClient gained this property in version 3.0. + """ + return self._topology.get_primary() # type: ignore[return-value] + + @property + def secondaries(self) -> Set[_Address]: + """The secondary members known to this client. + + A sequence of (host, port) pairs. Empty if this client is not + connected to a replica set, there are no visible secondaries, or this + client was created without the `replicaSet` option. + + .. versionadded:: 3.0 + MongoClient gained this property in version 3.0. + """ + return self._topology.get_secondaries() + + @property + def arbiters(self) -> Set[_Address]: + """Arbiters in the replica set. + + A sequence of (host, port) pairs. Empty if this client is not + connected to a replica set, there are no arbiters, or this client was + created without the `replicaSet` option. + """ + return self._topology.get_arbiters() + + @property + def is_primary(self) -> bool: + """If this client is connected to a server that can accept writes. + + True if the current server is a standalone, mongos, or the primary of + a replica set. If the client is not connected, this will block until a + connection is established or raise ServerSelectionTimeoutError if no + server is available. + """ + return self._server_property("is_writable") + + @property + def is_mongos(self) -> bool: + """If this client is connected to mongos. If the client is not + connected, this will block until a connection is established or raise + ServerSelectionTimeoutError if no server is available. + """ + return self._server_property("server_type") == SERVER_TYPE.Mongos + + @property + def nodes(self) -> FrozenSet[_Address]: + """Set of all currently connected servers. + + .. warning:: When connected to a replica set the value of :attr:`nodes` + can change over time as :class:`MongoClient`'s view of the replica + set changes. :attr:`nodes` can also be an empty set when + :class:`MongoClient` is first instantiated and hasn't yet connected + to any servers, or a network partition causes it to lose connection + to all servers. + """ + description = self._topology.description + return frozenset(s.address for s in description.known_servers) + + @property + def options(self) -> ClientOptions: + """The configuration options for this client. + + :Returns: + An instance of :class:`~pymongo.client_options.ClientOptions`. + + .. versionadded:: 4.0 + """ + return self.__options + + def _end_sessions(self, session_ids: List[_ServerSession]) -> None: + """Send endSessions command(s) with the given session ids.""" + try: + # Use Connection.command directly to avoid implicitly creating + # another session. 
+ with self._conn_for_reads(ReadPreference.PRIMARY_PREFERRED, None) as ( + conn, + read_pref, + ): + if not conn.supports_sessions: + return + + for i in range(0, len(session_ids), common._MAX_END_SESSIONS): + spec = SON([("endSessions", session_ids[i : i + common._MAX_END_SESSIONS])]) + conn.command("admin", spec, read_preference=read_pref, client=self) + except PyMongoError: + # Drivers MUST ignore any errors returned by the endSessions + # command. + pass + + def close(self) -> None: + """Cleanup client resources and disconnect from MongoDB. + + End all server sessions created by this client by sending one or more + endSessions commands. + + Close all sockets in the connection pools and stop the monitor threads. + + .. versionchanged:: 4.0 + Once closed, the client cannot be used again and any attempt will + raise :exc:`~pymongo.errors.InvalidOperation`. + + .. versionchanged:: 3.6 + End all server sessions created by this client. + """ + session_ids = self._topology.pop_all_sessions() + if session_ids: + self._end_sessions(session_ids) + # Stop the periodic task thread and then send pending killCursor + # requests before closing the topology. + self._kill_cursors_executor.close() + self._process_kill_cursors() + self._topology.close() + if self._encrypter: + # TODO: PYTHON-1921 Encrypted MongoClients cannot be re-opened. + self._encrypter.close() + + def _get_topology(self) -> Topology: + """Get the internal :class:`~pymongo.topology.Topology` object. + + If this client was created with "connect=False", calling _get_topology + launches the connection process in the background. + """ + self._topology.open() + with self.__lock: + self._kill_cursors_executor.open() + return self._topology + + @contextlib.contextmanager + def _checkout(self, server: Server, session: Optional[ClientSession]) -> Iterator[Connection]: + in_txn = session and session.in_transaction + with _MongoClientErrorHandler(self, server, session) as err_handler: + # Reuse the pinned connection, if it exists. + if in_txn and session and session._pinned_connection: + err_handler.contribute_socket(session._pinned_connection) + yield session._pinned_connection + return + with server.checkout(handler=err_handler) as conn: + # Pin this session to the selected server or connection. + if ( + in_txn + and session + and server.description.server_type + in ( + SERVER_TYPE.Mongos, + SERVER_TYPE.LoadBalancer, + ) + ): + session._pin(server, conn) + err_handler.contribute_socket(conn) + if ( + self._encrypter + and not self._encrypter._bypass_auto_encryption + and conn.max_wire_version < 8 + ): + raise ConfigurationError( + "Auto-encryption requires a minimum MongoDB version of 4.2" + ) + yield conn + + def _select_server( + self, + server_selector: Callable[[Selection], Selection], + session: Optional[ClientSession], + address: Optional[_Address] = None, + ) -> Server: + """Select a server to run an operation on this client. + + :Parameters: + - `server_selector`: The server selector to use if the session is + not pinned and no address is given. + - `session`: The ClientSession for the next operation, or None. May + be pinned to a mongos server address. + - `address` (optional): Address when sending a message + to a specific server, used for getMore. + """ + try: + topology = self._get_topology() + if session and not session.in_transaction: + session._transaction.reset() + if not address and session: + address = session._pinned_address + if address: + # We're running a getMore or this session is pinned to a mongos. 
+ server = topology.select_server_by_address(address) + if not server: + raise AutoReconnect("server %s:%s no longer available" % address) + else: + server = topology.select_server(server_selector) + return server + except PyMongoError as exc: + # Server selection errors in a transaction are transient. + if session and session.in_transaction: + exc._add_error_label("TransientTransactionError") + session._unpin() + raise + + def _conn_for_writes(self, session: Optional[ClientSession]) -> ContextManager[Connection]: + server = self._select_server(writable_server_selector, session) + return self._checkout(server, session) + + @contextlib.contextmanager + def _conn_from_server( + self, read_preference: _ServerMode, server: Server, session: Optional[ClientSession] + ) -> Iterator[Tuple[Connection, _ServerMode]]: + assert read_preference is not None, "read_preference must not be None" + # Get a connection for a server matching the read preference, and yield + # conn with the effective read preference. The Server Selection + # Spec says not to send any $readPreference to standalones and to + # always send primaryPreferred when directly connected to a repl set + # member. + # Thread safe: if the type is single it cannot change. + topology = self._get_topology() + single = topology.description.topology_type == TOPOLOGY_TYPE.Single + + with self._checkout(server, session) as conn: + if single: + if conn.is_repl and not (session and session.in_transaction): + # Use primary preferred to ensure any repl set member + # can handle the request. + read_preference = ReadPreference.PRIMARY_PREFERRED + elif conn.is_standalone: + # Don't send read preference to standalones. + read_preference = ReadPreference.PRIMARY + yield conn, read_preference + + def _conn_for_reads( + self, read_preference: _ServerMode, session: Optional[ClientSession] + ) -> ContextManager[Tuple[Connection, _ServerMode]]: + assert read_preference is not None, "read_preference must not be None" + _ = self._get_topology() + server = self._select_server(read_preference, session) + return self._conn_from_server(read_preference, server, session) + + def _should_pin_cursor(self, session: Optional[ClientSession]) -> Optional[bool]: + return self.__options.load_balanced and not (session and session.in_transaction) + + @_csot.apply + def _run_operation( + self, + operation: Union[_Query, _GetMore], + unpack_res: Callable, + address: Optional[_Address] = None, + ) -> Response: + """Run a _Query/_GetMore operation and return a Response. + + :Parameters: + - `operation`: a _Query or _GetMore object. + - `unpack_res`: A callable that decodes the wire protocol response. + - `address` (optional): Optional address when sending a message + to a specific server, used for getMore. + """ + if operation.conn_mgr: + server = self._select_server( + operation.read_preference, operation.session, address=address + ) + + with operation.conn_mgr.lock: + with _MongoClientErrorHandler(self, server, operation.session) as err_handler: + err_handler.contribute_socket(operation.conn_mgr.conn) + return server.run_operation( + operation.conn_mgr.conn, + operation, + operation.read_preference, + self._event_listeners, + unpack_res, + ) + + def _cmd( + session: Optional[ClientSession], + server: Server, + conn: Connection, + read_preference: _ServerMode, + ) -> Response: + operation.reset() # Reset op in case of retry. 
+            return server.run_operation(
+                conn, operation, read_preference, self._event_listeners, unpack_res
+            )
+
+        return self._retryable_read(
+            _cmd,
+            operation.read_preference,
+            operation.session,
+            address=address,
+            retryable=isinstance(operation, message._Query),
+        )
+
+    def _retry_with_session(
+        self,
+        retryable: bool,
+        func: Callable[[Optional[ClientSession], Connection, bool], T],
+        session: Optional[ClientSession],
+        bulk: Optional[_Bulk],
+    ) -> T:
+        """Execute an operation with at most one consecutive retry.
+
+        Returns func()'s return value on success. On error, retries the same
+        command once.
+
+        Re-raises any exception thrown by func().
+        """
+        retryable = bool(
+            retryable and self.options.retry_writes and session and not session.in_transaction
+        )
+        return self._retry_internal(retryable, func, session, bulk)
+
+    @_csot.apply
+    def _retry_internal(
+        self,
+        retryable: bool,
+        func: Callable[[Optional[ClientSession], Connection, bool], T],
+        session: Optional[ClientSession],
+        bulk: Optional[_Bulk],
+    ) -> T:
+        """Internal retryable write helper."""
+        max_wire_version = 0
+        last_error: Optional[Exception] = None
+        retrying = False
+        multiple_retries = _csot.get_timeout() is not None
+
+        def is_retrying() -> bool:
+            return bulk.retrying if bulk else retrying
+
+        # Increment the transaction id up front to ensure any retry attempt
+        # will use the proper txnNumber, even if server or socket selection
+        # fails before the command can be sent.
+        if retryable and session and not session.in_transaction:
+            session._start_retryable_write()
+            if bulk:
+                bulk.started_retryable_write = True
+
+        while True:
+            if is_retrying():
+                remaining = _csot.remaining()
+                if remaining is not None and remaining <= 0:
+                    assert last_error is not None
+                    raise last_error
+            try:
+                server = self._select_server(writable_server_selector, session)
+                supports_session = (
+                    session is not None and server.description.retryable_writes_supported
+                )
+                with self._checkout(server, session) as conn:
+                    max_wire_version = conn.max_wire_version
+                    if retryable and not supports_session:
+                        if is_retrying():
+                            # A retry is not possible because this server does
+                            # not support sessions; raise the last error.
+                            assert last_error is not None
+                            raise last_error
+                        retryable = False
+                    return func(session, conn, retryable)
+            except ServerSelectionTimeoutError:
+                if is_retrying():
+                    # The application may think the write was never attempted
+                    # if we raise ServerSelectionTimeoutError on the retry
+                    # attempt. Raise the original exception instead.
+                    assert last_error is not None
+                    raise last_error
+                # A ServerSelectionTimeoutError indicates that there may
+                # be a persistent outage. Attempting to retry in this case will
+                # most likely be a waste of time.
+                raise
+            except PyMongoError as exc:
+                if not retryable:
+                    raise
+                assert session
+                # Add the RetryableWriteError label, if applicable.
+                _add_retryable_write_error(exc, max_wire_version)
+                retryable_error = exc.has_error_label("RetryableWriteError")
+                if retryable_error:
+                    session._unpin()
+                if not retryable_error or (is_retrying() and not multiple_retries):
+                    if exc.has_error_label("NoWritesPerformed") and last_error:
+                        raise last_error from exc
+                    else:
+                        raise
+                if bulk:
+                    bulk.retrying = True
+                else:
+                    retrying = True
+                if not exc.has_error_label("NoWritesPerformed"):
+                    last_error = exc
+                if last_error is None:
+                    last_error = exc
+
+    @_csot.apply
+    def _retryable_read(
+        self,
+        func: Callable[[Optional[ClientSession], Server, Connection, _ServerMode], T],
+        read_pref: _ServerMode,
+        session: Optional[ClientSession],
+        address: Optional[_Address] = None,
+        retryable: bool = True,
+    ) -> T:
+        """Execute an operation with at most one consecutive retry.
+
+        Returns func()'s return value on success. On error, retries the same
+        command once.
+
+        Re-raises any exception thrown by func().
+        """
+        retryable = (
+            retryable and self.options.retry_reads and not (session and session.in_transaction)
+        )
+        last_error: Optional[Exception] = None
+        retrying = False
+        multiple_retries = _csot.get_timeout() is not None
+
+        while True:
+            if retrying:
+                remaining = _csot.remaining()
+                if remaining is not None and remaining <= 0:
+                    assert last_error is not None
+                    raise last_error
+            try:
+                server = self._select_server(read_pref, session, address=address)
+                with self._conn_from_server(read_pref, server, session) as (
+                    conn,
+                    read_pref,
+                ):
+                    if retrying and not retryable:
+                        # A retry is not possible because this server does
+                        # not support retryable reads; raise the last error.
+                        assert last_error is not None
+                        raise last_error
+                    return func(session, server, conn, read_pref)
+            except ServerSelectionTimeoutError:
+                if retrying:
+                    # The application may think the operation was never
+                    # attempted if we raise ServerSelectionTimeoutError on the
+                    # retry attempt. Raise the original exception instead.
+                    assert last_error is not None
+                    raise last_error
+                # A ServerSelectionTimeoutError indicates that there may
+                # be a persistent outage. Attempting to retry in this case will
+                # most likely be a waste of time.
+ raise + except ConnectionFailure as exc: + if not retryable or (retrying and not multiple_retries): + raise + retrying = True + last_error = exc + except OperationFailure as exc: + if not retryable or (retrying and not multiple_retries): + raise + if exc.code not in helpers._RETRYABLE_ERROR_CODES: + raise + retrying = True + last_error = exc + + def _retryable_write( + self, + retryable: bool, + func: Callable[[Optional[ClientSession], Connection, bool], T], + session: Optional[ClientSession], + ) -> T: + """Internal retryable write helper.""" + with self._tmp_session(session) as s: + return self._retry_with_session(retryable, func, s, None) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, self.__class__): + return self._topology == other._topology + return NotImplemented + + def __ne__(self, other: Any) -> bool: + return not self == other + + def __hash__(self) -> int: + return hash(self._topology) + + def _repr_helper(self) -> str: + def option_repr(option: str, value: Any) -> str: + """Fix options whose __repr__ isn't usable in a constructor.""" + if option == "document_class": + if value is dict: + return "document_class=dict" + else: + return f"document_class={value.__module__}.{value.__name__}" + if option in common.TIMEOUT_OPTIONS and value is not None: + return f"{option}={int(value * 1000)}" + + return f"{option}={value!r}" + + # Host first... + options = [ + "host=%r" + % [ + "%s:%d" % (host, port) if port is not None else host + for host, port in self._topology_settings.seeds + ] + ] + # ... then everything in self._constructor_args... + options.extend( + option_repr(key, self.__options._options[key]) for key in self._constructor_args + ) + # ... then everything else. + options.extend( + option_repr(key, self.__options._options[key]) + for key in self.__options._options + if key not in set(self._constructor_args) and key != "username" and key != "password" + ) + return ", ".join(options) + + def __repr__(self) -> str: + return f"MongoClient({self._repr_helper()})" + + def __getattr__(self, name: str) -> database.Database[_DocumentType]: + """Get a database by name. + + Raises :class:`~pymongo.errors.InvalidName` if an invalid + database name is used. + + :Parameters: + - `name`: the name of the database to get + """ + if name.startswith("_"): + raise AttributeError( + "MongoClient has no attribute {!r}. To access the {}" + " database, use client[{!r}].".format(name, name, name) + ) + return self.__getitem__(name) + + def __getitem__(self, name: str) -> database.Database[_DocumentType]: + """Get a database by name. + + Raises :class:`~pymongo.errors.InvalidName` if an invalid + database name is used. + + :Parameters: + - `name`: the name of the database to get + """ + return database.Database(self, name) + + def _cleanup_cursor( + self, + locks_allowed: bool, + cursor_id: int, + address: Optional[_CursorAddress], + conn_mgr: _ConnectionManager, + session: Optional[ClientSession], + explicit_session: bool, + ) -> None: + """Cleanup a cursor from cursor.close() or __del__. + + This method handles cleanup for Cursors/CommandCursors including any + pinned connection or implicit session attached at the time the cursor + was closed or garbage collected. + + :Parameters: + - `locks_allowed`: True if we are allowed to acquire locks. + - `cursor_id`: The cursor id which may be 0. + - `address`: The _CursorAddress. + - `conn_mgr`: The _ConnectionManager for the pinned connection or None. + - `session`: The cursor's session. 
+ - `explicit_session`: True if the session was passed explicitly. + """ + if locks_allowed: + if cursor_id: + if conn_mgr and conn_mgr.more_to_come: + # If this is an exhaust cursor and we haven't completely + # exhausted the result set we *must* close the socket + # to stop the server from sending more data. + assert conn_mgr.conn is not None + conn_mgr.conn.close_conn(ConnectionClosedReason.ERROR) + else: + self._close_cursor_now(cursor_id, address, session=session, conn_mgr=conn_mgr) + if conn_mgr: + conn_mgr.close() + else: + # The cursor will be closed later in a different session. + if cursor_id or conn_mgr: + self._close_cursor_soon(cursor_id, address, conn_mgr) + if session and not explicit_session: + session._end_session(lock=locks_allowed) + + def _close_cursor_soon( + self, + cursor_id: int, + address: Optional[_CursorAddress], + conn_mgr: Optional[_ConnectionManager] = None, + ) -> None: + """Request that a cursor and/or connection be cleaned up soon.""" + self.__kill_cursors_queue.append((address, cursor_id, conn_mgr)) + + def _close_cursor_now( + self, + cursor_id: int, + address: Optional[_CursorAddress], + session: Optional[ClientSession] = None, + conn_mgr: Optional[_ConnectionManager] = None, + ) -> None: + """Send a kill cursors message with the given id. + + The cursor is closed synchronously on the current thread. + """ + if not isinstance(cursor_id, int): + raise TypeError("cursor_id must be an instance of int") + + try: + if conn_mgr: + with conn_mgr.lock: + # Cursor is pinned to LB outside of a transaction. + assert address is not None + assert conn_mgr.conn is not None + self._kill_cursor_impl([cursor_id], address, session, conn_mgr.conn) + else: + self._kill_cursors([cursor_id], address, self._get_topology(), session) + except PyMongoError: + # Make another attempt to kill the cursor later. + self._close_cursor_soon(cursor_id, address) + + def _kill_cursors( + self, + cursor_ids: Sequence[int], + address: Optional[_CursorAddress], + topology: Topology, + session: Optional[ClientSession], + ) -> None: + """Send a kill cursors message with the given ids.""" + if address: + # address could be a tuple or _CursorAddress, but + # select_server_by_address needs (host, port). + server = topology.select_server_by_address(tuple(address)) # type: ignore[arg-type] + else: + # Application called close_cursor() with no address. + server = topology.select_server(writable_server_selector) + + with self._checkout(server, session) as conn: + assert address is not None + self._kill_cursor_impl(cursor_ids, address, session, conn) + + def _kill_cursor_impl( + self, + cursor_ids: Sequence[int], + address: _CursorAddress, + session: Optional[ClientSession], + conn: Connection, + ) -> None: + namespace = address.namespace + db, coll = namespace.split(".", 1) + spec = SON([("killCursors", coll), ("cursors", cursor_ids)]) + conn.command(db, spec, session=session, client=self) + + def _process_kill_cursors(self) -> None: + """Process any pending kill cursors requests.""" + address_to_cursor_ids = defaultdict(list) + pinned_cursors = [] + + # Other threads or the GC may append to the queue concurrently. 
+        while True:
+            try:
+                address, cursor_id, conn_mgr = self.__kill_cursors_queue.pop()
+            except IndexError:
+                break
+
+            if conn_mgr:
+                pinned_cursors.append((address, cursor_id, conn_mgr))
+            else:
+                address_to_cursor_ids[address].append(cursor_id)
+
+        for address, cursor_id, conn_mgr in pinned_cursors:
+            try:
+                self._cleanup_cursor(True, cursor_id, address, conn_mgr, None, False)
+            except Exception as exc:
+                if isinstance(exc, InvalidOperation) and self._topology._closed:
+                    # Raise the exception when the client is closed so that it
+                    # can be caught in _process_periodic_tasks.
+                    raise
+                else:
+                    helpers._handle_exception()
+
+        # Don't re-open the topology if it's closed and there are no pending
+        # cursors.
+        if address_to_cursor_ids:
+            topology = self._get_topology()
+            for address, cursor_ids in address_to_cursor_ids.items():
+                try:
+                    self._kill_cursors(cursor_ids, address, topology, session=None)
+                except Exception as exc:
+                    if isinstance(exc, InvalidOperation) and self._topology._closed:
+                        raise
+                    else:
+                        helpers._handle_exception()
+
+    # This method is run periodically by a background thread.
+    def _process_periodic_tasks(self) -> None:
+        """Process any pending kill cursors requests and
+        maintain connection pool parameters.
+        """
+        try:
+            self._process_kill_cursors()
+            self._topology.update_pool()
+        except Exception as exc:
+            if isinstance(exc, InvalidOperation) and self._topology._closed:
+                return
+            else:
+                helpers._handle_exception()
+
+    def __start_session(self, implicit: bool, **kwargs: Any) -> ClientSession:
+        # Raises ConfigurationError if sessions are not supported.
+        if implicit:
+            self._topology._check_implicit_session_support()
+            server_session: Union[_EmptyServerSession, _ServerSession] = _EmptyServerSession()
+        else:
+            server_session = self._get_server_session()
+        opts = client_session.SessionOptions(**kwargs)
+        return client_session.ClientSession(self, server_session, opts, implicit)
+
+    def start_session(
+        self,
+        causal_consistency: Optional[bool] = None,
+        default_transaction_options: Optional[client_session.TransactionOptions] = None,
+        snapshot: Optional[bool] = False,
+    ) -> client_session.ClientSession:
+        """Start a logical session.
+
+        This method takes the same parameters as
+        :class:`~pymongo.client_session.SessionOptions`. See the
+        :mod:`~pymongo.client_session` module for details and examples.
+
+        A :class:`~pymongo.client_session.ClientSession` may only be used with
+        the MongoClient that started it. :class:`ClientSession` instances are
+        **not thread-safe or fork-safe**. They can only be used by one thread
+        or process at a time. A single :class:`ClientSession` cannot be used
+        to run multiple operations concurrently.
+
+        :Returns:
+          An instance of :class:`~pymongo.client_session.ClientSession`.
+
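+        A minimal usage sketch (the database and collection names here are
+        illustrative)::
+
+            with client.start_session(causal_consistency=True) as session:
+                client.db.collection.insert_one({"x": 1}, session=session)
+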
+        .. versionadded:: 3.6
+        """
+        return self.__start_session(
+            False,
+            causal_consistency=causal_consistency,
+            default_transaction_options=default_transaction_options,
+            snapshot=snapshot,
+        )
+
+    def _get_server_session(self) -> _ServerSession:
+        """Internal: start or resume a _ServerSession."""
+        return self._topology.get_server_session()
+
+    def _return_server_session(
+        self, server_session: Union[_ServerSession, _EmptyServerSession], lock: bool
+    ) -> None:
+        """Internal: return a _ServerSession to the pool."""
+        if isinstance(server_session, _EmptyServerSession):
+            return None
+        return self._topology.return_server_session(server_session, lock)
+
+    def _ensure_session(self, session: Optional[ClientSession] = None) -> Optional[ClientSession]:
+        """If provided session is None, lend a temporary session."""
+        if session:
+            return session
+
+        try:
+            # Don't make implicit sessions causally consistent. Applications
+            # should always opt-in.
+            return self.__start_session(True, causal_consistency=False)
+        except (ConfigurationError, InvalidOperation):
+            # Sessions not supported.
+            return None
+
+    @contextlib.contextmanager
+    def _tmp_session(
+        self, session: Optional[client_session.ClientSession], close: bool = True
+    ) -> Generator[Optional[client_session.ClientSession], None, None]:
+        """If provided session is None, lend a temporary session."""
+        if session is not None:
+            if not isinstance(session, client_session.ClientSession):
+                raise ValueError("'session' argument must be a ClientSession or None.")
+            # Don't call end_session.
+            yield session
+            return
+
+        s = self._ensure_session(session)
+        if s:
+            try:
+                yield s
+            except Exception as exc:
+                if isinstance(exc, ConnectionFailure):
+                    s._server_session.mark_dirty()
+
+                # Always call end_session on error.
+                s.end_session()
+                raise
+            finally:
+                # Call end_session when we exit this scope.
+                if close:
+                    s.end_session()
+        else:
+            yield None
+
+    def _send_cluster_time(
+        self, command: MutableMapping[str, Any], session: Optional[ClientSession]
+    ) -> None:
+        topology_time = self._topology.max_cluster_time()
+        session_time = session.cluster_time if session else None
+        if topology_time and session_time:
+            if topology_time["clusterTime"] > session_time["clusterTime"]:
+                cluster_time: Optional[ClusterTime] = topology_time
+            else:
+                cluster_time = session_time
+        else:
+            cluster_time = topology_time or session_time
+        if cluster_time:
+            command["$clusterTime"] = cluster_time
+
+    def _process_response(self, reply: Mapping[str, Any], session: Optional[ClientSession]) -> None:
+        self._topology.receive_cluster_time(reply.get("$clusterTime"))
+        if session is not None:
+            session._process_response(reply)
+
+    def server_info(self, session: Optional[client_session.ClientSession] = None) -> Dict[str, Any]:
+        """Get information about the MongoDB server we're connected to.
+
+        :Parameters:
+          - `session` (optional): a
+            :class:`~pymongo.client_session.ClientSession`.
+
+        .. versionchanged:: 3.6
+           Added ``session`` parameter.
+        """
+        return cast(
+            dict,
+            self.admin.command(
+                "buildinfo", read_preference=ReadPreference.PRIMARY, session=session
+            ),
+        )
+
+    def list_databases(
+        self,
+        session: Optional[client_session.ClientSession] = None,
+        comment: Optional[Any] = None,
+        **kwargs: Any,
+    ) -> CommandCursor[Dict[str, Any]]:
+        """Get a cursor over the databases of the connected server.
+
+        :Parameters:
+          - `session` (optional): a
+            :class:`~pymongo.client_session.ClientSession`.
+          - `comment` (optional): A user-provided comment to attach to this
+            command.
+ - `**kwargs` (optional): Optional parameters of the + `listDatabases command + <https://mongodb.com/docs/manual/reference/command/listDatabases/>`_ + can be passed as keyword arguments to this method. The supported + options differ by server version. + + + :Returns: + An instance of :class:`~pymongo.command_cursor.CommandCursor`. + + .. versionadded:: 3.6 + """ + cmd = SON([("listDatabases", 1)]) + cmd.update(kwargs) + if comment is not None: + cmd["comment"] = comment + admin = self._database_default_options("admin") + res = admin._retryable_read_command(cmd, session=session) + # listDatabases doesn't return a cursor (yet). Fake one. + cursor = { + "id": 0, + "firstBatch": res["databases"], + "ns": "admin.$cmd", + } + return CommandCursor(admin["$cmd"], cursor, None, comment=comment) + + def list_database_names( + self, + session: Optional[client_session.ClientSession] = None, + comment: Optional[Any] = None, + ) -> List[str]: + """Get a list of the names of all databases on the connected server. + + :Parameters: + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + + .. versionadded:: 3.6 + """ + return [doc["name"] for doc in self.list_databases(session, nameOnly=True, comment=comment)] + + @_csot.apply + def drop_database( + self, + name_or_database: Union[str, database.Database[_DocumentTypeArg]], + session: Optional[client_session.ClientSession] = None, + comment: Optional[Any] = None, + ) -> None: + """Drop a database. + + Raises :class:`TypeError` if `name_or_database` is not an instance of + :class:`str` or :class:`~pymongo.database.Database`. + + :Parameters: + - `name_or_database`: the name of a database to drop, or a + :class:`~pymongo.database.Database` instance representing the + database to drop + - `session` (optional): a + :class:`~pymongo.client_session.ClientSession`. + - `comment` (optional): A user-provided comment to attach to this + command. + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + + .. versionchanged:: 3.6 + Added ``session`` parameter. + + .. note:: The :attr:`~pymongo.mongo_client.MongoClient.write_concern` of + this client is automatically applied to this operation. + + .. versionchanged:: 3.4 + Apply this client's write concern automatically to this operation + when connected to MongoDB >= 3.4. + + """ + name = name_or_database + if isinstance(name, database.Database): + name = name.name + + if not isinstance(name, str): + raise TypeError("name_or_database must be an instance of str or a Database") + + with self._conn_for_writes(session) as conn: + self[name]._command( + conn, + {"dropDatabase": 1, "comment": comment}, + read_preference=ReadPreference.PRIMARY, + write_concern=self._write_concern_for(session), + parse_write_concern_error=True, + session=session, + ) + + def get_default_database( + self, + default: Optional[str] = None, + codec_options: Optional[bson.CodecOptions[_DocumentTypeArg]] = None, + read_preference: Optional[_ServerMode] = None, + write_concern: Optional[WriteConcern] = None, + read_concern: Optional[ReadConcern] = None, + ) -> database.Database[_DocumentType]: + """Get the database named in the MongoDB connection URI. 
+ + >>> uri = 'mongodb://host/my_database' + >>> client = MongoClient(uri) + >>> db = client.get_default_database() + >>> assert db.name == 'my_database' + >>> db = client.get_database() + >>> assert db.name == 'my_database' + + Useful in scripts where you want to choose which database to use + based only on the URI in a configuration file. + + :Parameters: + - `default` (optional): the database name to use if no database name + was provided in the URI. + - `codec_options` (optional): An instance of + :class:`~bson.codec_options.CodecOptions`. If ``None`` (the + default) the :attr:`codec_options` of this :class:`MongoClient` is + used. + - `read_preference` (optional): The read preference to use. If + ``None`` (the default) the :attr:`read_preference` of this + :class:`MongoClient` is used. See :mod:`~pymongo.read_preferences` + for options. + - `write_concern` (optional): An instance of + :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the + default) the :attr:`write_concern` of this :class:`MongoClient` is + used. + - `read_concern` (optional): An instance of + :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the + default) the :attr:`read_concern` of this :class:`MongoClient` is + used. + - `comment` (optional): A user-provided comment to attach to this + command. + + .. versionchanged:: 4.1 + Added ``comment`` parameter. + + .. versionchanged:: 3.8 + Undeprecated. Added the ``default``, ``codec_options``, + ``read_preference``, ``write_concern`` and ``read_concern`` + parameters. + + .. versionchanged:: 3.5 + Deprecated, use :meth:`get_database` instead. + """ + if self.__default_database_name is None and default is None: + raise ConfigurationError("No default database name defined or provided.") + + name = cast(str, self.__default_database_name or default) + return database.Database( + self, name, codec_options, read_preference, write_concern, read_concern + ) + + def get_database( + self, + name: Optional[str] = None, + codec_options: Optional[bson.CodecOptions[_DocumentTypeArg]] = None, + read_preference: Optional[_ServerMode] = None, + write_concern: Optional[WriteConcern] = None, + read_concern: Optional[ReadConcern] = None, + ) -> database.Database[_DocumentType]: + """Get a :class:`~pymongo.database.Database` with the given name and + options. + + Useful for creating a :class:`~pymongo.database.Database` with + different codec options, read preference, and/or write concern from + this :class:`MongoClient`. + + >>> client.read_preference + Primary() + >>> db1 = client.test + >>> db1.read_preference + Primary() + >>> from pymongo import ReadPreference + >>> db2 = client.get_database( + ... 'test', read_preference=ReadPreference.SECONDARY) + >>> db2.read_preference + Secondary(tag_sets=None) + + :Parameters: + - `name` (optional): The name of the database - a string. If ``None`` + (the default) the database named in the MongoDB connection URI is + returned. + - `codec_options` (optional): An instance of + :class:`~bson.codec_options.CodecOptions`. If ``None`` (the + default) the :attr:`codec_options` of this :class:`MongoClient` is + used. + - `read_preference` (optional): The read preference to use. If + ``None`` (the default) the :attr:`read_preference` of this + :class:`MongoClient` is used. See :mod:`~pymongo.read_preferences` + for options. + - `write_concern` (optional): An instance of + :class:`~pymongo.write_concern.WriteConcern`. If ``None`` (the + default) the :attr:`write_concern` of this :class:`MongoClient` is + used. 
+ - `read_concern` (optional): An instance of + :class:`~pymongo.read_concern.ReadConcern`. If ``None`` (the + default) the :attr:`read_concern` of this :class:`MongoClient` is + used. + + .. versionchanged:: 3.5 + The `name` parameter is now optional, defaulting to the database + named in the MongoDB connection URI. + """ + if name is None: + if self.__default_database_name is None: + raise ConfigurationError("No default database defined") + name = self.__default_database_name + + return database.Database( + self, name, codec_options, read_preference, write_concern, read_concern + ) + + def _database_default_options(self, name: str) -> Database: + """Get a Database instance with the default settings.""" + return self.get_database( + name, + codec_options=DEFAULT_CODEC_OPTIONS, + read_preference=ReadPreference.PRIMARY, + write_concern=DEFAULT_WRITE_CONCERN, + ) + + def __enter__(self) -> "MongoClient[_DocumentType]": + return self + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: + self.close() + + # See PYTHON-3084. + __iter__ = None + + def __next__(self) -> NoReturn: + raise TypeError("'MongoClient' object is not iterable") + + next = __next__ + + +def _retryable_error_doc(exc: PyMongoError) -> Optional[Mapping[str, Any]]: + """Return the server response from PyMongo exception or None.""" + if isinstance(exc, BulkWriteError): + # Check the last writeConcernError to determine if this + # BulkWriteError is retryable. + wces = exc.details["writeConcernErrors"] + wce = wces[-1] if wces else None + return wce + if isinstance(exc, (NotPrimaryError, OperationFailure)): + return cast(Mapping[str, Any], exc.details) + return None + + +def _add_retryable_write_error(exc: PyMongoError, max_wire_version: int) -> None: + doc = _retryable_error_doc(exc) + if doc: + code = doc.get("code", 0) + # retryWrites on MMAPv1 should raise an actionable error. + if code == 20 and str(exc).startswith("Transaction numbers"): + errmsg = ( + "This MongoDB deployment does not support " + "retryable writes. Please add retryWrites=false " + "to your connection string." + ) + raise OperationFailure(errmsg, code, exc.details) # type: ignore[attr-defined] + if max_wire_version >= 9: + # In MongoDB 4.4+, the server reports the error labels. + for label in doc.get("errorLabels", []): + exc._add_error_label(label) + else: + if code in helpers._RETRYABLE_ERROR_CODES: + exc._add_error_label("RetryableWriteError") + + # Connection errors are always retryable except NotPrimaryError and WaitQueueTimeoutError which is + # handled above. + if isinstance(exc, ConnectionFailure) and not isinstance( + exc, (NotPrimaryError, WaitQueueTimeoutError) + ): + exc._add_error_label("RetryableWriteError") + + +class _MongoClientErrorHandler: + """Handle errors raised when executing an operation.""" + + __slots__ = ( + "client", + "server_address", + "session", + "max_wire_version", + "sock_generation", + "completed_handshake", + "service_id", + "handled", + ) + + def __init__(self, client: MongoClient, server: Server, session: Optional[ClientSession]): + self.client = client + self.server_address = server.description.address + self.session = session + self.max_wire_version = common.MIN_WIRE_VERSION + # XXX: When get_socket fails, this generation could be out of date: + # "Note that when a network error occurs before the handshake + # completes then the error's generation number is the generation + # of the pool at the time the connection attempt was started." 
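+        # Snapshot the pool's overall generation up front so that handle()
+        # can still build a best-effort _ErrorContext when the connection
+        # attempt fails before contribute_socket() supplies the real values.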
+ self.sock_generation = server.pool.gen.get_overall() + self.completed_handshake = False + self.service_id: Optional[ObjectId] = None + self.handled = False + + def contribute_socket(self, conn: Connection, completed_handshake: bool = True) -> None: + """Provide socket information to the error handler.""" + self.max_wire_version = conn.max_wire_version + self.sock_generation = conn.generation + self.service_id = conn.service_id + self.completed_handshake = completed_handshake + + def handle( + self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException] + ) -> None: + if self.handled or exc_val is None: + return + self.handled = True + if self.session: + if isinstance(exc_val, ConnectionFailure): + if self.session.in_transaction: + exc_val._add_error_label("TransientTransactionError") + self.session._server_session.mark_dirty() + + if isinstance(exc_val, PyMongoError): + if exc_val.has_error_label("TransientTransactionError") or exc_val.has_error_label( + "RetryableWriteError" + ): + self.session._unpin() + err_ctx = _ErrorContext( + exc_val, + self.max_wire_version, + self.sock_generation, + self.completed_handshake, + self.service_id, + ) + self.client._topology.handle_error(self.server_address, err_ctx) + + def __enter__(self) -> _MongoClientErrorHandler: + return self + + def __exit__( + self, + exc_type: Optional[Type[Exception]], + exc_val: Optional[Exception], + exc_tb: Optional[TracebackType], + ) -> None: + return self.handle(exc_type, exc_val) + + +def _after_fork_child() -> None: + """Releases the locks in child process and resets the + topologies in all MongoClients. + """ + # Reinitialize locks + _release_locks() + + # Perform cleanup in clients (i.e. get rid of topology) + for _, client in MongoClient._clients.items(): + client._after_fork() + + +if _HAS_REGISTER_AT_FORK: + # This will run in the same thread as the fork was called. + # If we fork in a critical region on the same thread, it should break. + # This is fine since we would never call fork directly from a critical region. + os.register_at_fork(after_in_child=_after_fork_child) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/monitor.py b/backend/test/lib/python3.8/site-packages/pymongo/monitor.py new file mode 100644 index 0000000000000000000000000000000000000000..b2ff3404f9eafaaad310e779f2dbd3f648bc0e4f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/monitor.py @@ -0,0 +1,461 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. 
+ +"""Class to monitor a MongoDB server on a background thread.""" + +from __future__ import annotations + +import atexit +import time +import weakref +from typing import TYPE_CHECKING, Any, List, Mapping, Optional, Tuple, cast + +from pymongo import common, periodic_executor +from pymongo._csot import MovingMinimum +from pymongo.errors import NotPrimaryError, OperationFailure, _OperationCancelled +from pymongo.hello import Hello +from pymongo.lock import _create_lock +from pymongo.periodic_executor import _shutdown_executors +from pymongo.read_preferences import MovingAverage +from pymongo.server_description import ServerDescription +from pymongo.srv_resolver import _SrvResolver + +if TYPE_CHECKING: + from pymongo.pool import Connection, Pool, _CancellationContext + from pymongo.settings import TopologySettings + from pymongo.topology import Topology + + +def _sanitize(error: Exception) -> None: + """PYTHON-2433 Clear error traceback info.""" + error.__traceback__ = None + error.__context__ = None + error.__cause__ = None + + +class MonitorBase: + def __init__(self, topology: Topology, name: str, interval: int, min_interval: float): + """Base class to do periodic work on a background thread. + + The background thread is signaled to stop when the Topology or + this instance is freed. + """ + # We strongly reference the executor and it weakly references us via + # this closure. When the monitor is freed, stop the executor soon. + def target() -> bool: + monitor = self_ref() + if monitor is None: + return False # Stop the executor. + monitor._run() # type:ignore[attr-defined] + return True + + executor = periodic_executor.PeriodicExecutor( + interval=interval, min_interval=min_interval, target=target, name=name + ) + + self._executor = executor + + def _on_topology_gc(dummy: Optional[Topology] = None) -> None: + # This prevents GC from waiting 10 seconds for hello to complete + # See test_cleanup_executors_on_client_del. + monitor = self_ref() + if monitor: + monitor.gc_safe_close() + + # Avoid cycles. When self or topology is freed, stop executor soon. + self_ref = weakref.ref(self, executor.close) + self._topology = weakref.proxy(topology, _on_topology_gc) + _register(self) + + def open(self) -> None: + """Start monitoring, or restart after a fork. + + Multiple calls have no effect. + """ + self._executor.open() + + def gc_safe_close(self) -> None: + """GC safe close.""" + self._executor.close() + + def close(self) -> None: + """Close and stop monitoring. + + open() restarts the monitor after closing. + """ + self.gc_safe_close() + + def join(self, timeout: Optional[int] = None) -> None: + """Wait for the monitor to stop.""" + self._executor.join(timeout) + + def request_check(self) -> None: + """If the monitor is sleeping, wake it soon.""" + self._executor.wake() + + +class Monitor(MonitorBase): + def __init__( + self, + server_description: ServerDescription, + topology: Topology, + pool: Pool, + topology_settings: TopologySettings, + ): + """Class to monitor a MongoDB server on a background thread. + + Pass an initial ServerDescription, a Topology, a Pool, and + TopologySettings. + + The Topology is weakly referenced. The Pool must be exclusive to this + Monitor. 
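+        The pool is reset whenever a server check fails, which is why it
+        cannot be shared with application operations.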
+ """ + super().__init__( + topology, + "pymongo_server_monitor_thread", + topology_settings.heartbeat_frequency, + common.MIN_HEARTBEAT_INTERVAL, + ) + self._server_description = server_description + self._pool = pool + self._settings = topology_settings + self._listeners = self._settings._pool_options._event_listeners + self._publish = self._listeners is not None and self._listeners.enabled_for_server_heartbeat + self._cancel_context: Optional[_CancellationContext] = None + self._rtt_monitor = _RttMonitor( + topology, + topology_settings, + topology._create_pool_for_monitor(server_description.address), + ) + self.heartbeater = None + + def cancel_check(self) -> None: + """Cancel any concurrent hello check. + + Note: this is called from a weakref.proxy callback and MUST NOT take + any locks. + """ + context = self._cancel_context + if context: + # Note: we cannot close the socket because doing so may cause + # concurrent reads/writes to hang until a timeout occurs + # (depending on the platform). + context.cancel() + + def _start_rtt_monitor(self) -> None: + """Start an _RttMonitor that periodically runs ping.""" + # If this monitor is closed directly before (or during) this open() + # call, the _RttMonitor will not be closed. Checking if this monitor + # was closed directly after resolves the race. + self._rtt_monitor.open() + if self._executor._stopped: + self._rtt_monitor.close() + + def gc_safe_close(self) -> None: + self._executor.close() + self._rtt_monitor.gc_safe_close() + self.cancel_check() + + def close(self) -> None: + self.gc_safe_close() + self._rtt_monitor.close() + # Increment the generation and maybe close the socket. If the executor + # thread has the socket checked out, it will be closed when checked in. + self._reset_connection() + + def _reset_connection(self) -> None: + # Clear our pooled connection. + self._pool.reset() + + def _run(self) -> None: + try: + prev_sd = self._server_description + try: + self._server_description = self._check_server() + except _OperationCancelled as exc: + _sanitize(exc) + # Already closed the connection, wait for the next check. + self._server_description = ServerDescription( + self._server_description.address, error=exc + ) + if prev_sd.is_server_type_known: + # Immediately retry since we've already waited 500ms to + # discover that we've been cancelled. + self._executor.skip_sleep() + return + + # Update the Topology and clear the server pool on error. + self._topology.on_change( + self._server_description, reset_pool=self._server_description.error + ) + + if ( + self._server_description.is_server_type_known + and self._server_description.topology_version + ): + self._start_rtt_monitor() + # Immediately check for the next streaming response. + self._executor.skip_sleep() + + if self._server_description.error and prev_sd.is_server_type_known: + # Immediately retry on network errors. + self._executor.skip_sleep() + except ReferenceError: + # Topology was garbage-collected. + self.close() + + def _check_server(self) -> ServerDescription: + """Call hello or read the next streaming response. + + Returns a ServerDescription. + """ + start = time.monotonic() + try: + try: + return self._check_once() + except (OperationFailure, NotPrimaryError) as exc: + # Update max cluster time even when hello fails. 
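+                # A failed hello still produces a server reply, and that
+                # reply can carry "$clusterTime"; forward it so the gossiped
+                # cluster time keeps advancing despite the failure.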
+ details = cast(Mapping[str, Any], exc.details) + self._topology.receive_cluster_time(details.get("$clusterTime")) + raise + except ReferenceError: + raise + except Exception as error: + _sanitize(error) + sd = self._server_description + address = sd.address + duration = time.monotonic() - start + if self._publish: + awaited = bool(sd.is_server_type_known and sd.topology_version) + assert self._listeners is not None + self._listeners.publish_server_heartbeat_failed(address, duration, error, awaited) + self._reset_connection() + if isinstance(error, _OperationCancelled): + raise + self._rtt_monitor.reset() + # Server type defaults to Unknown. + return ServerDescription(address, error=error) + + def _check_once(self) -> ServerDescription: + """A single attempt to call hello. + + Returns a ServerDescription, or raises an exception. + """ + address = self._server_description.address + if self._publish: + assert self._listeners is not None + self._listeners.publish_server_heartbeat_started(address) + + if self._cancel_context and self._cancel_context.cancelled: + self._reset_connection() + with self._pool.checkout() as conn: + self._cancel_context = conn.cancel_context + response, round_trip_time = self._check_with_socket(conn) + if not response.awaitable: + self._rtt_monitor.add_sample(round_trip_time) + + avg_rtt, min_rtt = self._rtt_monitor.get() + sd = ServerDescription(address, response, avg_rtt, min_round_trip_time=min_rtt) + if self._publish: + assert self._listeners is not None + self._listeners.publish_server_heartbeat_succeeded( + address, round_trip_time, response, response.awaitable + ) + return sd + + def _check_with_socket(self, conn: Connection) -> Tuple[Hello, float]: + """Return (Hello, round_trip_time). + + Can raise ConnectionFailure or OperationFailure. + """ + cluster_time = self._topology.max_cluster_time() + start = time.monotonic() + if conn.more_to_come: + # Read the next streaming hello (MongoDB 4.4+). + response = Hello(conn._next_reply(), awaitable=True) + elif conn.performed_handshake and self._server_description.topology_version: + # Initiate streaming hello (MongoDB 4.4+). + response = conn._hello( + cluster_time, + self._server_description.topology_version, + self._settings.heartbeat_frequency, + ) + else: + # New connection handshake or polling hello (MongoDB <4.4). + response = conn._hello(cluster_time, None, None) + return response, time.monotonic() - start + + +class SrvMonitor(MonitorBase): + def __init__(self, topology: Topology, topology_settings: TopologySettings): + """Class to poll SRV records on a background thread. + + Pass a Topology and a TopologySettings. + + The Topology is weakly referenced. + """ + super().__init__( + topology, + "pymongo_srv_polling_thread", + common.MIN_SRV_RESCAN_INTERVAL, + topology_settings.heartbeat_frequency, + ) + self._settings = topology_settings + self._seedlist = self._settings._seeds + assert isinstance(self._settings.fqdn, str) + self._fqdn: str = self._settings.fqdn + + def _run(self) -> None: + seedlist = self._get_seedlist() + if seedlist: + self._seedlist = seedlist + try: + self._topology.on_srv_update(self._seedlist) + except ReferenceError: + # Topology was garbage-collected. + self.close() + + def _get_seedlist(self) -> Optional[List[Tuple[str, Any]]]: + """Poll SRV records for a seedlist. + + Returns a list of ServerDescriptions. 
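+        In practice each entry is a resolved ``(host, port)`` pair;
+        ``None`` is returned when the SRV lookup fails.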
+ """ + try: + resolver = _SrvResolver( + self._fqdn, + self._settings.pool_options.connect_timeout, + self._settings.srv_service_name, + ) + seedlist, ttl = resolver.get_hosts_and_min_ttl() + if len(seedlist) == 0: + # As per the spec: this should be treated as a failure. + raise Exception + except Exception: + # As per the spec, upon encountering an error: + # - An error must not be raised + # - SRV records must be rescanned every heartbeatFrequencyMS + # - Topology must be left unchanged + self.request_check() + return None + else: + self._executor.update_interval(max(ttl, common.MIN_SRV_RESCAN_INTERVAL)) + return seedlist + + +class _RttMonitor(MonitorBase): + def __init__(self, topology: Topology, topology_settings: TopologySettings, pool: Pool): + """Maintain round trip times for a server. + + The Topology is weakly referenced. + """ + super().__init__( + topology, + "pymongo_server_rtt_thread", + topology_settings.heartbeat_frequency, + common.MIN_HEARTBEAT_INTERVAL, + ) + + self._pool = pool + self._moving_average = MovingAverage() + self._moving_min = MovingMinimum() + self._lock = _create_lock() + + def close(self) -> None: + self.gc_safe_close() + # Increment the generation and maybe close the socket. If the executor + # thread has the socket checked out, it will be closed when checked in. + self._pool.reset() + + def add_sample(self, sample: float) -> None: + """Add a RTT sample.""" + with self._lock: + self._moving_average.add_sample(sample) + self._moving_min.add_sample(sample) + + def get(self) -> Tuple[Optional[float], float]: + """Get the calculated average, or None if no samples yet and the min.""" + with self._lock: + return self._moving_average.get(), self._moving_min.get() + + def reset(self) -> None: + """Reset the average RTT.""" + with self._lock: + self._moving_average.reset() + self._moving_min.reset() + + def _run(self) -> None: + try: + # NOTE: This thread is only run when using the streaming + # heartbeat protocol (MongoDB 4.4+). + # XXX: Skip check if the server is unknown? + rtt = self._ping() + self.add_sample(rtt) + except ReferenceError: + # Topology was garbage-collected. + self.close() + except Exception: + self._pool.reset() + + def _ping(self) -> float: + """Run a "hello" command and return the RTT.""" + with self._pool.checkout() as conn: + if self._executor._stopped: + raise Exception("_RttMonitor closed") + start = time.monotonic() + conn.hello() + return time.monotonic() - start + + +# Close monitors to cancel any in progress streaming checks before joining +# executor threads. For an explanation of how this works see the comment +# about _EXECUTORS in periodic_executor.py. +_MONITORS = set() + + +def _register(monitor: MonitorBase) -> None: + ref = weakref.ref(monitor, _unregister) + _MONITORS.add(ref) + + +def _unregister(monitor_ref: weakref.ReferenceType[MonitorBase]) -> None: + _MONITORS.remove(monitor_ref) + + +def _shutdown_monitors() -> None: + if _MONITORS is None: + return + + # Copy the set. Closing monitors removes them. + monitors = list(_MONITORS) + + # Close all monitors. + for ref in monitors: + monitor = ref() + if monitor: + monitor.gc_safe_close() + + monitor = None + + +def _shutdown_resources() -> None: + # _shutdown_monitors/_shutdown_executors may already be GC'd at shutdown. 
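+    # Bind each module-level function to a local name and test it before
+    # calling: during interpreter teardown these globals may already have
+    # been replaced with None.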
+ shutdown = _shutdown_monitors + if shutdown: # type:ignore[truthy-function] + shutdown() + shutdown = _shutdown_executors + if shutdown: # type:ignore[truthy-function] + shutdown() + + +atexit.register(_shutdown_resources) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/monitoring.py b/backend/test/lib/python3.8/site-packages/pymongo/monitoring.py new file mode 100644 index 0000000000000000000000000000000000000000..73e15821d3c613ba68385b201eaff2633d6b843f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/monitoring.py @@ -0,0 +1,1824 @@ +# Copyright 2015-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""Tools to monitor driver events. + +.. versionadded:: 3.1 + +.. attention:: Starting in PyMongo 3.11, the monitoring classes outlined below + are included in the PyMongo distribution under the + :mod:`~pymongo.event_loggers` submodule. + +Use :func:`register` to register global listeners for specific events. +Listeners must inherit from one of the abstract classes below and implement +the correct functions for that class. + +For example, a simple command logger might be implemented like this:: + + import logging + + from pymongo import monitoring + + class CommandLogger(monitoring.CommandListener): + + def started(self, event): + logging.info("Command {0.command_name} with request id " + "{0.request_id} started on server " + "{0.connection_id}".format(event)) + + def succeeded(self, event): + logging.info("Command {0.command_name} with request id " + "{0.request_id} on server {0.connection_id} " + "succeeded in {0.duration_micros} " + "microseconds".format(event)) + + def failed(self, event): + logging.info("Command {0.command_name} with request id " + "{0.request_id} on server {0.connection_id} " + "failed in {0.duration_micros} " + "microseconds".format(event)) + + monitoring.register(CommandLogger()) + +Server discovery and monitoring events are also available. For example:: + + class ServerLogger(monitoring.ServerListener): + + def opened(self, event): + logging.info("Server {0.server_address} added to topology " + "{0.topology_id}".format(event)) + + def description_changed(self, event): + previous_server_type = event.previous_description.server_type + new_server_type = event.new_description.server_type + if new_server_type != previous_server_type: + # server_type_name was added in PyMongo 3.4 + logging.info( + "Server {0.server_address} changed type from " + "{0.previous_description.server_type_name} to " + "{0.new_description.server_type_name}".format(event)) + + def closed(self, event): + logging.warning("Server {0.server_address} removed from topology " + "{0.topology_id}".format(event)) + + + class HeartbeatLogger(monitoring.ServerHeartbeatListener): + + def started(self, event): + logging.info("Heartbeat sent to server " + "{0.connection_id}".format(event)) + + def succeeded(self, event): + # The reply.document attribute was added in PyMongo 3.4. 
+ logging.info("Heartbeat to server {0.connection_id} " + "succeeded with reply " + "{0.reply.document}".format(event)) + + def failed(self, event): + logging.warning("Heartbeat to server {0.connection_id} " + "failed with error {0.reply}".format(event)) + + class TopologyLogger(monitoring.TopologyListener): + + def opened(self, event): + logging.info("Topology with id {0.topology_id} " + "opened".format(event)) + + def description_changed(self, event): + logging.info("Topology description updated for " + "topology id {0.topology_id}".format(event)) + previous_topology_type = event.previous_description.topology_type + new_topology_type = event.new_description.topology_type + if new_topology_type != previous_topology_type: + # topology_type_name was added in PyMongo 3.4 + logging.info( + "Topology {0.topology_id} changed type from " + "{0.previous_description.topology_type_name} to " + "{0.new_description.topology_type_name}".format(event)) + # The has_writable_server and has_readable_server methods + # were added in PyMongo 3.4. + if not event.new_description.has_writable_server(): + logging.warning("No writable servers available.") + if not event.new_description.has_readable_server(): + logging.warning("No readable servers available.") + + def closed(self, event): + logging.info("Topology with id {0.topology_id} " + "closed".format(event)) + +Connection monitoring and pooling events are also available. For example:: + + class ConnectionPoolLogger(ConnectionPoolListener): + + def pool_created(self, event): + logging.info("[pool {0.address}] pool created".format(event)) + + def pool_ready(self, event): + logging.info("[pool {0.address}] pool is ready".format(event)) + + def pool_cleared(self, event): + logging.info("[pool {0.address}] pool cleared".format(event)) + + def pool_closed(self, event): + logging.info("[pool {0.address}] pool closed".format(event)) + + def connection_created(self, event): + logging.info("[pool {0.address}][connection #{0.connection_id}] " + "connection created".format(event)) + + def connection_ready(self, event): + logging.info("[pool {0.address}][connection #{0.connection_id}] " + "connection setup succeeded".format(event)) + + def connection_closed(self, event): + logging.info("[pool {0.address}][connection #{0.connection_id}] " + "connection closed, reason: " + "{0.reason}".format(event)) + + def connection_check_out_started(self, event): + logging.info("[pool {0.address}] connection check out " + "started".format(event)) + + def connection_check_out_failed(self, event): + logging.info("[pool {0.address}] connection check out " + "failed, reason: {0.reason}".format(event)) + + def connection_checked_out(self, event): + logging.info("[pool {0.address}][connection #{0.connection_id}] " + "connection checked out of pool".format(event)) + + def connection_checked_in(self, event): + logging.info("[pool {0.address}][connection #{0.connection_id}] " + "connection checked into pool".format(event)) + + +Event listeners can also be registered per instance of +:class:`~pymongo.mongo_client.MongoClient`:: + + client = MongoClient(event_listeners=[CommandLogger()]) + +Note that previously registered global listeners are automatically included +when configuring per client event listeners. Registering a new global listener +will not add that listener to existing client instances. + +.. note:: Events are delivered **synchronously**. Application threads block + waiting for event handlers (e.g. :meth:`~CommandListener.started`) to + return. 
Care must be taken to ensure that your event handlers are efficient + enough to not adversely affect overall application performance. + +.. warning:: The command documents published through this API are *not* copies. + If you intend to modify them in any way you must copy them in your event + handler first. +""" + +from __future__ import annotations + +import datetime +from collections import abc, namedtuple +from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Sequence + +from bson.objectid import ObjectId +from pymongo.hello import Hello, HelloCompat +from pymongo.helpers import _handle_exception +from pymongo.typings import _Address, _DocumentOut + +if TYPE_CHECKING: + from datetime import timedelta + + from pymongo.server_description import ServerDescription + from pymongo.topology_description import TopologyDescription + + +_Listeners = namedtuple( + "_Listeners", + ( + "command_listeners", + "server_listeners", + "server_heartbeat_listeners", + "topology_listeners", + "cmap_listeners", + ), +) + +_LISTENERS = _Listeners([], [], [], [], []) + + +class _EventListener: + """Abstract base class for all event listeners.""" + + +class CommandListener(_EventListener): + """Abstract base class for command listeners. + + Handles `CommandStartedEvent`, `CommandSucceededEvent`, + and `CommandFailedEvent`. + """ + + def started(self, event: "CommandStartedEvent") -> None: + """Abstract method to handle a `CommandStartedEvent`. + + :Parameters: + - `event`: An instance of :class:`CommandStartedEvent`. + """ + raise NotImplementedError + + def succeeded(self, event: "CommandSucceededEvent") -> None: + """Abstract method to handle a `CommandSucceededEvent`. + + :Parameters: + - `event`: An instance of :class:`CommandSucceededEvent`. + """ + raise NotImplementedError + + def failed(self, event: "CommandFailedEvent") -> None: + """Abstract method to handle a `CommandFailedEvent`. + + :Parameters: + - `event`: An instance of :class:`CommandFailedEvent`. + """ + raise NotImplementedError + + +class ConnectionPoolListener(_EventListener): + """Abstract base class for connection pool listeners. + + Handles all of the connection pool events defined in the Connection + Monitoring and Pooling Specification: + :class:`PoolCreatedEvent`, :class:`PoolClearedEvent`, + :class:`PoolClosedEvent`, :class:`ConnectionCreatedEvent`, + :class:`ConnectionReadyEvent`, :class:`ConnectionClosedEvent`, + :class:`ConnectionCheckOutStartedEvent`, + :class:`ConnectionCheckOutFailedEvent`, + :class:`ConnectionCheckedOutEvent`, + and :class:`ConnectionCheckedInEvent`. + + .. versionadded:: 3.9 + """ + + def pool_created(self, event: "PoolCreatedEvent") -> None: + """Abstract method to handle a :class:`PoolCreatedEvent`. + + Emitted when a connection Pool is created. + + :Parameters: + - `event`: An instance of :class:`PoolCreatedEvent`. + """ + raise NotImplementedError + + def pool_ready(self, event: "PoolReadyEvent") -> None: + """Abstract method to handle a :class:`PoolReadyEvent`. + + Emitted when a connection Pool is marked ready. + + :Parameters: + - `event`: An instance of :class:`PoolReadyEvent`. + + .. versionadded:: 4.0 + """ + raise NotImplementedError + + def pool_cleared(self, event: "PoolClearedEvent") -> None: + """Abstract method to handle a `PoolClearedEvent`. + + Emitted when a connection Pool is cleared. + + :Parameters: + - `event`: An instance of :class:`PoolClearedEvent`. 
+ """ + raise NotImplementedError + + def pool_closed(self, event: "PoolClosedEvent") -> None: + """Abstract method to handle a `PoolClosedEvent`. + + Emitted when a connection Pool is closed. + + :Parameters: + - `event`: An instance of :class:`PoolClosedEvent`. + """ + raise NotImplementedError + + def connection_created(self, event: "ConnectionCreatedEvent") -> None: + """Abstract method to handle a :class:`ConnectionCreatedEvent`. + + Emitted when a connection Pool creates a Connection object. + + :Parameters: + - `event`: An instance of :class:`ConnectionCreatedEvent`. + """ + raise NotImplementedError + + def connection_ready(self, event: "ConnectionReadyEvent") -> None: + """Abstract method to handle a :class:`ConnectionReadyEvent`. + + Emitted when a connection has finished its setup, and is now ready to + use. + + :Parameters: + - `event`: An instance of :class:`ConnectionReadyEvent`. + """ + raise NotImplementedError + + def connection_closed(self, event: "ConnectionClosedEvent") -> None: + """Abstract method to handle a :class:`ConnectionClosedEvent`. + + Emitted when a connection Pool closes a connection. + + :Parameters: + - `event`: An instance of :class:`ConnectionClosedEvent`. + """ + raise NotImplementedError + + def connection_check_out_started(self, event: "ConnectionCheckOutStartedEvent") -> None: + """Abstract method to handle a :class:`ConnectionCheckOutStartedEvent`. + + Emitted when the driver starts attempting to check out a connection. + + :Parameters: + - `event`: An instance of :class:`ConnectionCheckOutStartedEvent`. + """ + raise NotImplementedError + + def connection_check_out_failed(self, event: "ConnectionCheckOutFailedEvent") -> None: + """Abstract method to handle a :class:`ConnectionCheckOutFailedEvent`. + + Emitted when the driver's attempt to check out a connection fails. + + :Parameters: + - `event`: An instance of :class:`ConnectionCheckOutFailedEvent`. + """ + raise NotImplementedError + + def connection_checked_out(self, event: "ConnectionCheckedOutEvent") -> None: + """Abstract method to handle a :class:`ConnectionCheckedOutEvent`. + + Emitted when the driver successfully checks out a connection. + + :Parameters: + - `event`: An instance of :class:`ConnectionCheckedOutEvent`. + """ + raise NotImplementedError + + def connection_checked_in(self, event: "ConnectionCheckedInEvent") -> None: + """Abstract method to handle a :class:`ConnectionCheckedInEvent`. + + Emitted when the driver checks in a connection back to the connection + Pool. + + :Parameters: + - `event`: An instance of :class:`ConnectionCheckedInEvent`. + """ + raise NotImplementedError + + +class ServerHeartbeatListener(_EventListener): + """Abstract base class for server heartbeat listeners. + + Handles `ServerHeartbeatStartedEvent`, `ServerHeartbeatSucceededEvent`, + and `ServerHeartbeatFailedEvent`. + + .. versionadded:: 3.3 + """ + + def started(self, event: "ServerHeartbeatStartedEvent") -> None: + """Abstract method to handle a `ServerHeartbeatStartedEvent`. + + :Parameters: + - `event`: An instance of :class:`ServerHeartbeatStartedEvent`. + """ + raise NotImplementedError + + def succeeded(self, event: "ServerHeartbeatSucceededEvent") -> None: + """Abstract method to handle a `ServerHeartbeatSucceededEvent`. + + :Parameters: + - `event`: An instance of :class:`ServerHeartbeatSucceededEvent`. + """ + raise NotImplementedError + + def failed(self, event: "ServerHeartbeatFailedEvent") -> None: + """Abstract method to handle a `ServerHeartbeatFailedEvent`. 
+ + :Parameters: + - `event`: An instance of :class:`ServerHeartbeatFailedEvent`. + """ + raise NotImplementedError + + +class TopologyListener(_EventListener): + """Abstract base class for topology monitoring listeners. + Handles `TopologyOpenedEvent`, `TopologyDescriptionChangedEvent`, and + `TopologyClosedEvent`. + + .. versionadded:: 3.3 + """ + + def opened(self, event: "TopologyOpenedEvent") -> None: + """Abstract method to handle a `TopologyOpenedEvent`. + + :Parameters: + - `event`: An instance of :class:`TopologyOpenedEvent`. + """ + raise NotImplementedError + + def description_changed(self, event: "TopologyDescriptionChangedEvent") -> None: + """Abstract method to handle a `TopologyDescriptionChangedEvent`. + + :Parameters: + - `event`: An instance of :class:`TopologyDescriptionChangedEvent`. + """ + raise NotImplementedError + + def closed(self, event: "TopologyClosedEvent") -> None: + """Abstract method to handle a `TopologyClosedEvent`. + + :Parameters: + - `event`: An instance of :class:`TopologyClosedEvent`. + """ + raise NotImplementedError + + +class ServerListener(_EventListener): + """Abstract base class for server listeners. + Handles `ServerOpeningEvent`, `ServerDescriptionChangedEvent`, and + `ServerClosedEvent`. + + .. versionadded:: 3.3 + """ + + def opened(self, event: "ServerOpeningEvent") -> None: + """Abstract method to handle a `ServerOpeningEvent`. + + :Parameters: + - `event`: An instance of :class:`ServerOpeningEvent`. + """ + raise NotImplementedError + + def description_changed(self, event: "ServerDescriptionChangedEvent") -> None: + """Abstract method to handle a `ServerDescriptionChangedEvent`. + + :Parameters: + - `event`: An instance of :class:`ServerDescriptionChangedEvent`. + """ + raise NotImplementedError + + def closed(self, event: "ServerClosedEvent") -> None: + """Abstract method to handle a `ServerClosedEvent`. + + :Parameters: + - `event`: An instance of :class:`ServerClosedEvent`. + """ + raise NotImplementedError + + +def _to_micros(dur: timedelta) -> int: + """Convert duration 'dur' to microseconds.""" + return int(dur.total_seconds() * 10e5) + + +def _validate_event_listeners( + option: str, listeners: Sequence[_EventListeners] +) -> Sequence[_EventListeners]: + """Validate event listeners""" + if not isinstance(listeners, abc.Sequence): + raise TypeError(f"{option} must be a list or tuple") + for listener in listeners: + if not isinstance(listener, _EventListener): + raise TypeError( + "Listeners for {} must be either a " + "CommandListener, ServerHeartbeatListener, " + "ServerListener, TopologyListener, or " + "ConnectionPoolListener.".format(option) + ) + return listeners + + +def register(listener: _EventListener) -> None: + """Register a global event listener. + + :Parameters: + - `listener`: A subclasses of :class:`CommandListener`, + :class:`ServerHeartbeatListener`, :class:`ServerListener`, + :class:`TopologyListener`, or :class:`ConnectionPoolListener`. 
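+
+    A listener may inherit from more than one of these base classes, in
+    which case it is registered for each matching event category. A
+    minimal sketch (class and handler names are illustrative)::
+
+        import logging
+        from pymongo import monitoring
+
+        class HeartbeatFailureLogger(monitoring.ServerHeartbeatListener):
+            def started(self, event):
+                pass
+
+            def succeeded(self, event):
+                pass
+
+            def failed(self, event):
+                logging.warning("heartbeat failed: %s", event.reply)
+
+        monitoring.register(HeartbeatFailureLogger())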
+ """ + if not isinstance(listener, _EventListener): + raise TypeError( + "Listeners for {} must be either a " + "CommandListener, ServerHeartbeatListener, " + "ServerListener, TopologyListener, or " + "ConnectionPoolListener.".format(listener) + ) + if isinstance(listener, CommandListener): + _LISTENERS.command_listeners.append(listener) + if isinstance(listener, ServerHeartbeatListener): + _LISTENERS.server_heartbeat_listeners.append(listener) + if isinstance(listener, ServerListener): + _LISTENERS.server_listeners.append(listener) + if isinstance(listener, TopologyListener): + _LISTENERS.topology_listeners.append(listener) + if isinstance(listener, ConnectionPoolListener): + _LISTENERS.cmap_listeners.append(listener) + + +# Note - to avoid bugs from forgetting which if these is all lowercase and +# which are camelCase, and at the same time avoid having to add a test for +# every command, use all lowercase here and test against command_name.lower(). +_SENSITIVE_COMMANDS: set = { + "authenticate", + "saslstart", + "saslcontinue", + "getnonce", + "createuser", + "updateuser", + "copydbgetnonce", + "copydbsaslstart", + "copydb", +} + + +# The "hello" command is also deemed sensitive when attempting speculative +# authentication. +def _is_speculative_authenticate(command_name: str, doc: Mapping[str, Any]) -> bool: + if ( + command_name.lower() in ("hello", HelloCompat.LEGACY_CMD) + and "speculativeAuthenticate" in doc + ): + return True + return False + + +class _CommandEvent: + """Base class for command events.""" + + __slots__ = ("__cmd_name", "__rqst_id", "__conn_id", "__op_id", "__service_id") + + def __init__( + self, + command_name: str, + request_id: int, + connection_id: _Address, + operation_id: Optional[int], + service_id: Optional[ObjectId] = None, + ) -> None: + self.__cmd_name = command_name + self.__rqst_id = request_id + self.__conn_id = connection_id + self.__op_id = operation_id + self.__service_id = service_id + + @property + def command_name(self) -> str: + """The command name.""" + return self.__cmd_name + + @property + def request_id(self) -> int: + """The request id for this operation.""" + return self.__rqst_id + + @property + def connection_id(self) -> _Address: + """The address (host, port) of the server this command was sent to.""" + return self.__conn_id + + @property + def service_id(self) -> Optional[ObjectId]: + """The service_id this command was sent to, or ``None``. + + .. versionadded:: 3.12 + """ + return self.__service_id + + @property + def operation_id(self) -> Optional[int]: + """An id for this series of events or None.""" + return self.__op_id + + +class CommandStartedEvent(_CommandEvent): + """Event published when a command starts. + + :Parameters: + - `command`: The command document. + - `database_name`: The name of the database this command was run against. + - `request_id`: The request id for this operation. + - `connection_id`: The address (host, port) of the server this command + was sent to. + - `operation_id`: An optional identifier for a series of related events. + - `service_id`: The service_id this command was sent to, or ``None``. + """ + + __slots__ = ("__cmd", "__db") + + def __init__( + self, + command: _DocumentOut, + database_name: str, + request_id: int, + connection_id: _Address, + operation_id: Optional[int], + service_id: Optional[ObjectId] = None, + ) -> None: + if not command: + raise ValueError(f"{command!r} is not a valid command") + # Command name must be first key. 
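+        # BSON/SON documents preserve key order, so the first key of the
+        # command document is always the command name (e.g. "find").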
+ command_name = next(iter(command)) + super().__init__( + command_name, request_id, connection_id, operation_id, service_id=service_id + ) + cmd_name = command_name.lower() + if cmd_name in _SENSITIVE_COMMANDS or _is_speculative_authenticate(cmd_name, command): + self.__cmd: _DocumentOut = {} + else: + self.__cmd = command + self.__db = database_name + + @property + def command(self) -> _DocumentOut: + """The command document.""" + return self.__cmd + + @property + def database_name(self) -> str: + """The name of the database this command was run against.""" + return self.__db + + def __repr__(self) -> str: + return ("<{} {} db: {!r}, command: {!r}, operation_id: {}, service_id: {}>").format( + self.__class__.__name__, + self.connection_id, + self.database_name, + self.command_name, + self.operation_id, + self.service_id, + ) + + +class CommandSucceededEvent(_CommandEvent): + """Event published when a command succeeds. + + :Parameters: + - `duration`: The command duration as a datetime.timedelta. + - `reply`: The server reply document. + - `command_name`: The command name. + - `request_id`: The request id for this operation. + - `connection_id`: The address (host, port) of the server this command + was sent to. + - `operation_id`: An optional identifier for a series of related events. + - `service_id`: The service_id this command was sent to, or ``None``. + """ + + __slots__ = ("__duration_micros", "__reply") + + def __init__( + self, + duration: datetime.timedelta, + reply: _DocumentOut, + command_name: str, + request_id: int, + connection_id: _Address, + operation_id: Optional[int], + service_id: Optional[ObjectId] = None, + ) -> None: + super().__init__( + command_name, request_id, connection_id, operation_id, service_id=service_id + ) + self.__duration_micros = _to_micros(duration) + cmd_name = command_name.lower() + if cmd_name in _SENSITIVE_COMMANDS or _is_speculative_authenticate(cmd_name, reply): + self.__reply: _DocumentOut = {} + else: + self.__reply = reply + + @property + def duration_micros(self) -> int: + """The duration of this operation in microseconds.""" + return self.__duration_micros + + @property + def reply(self) -> _DocumentOut: + """The server failure document for this operation.""" + return self.__reply + + def __repr__(self) -> str: + return ( + "<{} {} command: {!r}, operation_id: {}, duration_micros: {}, service_id: {}>" + ).format( + self.__class__.__name__, + self.connection_id, + self.command_name, + self.operation_id, + self.duration_micros, + self.service_id, + ) + + +class CommandFailedEvent(_CommandEvent): + """Event published when a command fails. + + :Parameters: + - `duration`: The command duration as a datetime.timedelta. + - `failure`: The server reply document. + - `command_name`: The command name. + - `request_id`: The request id for this operation. + - `connection_id`: The address (host, port) of the server this command + was sent to. + - `operation_id`: An optional identifier for a series of related events. + - `service_id`: The service_id this command was sent to, or ``None``. 
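+
+    .. note:: Unlike the reply on :class:`CommandSucceededEvent`, the
+       failure document is not redacted for sensitive commands.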
+ """ + + __slots__ = ("__duration_micros", "__failure") + + def __init__( + self, + duration: datetime.timedelta, + failure: _DocumentOut, + command_name: str, + request_id: int, + connection_id: _Address, + operation_id: Optional[int], + service_id: Optional[ObjectId] = None, + ) -> None: + super().__init__( + command_name, request_id, connection_id, operation_id, service_id=service_id + ) + self.__duration_micros = _to_micros(duration) + self.__failure = failure + + @property + def duration_micros(self) -> int: + """The duration of this operation in microseconds.""" + return self.__duration_micros + + @property + def failure(self) -> _DocumentOut: + """The server failure document for this operation.""" + return self.__failure + + def __repr__(self) -> str: + return ( + "<{} {} command: {!r}, operation_id: {}, duration_micros: {}, " + "failure: {!r}, service_id: {}>" + ).format( + self.__class__.__name__, + self.connection_id, + self.command_name, + self.operation_id, + self.duration_micros, + self.failure, + self.service_id, + ) + + +class _PoolEvent: + """Base class for pool events.""" + + __slots__ = ("__address",) + + def __init__(self, address: _Address) -> None: + self.__address = address + + @property + def address(self) -> _Address: + """The address (host, port) pair of the server the pool is attempting + to connect to. + """ + return self.__address + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.__address!r})" + + +class PoolCreatedEvent(_PoolEvent): + """Published when a Connection Pool is created. + + :Parameters: + - `address`: The address (host, port) pair of the server this Pool is + attempting to connect to. + + .. versionadded:: 3.9 + """ + + __slots__ = ("__options",) + + def __init__(self, address: _Address, options: Dict[str, Any]) -> None: + super().__init__(address) + self.__options = options + + @property + def options(self) -> Dict[str, Any]: + """Any non-default pool options that were set on this Connection Pool.""" + return self.__options + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.address!r}, {self.__options!r})" + + +class PoolReadyEvent(_PoolEvent): + """Published when a Connection Pool is marked ready. + + :Parameters: + - `address`: The address (host, port) pair of the server this Pool is + attempting to connect to. + + .. versionadded:: 4.0 + """ + + __slots__ = () + + +class PoolClearedEvent(_PoolEvent): + """Published when a Connection Pool is cleared. + + :Parameters: + - `address`: The address (host, port) pair of the server this Pool is + attempting to connect to. + - `service_id`: The service_id this command was sent to, or ``None``. + + .. versionadded:: 3.9 + """ + + __slots__ = ("__service_id",) + + def __init__(self, address: _Address, service_id: Optional[ObjectId] = None) -> None: + super().__init__(address) + self.__service_id = service_id + + @property + def service_id(self) -> Optional[ObjectId]: + """Connections with this service_id are cleared. + + When service_id is ``None``, all connections in the pool are cleared. + + .. versionadded:: 3.12 + """ + return self.__service_id + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.address!r}, {self.__service_id!r})" + + +class PoolClosedEvent(_PoolEvent): + """Published when a Connection Pool is closed. + + :Parameters: + - `address`: The address (host, port) pair of the server this Pool is + attempting to connect to. + + .. 
versionadded:: 3.9 + """ + + __slots__ = () + + +class ConnectionClosedReason: + """An enum that defines values for `reason` on a + :class:`ConnectionClosedEvent`. + + .. versionadded:: 3.9 + """ + + STALE = "stale" + """The pool was cleared, making the connection no longer valid.""" + + IDLE = "idle" + """The connection became stale by being idle for too long (maxIdleTimeMS). + """ + + ERROR = "error" + """The connection experienced an error, making it no longer valid.""" + + POOL_CLOSED = "poolClosed" + """The pool was closed, making the connection no longer valid.""" + + +class ConnectionCheckOutFailedReason: + """An enum that defines values for `reason` on a + :class:`ConnectionCheckOutFailedEvent`. + + .. versionadded:: 3.9 + """ + + TIMEOUT = "timeout" + """The connection check out attempt exceeded the specified timeout.""" + + POOL_CLOSED = "poolClosed" + """The pool was previously closed, and cannot provide new connections.""" + + CONN_ERROR = "connectionError" + """The connection check out attempt experienced an error while setting up + a new connection. + """ + + +class _ConnectionEvent: + """Private base class for connection events.""" + + __slots__ = ("__address",) + + def __init__(self, address: _Address) -> None: + self.__address = address + + @property + def address(self) -> _Address: + """The address (host, port) pair of the server this connection is + attempting to connect to. + """ + return self.__address + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.__address!r})" + + +class _ConnectionIdEvent(_ConnectionEvent): + """Private base class for connection events with an id.""" + + __slots__ = ("__connection_id",) + + def __init__(self, address: _Address, connection_id: int) -> None: + super().__init__(address) + self.__connection_id = connection_id + + @property + def connection_id(self) -> int: + """The ID of the connection.""" + return self.__connection_id + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.address!r}, {self.__connection_id!r})" + + +class ConnectionCreatedEvent(_ConnectionIdEvent): + """Published when a Connection Pool creates a Connection object. + + NOTE: This connection is not ready for use until the + :class:`ConnectionReadyEvent` is published. + + :Parameters: + - `address`: The address (host, port) pair of the server this + Connection is attempting to connect to. + - `connection_id`: The integer ID of the Connection in this Pool. + + .. versionadded:: 3.9 + """ + + __slots__ = () + + +class ConnectionReadyEvent(_ConnectionIdEvent): + """Published when a Connection has finished its setup, and is ready to use. + + :Parameters: + - `address`: The address (host, port) pair of the server this + Connection is attempting to connect to. + - `connection_id`: The integer ID of the Connection in this Pool. + + .. versionadded:: 3.9 + """ + + __slots__ = () + + +class ConnectionClosedEvent(_ConnectionIdEvent): + """Published when a Connection is closed. + + :Parameters: + - `address`: The address (host, port) pair of the server this + Connection is attempting to connect to. + - `connection_id`: The integer ID of the Connection in this Pool. + - `reason`: A reason explaining why this connection was closed. + + .. versionadded:: 3.9 + """ + + __slots__ = ("__reason",) + + def __init__(self, address: _Address, connection_id: int, reason: str): + super().__init__(address, connection_id) + self.__reason = reason + + @property + def reason(self) -> str: + """A reason explaining why this connection was closed. 
+ + The reason must be one of the strings from the + :class:`ConnectionClosedReason` enum. + """ + return self.__reason + + def __repr__(self) -> str: + return "{}({!r}, {!r}, {!r})".format( + self.__class__.__name__, + self.address, + self.connection_id, + self.__reason, + ) + + +class ConnectionCheckOutStartedEvent(_ConnectionEvent): + """Published when the driver starts attempting to check out a connection. + + :Parameters: + - `address`: The address (host, port) pair of the server this + Connection is attempting to connect to. + + .. versionadded:: 3.9 + """ + + __slots__ = () + + +class ConnectionCheckOutFailedEvent(_ConnectionEvent): + """Published when the driver's attempt to check out a connection fails. + + :Parameters: + - `address`: The address (host, port) pair of the server this + Connection is attempting to connect to. + - `reason`: A reason explaining why connection check out failed. + + .. versionadded:: 3.9 + """ + + __slots__ = ("__reason",) + + def __init__(self, address: _Address, reason: str) -> None: + super().__init__(address) + self.__reason = reason + + @property + def reason(self) -> str: + """A reason explaining why connection check out failed. + + The reason must be one of the strings from the + :class:`ConnectionCheckOutFailedReason` enum. + """ + return self.__reason + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.address!r}, {self.__reason!r})" + + +class ConnectionCheckedOutEvent(_ConnectionIdEvent): + """Published when the driver successfully checks out a connection. + + :Parameters: + - `address`: The address (host, port) pair of the server this + Connection is attempting to connect to. + - `connection_id`: The integer ID of the Connection in this Pool. + + .. versionadded:: 3.9 + """ + + __slots__ = () + + +class ConnectionCheckedInEvent(_ConnectionIdEvent): + """Published when the driver checks in a Connection into the Pool. + + :Parameters: + - `address`: The address (host, port) pair of the server this + Connection is attempting to connect to. + - `connection_id`: The integer ID of the Connection in this Pool. + + .. versionadded:: 3.9 + """ + + __slots__ = () + + +class _ServerEvent: + """Base class for server events.""" + + __slots__ = ("__server_address", "__topology_id") + + def __init__(self, server_address: _Address, topology_id: ObjectId) -> None: + self.__server_address = server_address + self.__topology_id = topology_id + + @property + def server_address(self) -> _Address: + """The address (host, port) pair of the server""" + return self.__server_address + + @property + def topology_id(self) -> ObjectId: + """A unique identifier for the topology this server is a part of.""" + return self.__topology_id + + def __repr__(self) -> str: + return "<{} {} topology_id: {}>".format( + self.__class__.__name__, + self.server_address, + self.topology_id, + ) + + +class ServerDescriptionChangedEvent(_ServerEvent): + """Published when server description changes. + + .. versionadded:: 3.3 + """ + + __slots__ = ("__previous_description", "__new_description") + + def __init__( + self, + previous_description: ServerDescription, + new_description: ServerDescription, + *args: Any, + ) -> None: + super().__init__(*args) + self.__previous_description = previous_description + self.__new_description = new_description + + @property + def previous_description(self) -> ServerDescription: + """The previous + :class:`~pymongo.server_description.ServerDescription`. 
+ """ + return self.__previous_description + + @property + def new_description(self) -> ServerDescription: + """The new + :class:`~pymongo.server_description.ServerDescription`. + """ + return self.__new_description + + def __repr__(self) -> str: + return "<{} {} changed from: {}, to: {}>".format( + self.__class__.__name__, + self.server_address, + self.previous_description, + self.new_description, + ) + + +class ServerOpeningEvent(_ServerEvent): + """Published when server is initialized. + + .. versionadded:: 3.3 + """ + + __slots__ = () + + +class ServerClosedEvent(_ServerEvent): + """Published when server is closed. + + .. versionadded:: 3.3 + """ + + __slots__ = () + + +class TopologyEvent: + """Base class for topology description events.""" + + __slots__ = "__topology_id" + + def __init__(self, topology_id: ObjectId) -> None: + self.__topology_id = topology_id + + @property + def topology_id(self) -> ObjectId: + """A unique identifier for the topology this server is a part of.""" + return self.__topology_id + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} topology_id: {self.topology_id}>" + + +class TopologyDescriptionChangedEvent(TopologyEvent): + """Published when the topology description changes. + + .. versionadded:: 3.3 + """ + + __slots__ = ("__previous_description", "__new_description") + + def __init__( + self, + previous_description: TopologyDescription, + new_description: TopologyDescription, + *args: Any, + ) -> None: + super().__init__(*args) + self.__previous_description = previous_description + self.__new_description = new_description + + @property + def previous_description(self) -> TopologyDescription: + """The previous + :class:`~pymongo.topology_description.TopologyDescription`. + """ + return self.__previous_description + + @property + def new_description(self) -> TopologyDescription: + """The new + :class:`~pymongo.topology_description.TopologyDescription`. + """ + return self.__new_description + + def __repr__(self) -> str: + return "<{} topology_id: {} changed from: {}, to: {}>".format( + self.__class__.__name__, + self.topology_id, + self.previous_description, + self.new_description, + ) + + +class TopologyOpenedEvent(TopologyEvent): + """Published when the topology is initialized. + + .. versionadded:: 3.3 + """ + + __slots__ = () + + +class TopologyClosedEvent(TopologyEvent): + """Published when the topology is closed. + + .. versionadded:: 3.3 + """ + + __slots__ = () + + +class _ServerHeartbeatEvent: + """Base class for server heartbeat events.""" + + __slots__ = "__connection_id" + + def __init__(self, connection_id: _Address) -> None: + self.__connection_id = connection_id + + @property + def connection_id(self) -> _Address: + """The address (host, port) of the server this heartbeat was sent + to. + """ + return self.__connection_id + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self.connection_id}>" + + +class ServerHeartbeatStartedEvent(_ServerHeartbeatEvent): + """Published when a heartbeat is started. + + .. versionadded:: 3.3 + """ + + __slots__ = () + + +class ServerHeartbeatSucceededEvent(_ServerHeartbeatEvent): + """Fired when the server heartbeat succeeds. + + .. 
versionadded:: 3.3 + """ + + __slots__ = ("__duration", "__reply", "__awaited") + + def __init__( + self, duration: float, reply: Hello, connection_id: _Address, awaited: bool = False + ) -> None: + super().__init__(connection_id) + self.__duration = duration + self.__reply = reply + self.__awaited = awaited + + @property + def duration(self) -> float: + """The duration of this heartbeat in microseconds.""" + return self.__duration + + @property + def reply(self) -> Hello: + """An instance of :class:`~pymongo.hello.Hello`.""" + return self.__reply + + @property + def awaited(self) -> bool: + """Whether the heartbeat was awaited. + + If true, then :meth:`duration` reflects the sum of the round trip time + to the server and the time that the server waited before sending a + response. + """ + return self.__awaited + + def __repr__(self) -> str: + return "<{} {} duration: {}, awaited: {}, reply: {}>".format( + self.__class__.__name__, + self.connection_id, + self.duration, + self.awaited, + self.reply, + ) + + +class ServerHeartbeatFailedEvent(_ServerHeartbeatEvent): + """Fired when the server heartbeat fails, either with an "ok: 0" + or a socket exception. + + .. versionadded:: 3.3 + """ + + __slots__ = ("__duration", "__reply", "__awaited") + + def __init__( + self, duration: float, reply: Exception, connection_id: _Address, awaited: bool = False + ) -> None: + super().__init__(connection_id) + self.__duration = duration + self.__reply = reply + self.__awaited = awaited + + @property + def duration(self) -> float: + """The duration of this heartbeat in microseconds.""" + return self.__duration + + @property + def reply(self) -> Exception: + """A subclass of :exc:`Exception`.""" + return self.__reply + + @property + def awaited(self) -> bool: + """Whether the heartbeat was awaited. + + If true, then :meth:`duration` reflects the sum of the round trip time + to the server and the time that the server waited before sending a + response. + """ + return self.__awaited + + def __repr__(self) -> str: + return "<{} {} duration: {}, awaited: {}, reply: {!r}>".format( + self.__class__.__name__, + self.connection_id, + self.duration, + self.awaited, + self.reply, + ) + + +class _EventListeners: + """Configure event listeners for a client instance. + + Any event listeners registered globally are included by default. + + :Parameters: + - `listeners`: A list of event listeners. 
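+
+    A minimal registration sketch (the ``CommandLogger`` class below is
+    illustrative, not part of this module)::
+
+        from pymongo import monitoring
+
+        class CommandLogger(monitoring.CommandListener):
+            def started(self, event):
+                print(f"{event.command_name} started")
+
+            def succeeded(self, event):
+                print(f"{event.command_name} took {event.duration_micros} us")
+
+            def failed(self, event):
+                print(f"{event.command_name} failed: {event.failure!r}")
+
+        monitoring.register(CommandLogger())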
+ """ + + def __init__(self, listeners: Optional[Sequence[_EventListener]]): + self.__command_listeners = _LISTENERS.command_listeners[:] + self.__server_listeners = _LISTENERS.server_listeners[:] + lst = _LISTENERS.server_heartbeat_listeners + self.__server_heartbeat_listeners = lst[:] + self.__topology_listeners = _LISTENERS.topology_listeners[:] + self.__cmap_listeners = _LISTENERS.cmap_listeners[:] + if listeners is not None: + for lst in listeners: + if isinstance(lst, CommandListener): + self.__command_listeners.append(lst) + if isinstance(lst, ServerListener): + self.__server_listeners.append(lst) + if isinstance(lst, ServerHeartbeatListener): + self.__server_heartbeat_listeners.append(lst) + if isinstance(lst, TopologyListener): + self.__topology_listeners.append(lst) + if isinstance(lst, ConnectionPoolListener): + self.__cmap_listeners.append(lst) + self.__enabled_for_commands = bool(self.__command_listeners) + self.__enabled_for_server = bool(self.__server_listeners) + self.__enabled_for_server_heartbeat = bool(self.__server_heartbeat_listeners) + self.__enabled_for_topology = bool(self.__topology_listeners) + self.__enabled_for_cmap = bool(self.__cmap_listeners) + + @property + def enabled_for_commands(self) -> bool: + """Are any CommandListener instances registered?""" + return self.__enabled_for_commands + + @property + def enabled_for_server(self) -> bool: + """Are any ServerListener instances registered?""" + return self.__enabled_for_server + + @property + def enabled_for_server_heartbeat(self) -> bool: + """Are any ServerHeartbeatListener instances registered?""" + return self.__enabled_for_server_heartbeat + + @property + def enabled_for_topology(self) -> bool: + """Are any TopologyListener instances registered?""" + return self.__enabled_for_topology + + @property + def enabled_for_cmap(self) -> bool: + """Are any ConnectionPoolListener instances registered?""" + return self.__enabled_for_cmap + + def event_listeners(self) -> List[_EventListeners]: + """List of registered event listeners.""" + return ( + self.__command_listeners + + self.__server_heartbeat_listeners + + self.__server_listeners + + self.__topology_listeners + + self.__cmap_listeners + ) + + def publish_command_start( + self, + command: _DocumentOut, + database_name: str, + request_id: int, + connection_id: _Address, + op_id: Optional[int] = None, + service_id: Optional[ObjectId] = None, + ) -> None: + """Publish a CommandStartedEvent to all command listeners. + + :Parameters: + - `command`: The command document. + - `database_name`: The name of the database this command was run + against. + - `request_id`: The request id for this operation. + - `connection_id`: The address (host, port) of the server this + command was sent to. + - `op_id`: The (optional) operation id for this operation. + - `service_id`: The service_id this command was sent to, or ``None``. + """ + if op_id is None: + op_id = request_id + event = CommandStartedEvent( + command, database_name, request_id, connection_id, op_id, service_id=service_id + ) + for subscriber in self.__command_listeners: + try: + subscriber.started(event) + except Exception: + _handle_exception() + + def publish_command_success( + self, + duration: timedelta, + reply: _DocumentOut, + command_name: str, + request_id: int, + connection_id: _Address, + op_id: Optional[int] = None, + service_id: Optional[ObjectId] = None, + speculative_hello: bool = False, + ) -> None: + """Publish a CommandSucceededEvent to all command listeners. 
+ + :Parameters: + - `duration`: The command duration as a datetime.timedelta. + - `reply`: The server reply document. + - `command_name`: The command name. + - `request_id`: The request id for this operation. + - `connection_id`: The address (host, port) of the server this + command was sent to. + - `op_id`: The (optional) operation id for this operation. + - `service_id`: The service_id this command was sent to, or ``None``. + - `speculative_hello`: Was the command sent with speculative auth? + """ + if op_id is None: + op_id = request_id + if speculative_hello: + # Redact entire response when the command started contained + # speculativeAuthenticate. + reply = {} + event = CommandSucceededEvent( + duration, reply, command_name, request_id, connection_id, op_id, service_id + ) + for subscriber in self.__command_listeners: + try: + subscriber.succeeded(event) + except Exception: + _handle_exception() + + def publish_command_failure( + self, + duration: timedelta, + failure: _DocumentOut, + command_name: str, + request_id: int, + connection_id: _Address, + op_id: Optional[int] = None, + service_id: Optional[ObjectId] = None, + ) -> None: + """Publish a CommandFailedEvent to all command listeners. + + :Parameters: + - `duration`: The command duration as a datetime.timedelta. + - `failure`: The server reply document or failure description + document. + - `command_name`: The command name. + - `request_id`: The request id for this operation. + - `connection_id`: The address (host, port) of the server this + command was sent to. + - `op_id`: The (optional) operation id for this operation. + - `service_id`: The service_id this command was sent to, or ``None``. + """ + if op_id is None: + op_id = request_id + event = CommandFailedEvent( + duration, failure, command_name, request_id, connection_id, op_id, service_id=service_id + ) + for subscriber in self.__command_listeners: + try: + subscriber.failed(event) + except Exception: + _handle_exception() + + def publish_server_heartbeat_started(self, connection_id: _Address) -> None: + """Publish a ServerHeartbeatStartedEvent to all server heartbeat + listeners. + + :Parameters: + - `connection_id`: The address (host, port) pair of the connection. + """ + event = ServerHeartbeatStartedEvent(connection_id) + for subscriber in self.__server_heartbeat_listeners: + try: + subscriber.started(event) + except Exception: + _handle_exception() + + def publish_server_heartbeat_succeeded( + self, connection_id: _Address, duration: float, reply: Hello, awaited: bool + ) -> None: + """Publish a ServerHeartbeatSucceededEvent to all server heartbeat + listeners. + + :Parameters: + - `connection_id`: The address (host, port) pair of the connection. + - `duration`: The execution time of the event in the highest possible + resolution for the platform. + - `reply`: The command reply. + - `awaited`: True if the response was awaited. + """ + event = ServerHeartbeatSucceededEvent(duration, reply, connection_id, awaited) + for subscriber in self.__server_heartbeat_listeners: + try: + subscriber.succeeded(event) + except Exception: + _handle_exception() + + def publish_server_heartbeat_failed( + self, connection_id: _Address, duration: float, reply: Exception, awaited: bool + ) -> None: + """Publish a ServerHeartbeatFailedEvent to all server heartbeat + listeners. + + :Parameters: + - `connection_id`: The address (host, port) pair of the connection. + - `duration`: The execution time of the event in the highest possible + resolution for the platform. 
+ - `reply`: The command reply. + - `awaited`: True if the response was awaited. + """ + event = ServerHeartbeatFailedEvent(duration, reply, connection_id, awaited) + for subscriber in self.__server_heartbeat_listeners: + try: + subscriber.failed(event) + except Exception: + _handle_exception() + + def publish_server_opened(self, server_address: _Address, topology_id: ObjectId) -> None: + """Publish a ServerOpeningEvent to all server listeners. + + :Parameters: + - `server_address`: The address (host, port) pair of the server. + - `topology_id`: A unique identifier for the topology this server + is a part of. + """ + event = ServerOpeningEvent(server_address, topology_id) + for subscriber in self.__server_listeners: + try: + subscriber.opened(event) + except Exception: + _handle_exception() + + def publish_server_closed(self, server_address: _Address, topology_id: ObjectId) -> None: + """Publish a ServerClosedEvent to all server listeners. + + :Parameters: + - `server_address`: The address (host, port) pair of the server. + - `topology_id`: A unique identifier for the topology this server + is a part of. + """ + event = ServerClosedEvent(server_address, topology_id) + for subscriber in self.__server_listeners: + try: + subscriber.closed(event) + except Exception: + _handle_exception() + + def publish_server_description_changed( + self, + previous_description: ServerDescription, + new_description: ServerDescription, + server_address: _Address, + topology_id: ObjectId, + ) -> None: + """Publish a ServerDescriptionChangedEvent to all server listeners. + + :Parameters: + - `previous_description`: The previous server description. + - `server_address`: The address (host, port) pair of the server. + - `new_description`: The new server description. + - `topology_id`: A unique identifier for the topology this server + is a part of. + """ + event = ServerDescriptionChangedEvent( + previous_description, new_description, server_address, topology_id + ) + for subscriber in self.__server_listeners: + try: + subscriber.description_changed(event) + except Exception: + _handle_exception() + + def publish_topology_opened(self, topology_id: ObjectId) -> None: + """Publish a TopologyOpenedEvent to all topology listeners. + + :Parameters: + - `topology_id`: A unique identifier for the topology this server + is a part of. + """ + event = TopologyOpenedEvent(topology_id) + for subscriber in self.__topology_listeners: + try: + subscriber.opened(event) + except Exception: + _handle_exception() + + def publish_topology_closed(self, topology_id: ObjectId) -> None: + """Publish a TopologyClosedEvent to all topology listeners. + + :Parameters: + - `topology_id`: A unique identifier for the topology this server + is a part of. + """ + event = TopologyClosedEvent(topology_id) + for subscriber in self.__topology_listeners: + try: + subscriber.closed(event) + except Exception: + _handle_exception() + + def publish_topology_description_changed( + self, + previous_description: TopologyDescription, + new_description: TopologyDescription, + topology_id: ObjectId, + ) -> None: + """Publish a TopologyDescriptionChangedEvent to all topology listeners. + + :Parameters: + - `previous_description`: The previous topology description. + - `new_description`: The new topology description. + - `topology_id`: A unique identifier for the topology this server + is a part of. 
+ """ + event = TopologyDescriptionChangedEvent(previous_description, new_description, topology_id) + for subscriber in self.__topology_listeners: + try: + subscriber.description_changed(event) + except Exception: + _handle_exception() + + def publish_pool_created(self, address: _Address, options: Dict[str, Any]) -> None: + """Publish a :class:`PoolCreatedEvent` to all pool listeners.""" + event = PoolCreatedEvent(address, options) + for subscriber in self.__cmap_listeners: + try: + subscriber.pool_created(event) + except Exception: + _handle_exception() + + def publish_pool_ready(self, address: _Address) -> None: + """Publish a :class:`PoolReadyEvent` to all pool listeners.""" + event = PoolReadyEvent(address) + for subscriber in self.__cmap_listeners: + try: + subscriber.pool_ready(event) + except Exception: + _handle_exception() + + def publish_pool_cleared(self, address: _Address, service_id: Optional[ObjectId]) -> None: + """Publish a :class:`PoolClearedEvent` to all pool listeners.""" + event = PoolClearedEvent(address, service_id) + for subscriber in self.__cmap_listeners: + try: + subscriber.pool_cleared(event) + except Exception: + _handle_exception() + + def publish_pool_closed(self, address: _Address) -> None: + """Publish a :class:`PoolClosedEvent` to all pool listeners.""" + event = PoolClosedEvent(address) + for subscriber in self.__cmap_listeners: + try: + subscriber.pool_closed(event) + except Exception: + _handle_exception() + + def publish_connection_created(self, address: _Address, connection_id: int) -> None: + """Publish a :class:`ConnectionCreatedEvent` to all connection + listeners. + """ + event = ConnectionCreatedEvent(address, connection_id) + for subscriber in self.__cmap_listeners: + try: + subscriber.connection_created(event) + except Exception: + _handle_exception() + + def publish_connection_ready(self, address: _Address, connection_id: int) -> None: + """Publish a :class:`ConnectionReadyEvent` to all connection listeners.""" + event = ConnectionReadyEvent(address, connection_id) + for subscriber in self.__cmap_listeners: + try: + subscriber.connection_ready(event) + except Exception: + _handle_exception() + + def publish_connection_closed(self, address: _Address, connection_id: int, reason: str) -> None: + """Publish a :class:`ConnectionClosedEvent` to all connection + listeners. + """ + event = ConnectionClosedEvent(address, connection_id, reason) + for subscriber in self.__cmap_listeners: + try: + subscriber.connection_closed(event) + except Exception: + _handle_exception() + + def publish_connection_check_out_started(self, address: _Address) -> None: + """Publish a :class:`ConnectionCheckOutStartedEvent` to all connection + listeners. + """ + event = ConnectionCheckOutStartedEvent(address) + for subscriber in self.__cmap_listeners: + try: + subscriber.connection_check_out_started(event) + except Exception: + _handle_exception() + + def publish_connection_check_out_failed(self, address: _Address, reason: str) -> None: + """Publish a :class:`ConnectionCheckOutFailedEvent` to all connection + listeners. + """ + event = ConnectionCheckOutFailedEvent(address, reason) + for subscriber in self.__cmap_listeners: + try: + subscriber.connection_check_out_failed(event) + except Exception: + _handle_exception() + + def publish_connection_checked_out(self, address: _Address, connection_id: int) -> None: + """Publish a :class:`ConnectionCheckedOutEvent` to all connection + listeners. 
+ """ + event = ConnectionCheckedOutEvent(address, connection_id) + for subscriber in self.__cmap_listeners: + try: + subscriber.connection_checked_out(event) + except Exception: + _handle_exception() + + def publish_connection_checked_in(self, address: _Address, connection_id: int) -> None: + """Publish a :class:`ConnectionCheckedInEvent` to all connection + listeners. + """ + event = ConnectionCheckedInEvent(address, connection_id) + for subscriber in self.__cmap_listeners: + try: + subscriber.connection_checked_in(event) + except Exception: + _handle_exception() diff --git a/backend/test/lib/python3.8/site-packages/pymongo/network.py b/backend/test/lib/python3.8/site-packages/pymongo/network.py new file mode 100644 index 0000000000000000000000000000000000000000..df540f1a3f82f19d10aacf27b4ec33888a566758 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/network.py @@ -0,0 +1,340 @@ +# Copyright 2015-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Internal network layer helper methods.""" +from __future__ import annotations + +import datetime +import errno +import socket +import struct +import time +from typing import ( + TYPE_CHECKING, + Any, + Mapping, + MutableMapping, + Optional, + Sequence, + Union, +) + +from bson import _decode_all_selective +from pymongo import _csot, helpers, message, ssl_support +from pymongo.common import MAX_MESSAGE_SIZE +from pymongo.compression_support import _NO_COMPRESSION, decompress +from pymongo.errors import ( + NotPrimaryError, + OperationFailure, + ProtocolError, + _OperationCancelled, +) +from pymongo.message import _UNPACK_REPLY, _OpMsg, _OpReply +from pymongo.monitoring import _is_speculative_authenticate +from pymongo.socket_checker import _errno_from_exception + +if TYPE_CHECKING: + from bson import CodecOptions + from pymongo.client_session import ClientSession + from pymongo.compression_support import SnappyContext, ZlibContext, ZstdContext + from pymongo.mongo_client import MongoClient + from pymongo.monitoring import _EventListeners + from pymongo.pool import Connection + from pymongo.read_concern import ReadConcern + from pymongo.read_preferences import _ServerMode + from pymongo.typings import _Address, _CollationIn, _DocumentOut, _DocumentType + from pymongo.write_concern import WriteConcern + +_UNPACK_HEADER = struct.Struct("<iiii").unpack + + +def command( + conn: Connection, + dbname: str, + spec: MutableMapping[str, Any], + is_mongos: bool, + read_preference: Optional[_ServerMode], + codec_options: CodecOptions[_DocumentType], + session: Optional[ClientSession], + client: Optional[MongoClient], + check: bool = True, + allowable_errors: Optional[Sequence[Union[str, int]]] = None, + address: Optional[_Address] = None, + listeners: Optional[_EventListeners] = None, + max_bson_size: Optional[int] = None, + read_concern: Optional[ReadConcern] = None, + parse_write_concern_error: bool = False, + collation: Optional[_CollationIn] = None, + compression_ctx: Union[SnappyContext, ZlibContext, 
ZstdContext, None] = None, + use_op_msg: bool = False, + unacknowledged: bool = False, + user_fields: Optional[Mapping[str, Any]] = None, + exhaust_allowed: bool = False, + write_concern: Optional[WriteConcern] = None, +) -> _DocumentType: + """Execute a command over the socket, or raise socket.error. + + :Parameters: + - `conn`: a Connection instance + - `dbname`: name of the database on which to run the command + - `spec`: a command document as an ordered dict type, eg SON. + - `is_mongos`: are we connected to a mongos? + - `read_preference`: a read preference + - `codec_options`: a CodecOptions instance + - `session`: optional ClientSession instance. + - `client`: optional MongoClient instance for updating $clusterTime. + - `check`: raise OperationFailure if there are errors + - `allowable_errors`: errors to ignore if `check` is True + - `address`: the (host, port) of `conn` + - `listeners`: An instance of :class:`~pymongo.monitoring.EventListeners` + - `max_bson_size`: The maximum encoded bson size for this server + - `read_concern`: The read concern for this command. + - `parse_write_concern_error`: Whether to parse the ``writeConcernError`` + field in the command response. + - `collation`: The collation for this command. + - `compression_ctx`: optional compression Context. + - `use_op_msg`: True if we should use OP_MSG. + - `unacknowledged`: True if this is an unacknowledged command. + - `user_fields` (optional): Response fields that should be decoded + using the TypeDecoders from codec_options, passed to + bson._decode_all_selective. + - `exhaust_allowed`: True if we should enable OP_MSG exhaustAllowed. + """ + name = next(iter(spec)) + ns = dbname + ".$cmd" + speculative_hello = False + + # Publish the original command document, perhaps with lsid and $clusterTime. + orig = spec + if is_mongos and not use_op_msg: + assert read_preference is not None + spec = message._maybe_add_read_preference(spec, read_preference) + if read_concern and not (session and session.in_transaction): + if read_concern.level: + spec["readConcern"] = read_concern.document + if session: + session._update_read_concern(spec, conn) + if collation is not None: + spec["collation"] = collation + + publish = listeners is not None and listeners.enabled_for_commands + if publish: + start = datetime.datetime.now() + speculative_hello = _is_speculative_authenticate(name, spec) + + if compression_ctx and name.lower() in _NO_COMPRESSION: + compression_ctx = None + + if client and client._encrypter and not client._encrypter._bypass_auto_encryption: + spec = orig = client._encrypter.encrypt(dbname, spec, codec_options) + + # Support CSOT + if client: + conn.apply_timeout(client, spec) + _csot.apply_write_concern(spec, write_concern) + + if use_op_msg: + flags = _OpMsg.MORE_TO_COME if unacknowledged else 0 + flags |= _OpMsg.EXHAUST_ALLOWED if exhaust_allowed else 0 + request_id, msg, size, max_doc_size = message._op_msg( + flags, spec, dbname, read_preference, codec_options, ctx=compression_ctx + ) + # If this is an unacknowledged write then make sure the encoded doc(s) + # are small enough, otherwise rely on the server to return an error. 
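+        # With the MORE_TO_COME flag set the server sends no reply at
+        # all, so an oversized document could never surface a server
+        # error; that is why the limit is enforced client-side for the
+        # unacknowledged case only.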
+ if unacknowledged and max_bson_size is not None and max_doc_size > max_bson_size: + message._raise_document_too_large(name, size, max_bson_size) + else: + request_id, msg, size = message._query( + 0, ns, 0, -1, spec, None, codec_options, compression_ctx + ) + + if max_bson_size is not None and size > max_bson_size + message._COMMAND_OVERHEAD: + message._raise_document_too_large(name, size, max_bson_size + message._COMMAND_OVERHEAD) + + if publish: + encoding_duration = datetime.datetime.now() - start + assert listeners is not None + assert address is not None + listeners.publish_command_start( + orig, dbname, request_id, address, service_id=conn.service_id + ) + start = datetime.datetime.now() + + try: + conn.conn.sendall(msg) + if use_op_msg and unacknowledged: + # Unacknowledged, fake a successful command response. + reply = None + response_doc: _DocumentOut = {"ok": 1} + else: + reply = receive_message(conn, request_id) + conn.more_to_come = reply.more_to_come + unpacked_docs = reply.unpack_response( + codec_options=codec_options, user_fields=user_fields + ) + + response_doc = unpacked_docs[0] + if client: + client._process_response(response_doc, session) + if check: + helpers._check_command_response( + response_doc, + conn.max_wire_version, + allowable_errors, + parse_write_concern_error=parse_write_concern_error, + ) + except Exception as exc: + if publish: + duration = (datetime.datetime.now() - start) + encoding_duration + if isinstance(exc, (NotPrimaryError, OperationFailure)): + failure: _DocumentOut = exc.details # type: ignore[assignment] + else: + failure = message._convert_exception(exc) + assert listeners is not None + assert address is not None + listeners.publish_command_failure( + duration, failure, name, request_id, address, service_id=conn.service_id + ) + raise + if publish: + duration = (datetime.datetime.now() - start) + encoding_duration + assert listeners is not None + assert address is not None + listeners.publish_command_success( + duration, + response_doc, + name, + request_id, + address, + service_id=conn.service_id, + speculative_hello=speculative_hello, + ) + + if client and client._encrypter and reply: + decrypted = client._encrypter.decrypt(reply.raw_command_response()) + response_doc = _decode_all_selective(decrypted, codec_options, user_fields)[0] + + return response_doc # type: ignore[return-value] + + +_UNPACK_COMPRESSION_HEADER = struct.Struct("<iiB").unpack + + +def receive_message( + conn: Connection, request_id: Optional[int], max_message_size: int = MAX_MESSAGE_SIZE +) -> Union[_OpReply, _OpMsg]: + """Receive a raw BSON message or raise socket.error.""" + if _csot.get_timeout(): + deadline = _csot.get_deadline() + else: + timeout = conn.conn.gettimeout() + if timeout: + deadline = time.monotonic() + timeout + else: + deadline = None + # Ignore the response's request id. + length, _, response_to, op_code = _UNPACK_HEADER(_receive_data_on_socket(conn, 16, deadline)) + # No request_id for exhaust cursor "getMore". 
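+    # The 16-byte standard header is four little-endian int32s:
+    # messageLength, requestID, responseTo, opCode.  responseTo must
+    # echo the requestID of the message we sent, except for exhaust
+    # cursors, where the caller passes request_id=None.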
+ if request_id is not None: + if request_id != response_to: + raise ProtocolError(f"Got response id {response_to!r} but expected {request_id!r}") + if length <= 16: + raise ProtocolError( + f"Message length ({length!r}) not longer than standard message header size (16)" + ) + if length > max_message_size: + raise ProtocolError( + "Message length ({!r}) is larger than server max " + "message size ({!r})".format(length, max_message_size) + ) + if op_code == 2012: + op_code, _, compressor_id = _UNPACK_COMPRESSION_HEADER( + _receive_data_on_socket(conn, 9, deadline) + ) + data = decompress(_receive_data_on_socket(conn, length - 25, deadline), compressor_id) + else: + data = _receive_data_on_socket(conn, length - 16, deadline) + + try: + unpack_reply = _UNPACK_REPLY[op_code] + except KeyError: + raise ProtocolError(f"Got opcode {op_code!r} but expected {_UNPACK_REPLY.keys()!r}") + return unpack_reply(data) + + +_POLL_TIMEOUT = 0.5 + + +def wait_for_read(conn: Connection, deadline: Optional[float]) -> None: + """Block until at least one byte is read, or a timeout, or a cancel.""" + context = conn.cancel_context + # Only Monitor connections can be cancelled. + if context: + sock = conn.conn + timed_out = False + while True: + # SSLSocket can have buffered data which won't be caught by select. + if hasattr(sock, "pending") and sock.pending() > 0: + readable = True + else: + # Wait up to 500ms for the socket to become readable and then + # check for cancellation. + if deadline: + remaining = deadline - time.monotonic() + # When the timeout has expired perform one final check to + # see if the socket is readable. This helps avoid spurious + # timeouts on AWS Lambda and other FaaS environments. + if remaining <= 0: + timed_out = True + timeout = max(min(remaining, _POLL_TIMEOUT), 0) + else: + timeout = _POLL_TIMEOUT + readable = conn.socket_checker.select(sock, read=True, timeout=timeout) + if context.cancelled: + raise _OperationCancelled("hello cancelled") + if readable: + return + if timed_out: + raise socket.timeout("timed out") + + +# Errors raised by sockets (and TLS sockets) when in non-blocking mode. +BLOCKING_IO_ERRORS = (BlockingIOError, *ssl_support.BLOCKING_IO_ERRORS) + + +def _receive_data_on_socket(conn: Connection, length: int, deadline: Optional[float]) -> memoryview: + buf = bytearray(length) + mv = memoryview(buf) + bytes_read = 0 + while bytes_read < length: + try: + wait_for_read(conn, deadline) + # CSOT: Update timeout. When the timeout has expired perform one + # final non-blocking recv. This helps avoid spurious timeouts when + # the response is actually already buffered on the client. + if _csot.get_timeout() and deadline is not None: + conn.set_conn_timeout(max(deadline - time.monotonic(), 0)) + chunk_length = conn.conn.recv_into(mv[bytes_read:]) + except BLOCKING_IO_ERRORS: + raise socket.timeout("timed out") + except OSError as exc: # noqa: B014 + if _errno_from_exception(exc) == errno.EINTR: + continue + raise + if chunk_length == 0: + raise OSError("connection closed") + + bytes_read += chunk_length + + return mv diff --git a/backend/test/lib/python3.8/site-packages/pymongo/ocsp_cache.py b/backend/test/lib/python3.8/site-packages/pymongo/ocsp_cache.py new file mode 100644 index 0000000000000000000000000000000000000000..033a7b607ac46613fbc33cbc9aaf5cfd416fd64e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/ocsp_cache.py @@ -0,0 +1,108 @@ +# Copyright 2020-present MongoDB, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities for caching OCSP responses.""" + +from __future__ import annotations + +from collections import namedtuple +from datetime import datetime as _datetime +from datetime import timezone +from typing import TYPE_CHECKING, Any, Dict + +from pymongo.lock import _create_lock + +if TYPE_CHECKING: + from cryptography.x509.ocsp import OCSPRequest, OCSPResponse + + +class _OCSPCache: + """A cache for OCSP responses.""" + + CACHE_KEY_TYPE = namedtuple( # type: ignore + "OcspResponseCacheKey", + ["hash_algorithm", "issuer_name_hash", "issuer_key_hash", "serial_number"], + ) + + def __init__(self) -> None: + self._data: Dict[Any, OCSPResponse] = {} + # Hold this lock when accessing _data. + self._lock = _create_lock() + + def _get_cache_key(self, ocsp_request: OCSPRequest) -> CACHE_KEY_TYPE: + return self.CACHE_KEY_TYPE( + hash_algorithm=ocsp_request.hash_algorithm.name.lower(), + issuer_name_hash=ocsp_request.issuer_name_hash, + issuer_key_hash=ocsp_request.issuer_key_hash, + serial_number=ocsp_request.serial_number, + ) + + def __setitem__(self, key: OCSPRequest, value: OCSPResponse) -> None: + """Add/update a cache entry. + + 'key' is of type cryptography.x509.ocsp.OCSPRequest + 'value' is of type cryptography.x509.ocsp.OCSPResponse + + Validity of the OCSP response must be checked by caller. + """ + with self._lock: + cache_key = self._get_cache_key(key) + + # As per the OCSP protocol, if the response's nextUpdate field is + # not set, the responder is indicating that newer revocation + # information is available all the time. + if value.next_update is None: + self._data.pop(cache_key, None) + return + + # Do nothing if the response is invalid. + if not ( + value.this_update + <= _datetime.now(tz=timezone.utc).replace(tzinfo=None) + < value.next_update + ): + return + + # Cache new response OR update cached response if new response + # has longer validity. + cached_value = self._data.get(cache_key, None) + if cached_value is None or ( + cached_value.next_update is not None + and cached_value.next_update < value.next_update + ): + self._data[cache_key] = value + + def __getitem__(self, item: OCSPRequest) -> OCSPResponse: + """Get a cache entry if it exists. + + 'item' is of type cryptography.x509.ocsp.OCSPRequest + + Raises KeyError if the item is not in the cache. + """ + with self._lock: + cache_key = self._get_cache_key(item) + value = self._data[cache_key] + + # Return cached response if it is still valid. 
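+            # A cached response is usable only inside the half-open
+            # window [this_update, next_update); a stale entry is
+            # evicted below and KeyError signals the cache miss.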
+ assert value.this_update is not None + assert value.next_update is not None + if ( + value.this_update + <= _datetime.now(tz=timezone.utc).replace(tzinfo=None) + < value.next_update + ): + return value + + self._data.pop(cache_key, None) + raise KeyError(cache_key) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/ocsp_support.py b/backend/test/lib/python3.8/site-packages/pymongo/ocsp_support.py new file mode 100644 index 0000000000000000000000000000000000000000..292ee1bbf87f22251c437332ca5210540f9e080a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/ocsp_support.py @@ -0,0 +1,432 @@ +# Copyright 2020-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""Support for requesting and verifying OCSP responses.""" +from __future__ import annotations + +import logging as _logging +import re as _re +from datetime import datetime as _datetime +from datetime import timezone +from typing import TYPE_CHECKING, Iterable, List, Optional, Type, Union + +from cryptography.exceptions import InvalidSignature as _InvalidSignature +from cryptography.hazmat.backends import default_backend as _default_backend +from cryptography.hazmat.primitives.asymmetric.dsa import DSAPublicKey as _DSAPublicKey +from cryptography.hazmat.primitives.asymmetric.ec import ECDSA as _ECDSA +from cryptography.hazmat.primitives.asymmetric.ec import ( + EllipticCurvePublicKey as _EllipticCurvePublicKey, +) +from cryptography.hazmat.primitives.asymmetric.padding import PKCS1v15 as _PKCS1v15 +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey as _RSAPublicKey +from cryptography.hazmat.primitives.asymmetric.x448 import ( + X448PublicKey as _X448PublicKey, +) +from cryptography.hazmat.primitives.asymmetric.x25519 import ( + X25519PublicKey as _X25519PublicKey, +) +from cryptography.hazmat.primitives.hashes import SHA1 as _SHA1 +from cryptography.hazmat.primitives.hashes import Hash as _Hash +from cryptography.hazmat.primitives.serialization import Encoding as _Encoding +from cryptography.hazmat.primitives.serialization import PublicFormat as _PublicFormat +from cryptography.x509 import AuthorityInformationAccess as _AuthorityInformationAccess +from cryptography.x509 import ExtendedKeyUsage as _ExtendedKeyUsage +from cryptography.x509 import ExtensionNotFound as _ExtensionNotFound +from cryptography.x509 import TLSFeature as _TLSFeature +from cryptography.x509 import TLSFeatureType as _TLSFeatureType +from cryptography.x509 import load_pem_x509_certificate as _load_pem_x509_certificate +from cryptography.x509.ocsp import OCSPCertStatus as _OCSPCertStatus +from cryptography.x509.ocsp import OCSPRequestBuilder as _OCSPRequestBuilder +from cryptography.x509.ocsp import OCSPResponseStatus as _OCSPResponseStatus +from cryptography.x509.ocsp import load_der_ocsp_response as _load_der_ocsp_response +from cryptography.x509.oid import ( + AuthorityInformationAccessOID as _AuthorityInformationAccessOID, +) +from cryptography.x509.oid import ExtendedKeyUsageOID as 
_ExtendedKeyUsageOID +from requests import post as _post +from requests.exceptions import RequestException as _RequestException + +from pymongo import _csot + +if TYPE_CHECKING: + from cryptography.hazmat.primitives.asymmetric import ( + dsa, + ec, + ed448, + ed25519, + rsa, + x448, + x25519, + ) + from cryptography.hazmat.primitives.asymmetric.utils import Prehashed + from cryptography.hazmat.primitives.hashes import HashAlgorithm + from cryptography.x509 import Certificate, Name + from cryptography.x509.extensions import Extension, ExtensionTypeVar + from cryptography.x509.ocsp import OCSPRequest, OCSPResponse + from OpenSSL.SSL import Connection + + from pymongo.ocsp_cache import _OCSPCache + from pymongo.pyopenssl_context import _CallbackData + + CertificateIssuerPublicKeyTypes = Union[ + dsa.DSAPublicKey, + rsa.RSAPublicKey, + ec.EllipticCurvePublicKey, + ed25519.Ed25519PublicKey, + ed448.Ed448PublicKey, + x25519.X25519PublicKey, + x448.X448PublicKey, + ] + +# Note: the functions in this module generally return 1 or 0. The reason +# is simple. The entry point, ocsp_callback, is registered as a callback +# with OpenSSL through PyOpenSSL. The callback must return 1 (success) or +# 0 (failure). + +_LOGGER = _logging.getLogger(__name__) + +_CERT_REGEX = _re.compile( + b"-----BEGIN CERTIFICATE[^\r\n]+.+?-----END CERTIFICATE[^\r\n]+", _re.DOTALL +) + + +def _load_trusted_ca_certs(cafile: str) -> List[Certificate]: + """Parse the tlsCAFile into a list of certificates.""" + with open(cafile, "rb") as f: + data = f.read() + + # Load all the certs in the file. + trusted_ca_certs = [] + backend = _default_backend() + for cert_data in _re.findall(_CERT_REGEX, data): + trusted_ca_certs.append(_load_pem_x509_certificate(cert_data, backend)) + return trusted_ca_certs + + +def _get_issuer_cert( + cert: Certificate, chain: Iterable[Certificate], trusted_ca_certs: Optional[List[Certificate]] +) -> Optional[Certificate]: + issuer_name = cert.issuer + for candidate in chain: + if candidate.subject == issuer_name: + return candidate + + # Depending on the server's TLS library, the peer's cert chain may not + # include the self signed root CA. In this case we check the user + # provided tlsCAFile for the issuer. + # Remove once we use the verified peer cert chain in PYTHON-2147. + if trusted_ca_certs: + for candidate in trusted_ca_certs: + if candidate.subject == issuer_name: + return candidate + return None + + +def _verify_signature( + key: CertificateIssuerPublicKeyTypes, + signature: bytes, + algorithm: Union[Prehashed, HashAlgorithm, None], + data: bytes, +) -> int: + # See cryptography.x509.Certificate.public_key + # for the public key types. 
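+    # Each key type has its own verify() signature in `cryptography`:
+    # RSA needs PKCS#1 v1.5 padding, DSA takes the hash algorithm
+    # directly, EC wraps it in ECDSA, and the Ed25519/Ed448 keys that
+    # reach the final else branch hash internally, taking only
+    # (signature, data).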
+ try: + if isinstance(key, _RSAPublicKey): + key.verify(signature, data, _PKCS1v15(), algorithm) # type: ignore[arg-type] + elif isinstance(key, _DSAPublicKey): + key.verify(signature, data, algorithm) # type: ignore[arg-type] + elif isinstance(key, _EllipticCurvePublicKey): + key.verify(signature, data, _ECDSA(algorithm)) # type: ignore[arg-type] + elif isinstance( + key, (_X25519PublicKey, _X448PublicKey) + ): # Curve25519 and Curve448 keys do not require verification + return 1 + else: + key.verify(signature, data) + except _InvalidSignature: + return 0 + return 1 + + +def _get_extension( + cert: Certificate, klass: Type[ExtensionTypeVar] +) -> Optional[Extension[ExtensionTypeVar]]: + try: + return cert.extensions.get_extension_for_class(klass) + except _ExtensionNotFound: + return None + + +def _public_key_hash(cert: Certificate) -> bytes: + public_key = cert.public_key() + # https://tools.ietf.org/html/rfc2560#section-4.2.1 + # "KeyHash ::= OCTET STRING -- SHA-1 hash of responder's public key + # (excluding the tag and length fields)" + # https://stackoverflow.com/a/46309453/600498 + if isinstance(public_key, _RSAPublicKey): + pbytes = public_key.public_bytes(_Encoding.DER, _PublicFormat.PKCS1) + elif isinstance(public_key, _EllipticCurvePublicKey): + pbytes = public_key.public_bytes(_Encoding.X962, _PublicFormat.UncompressedPoint) + else: + pbytes = public_key.public_bytes(_Encoding.DER, _PublicFormat.SubjectPublicKeyInfo) + digest = _Hash(_SHA1(), backend=_default_backend()) + digest.update(pbytes) + return digest.finalize() + + +def _get_certs_by_key_hash( + certificates: Iterable[Certificate], issuer: Certificate, responder_key_hash: Optional[bytes] +) -> List[Certificate]: + return [ + cert + for cert in certificates + if _public_key_hash(cert) == responder_key_hash and cert.issuer == issuer.subject + ] + + +def _get_certs_by_name( + certificates: Iterable[Certificate], issuer: Certificate, responder_name: Optional[Name] +) -> List[Certificate]: + return [ + cert + for cert in certificates + if cert.subject == responder_name and cert.issuer == issuer.subject + ] + + +def _verify_response_signature(issuer: Certificate, response: OCSPResponse) -> int: + # Response object will have a responder_name or responder_key_hash + # not both. + name = response.responder_name + rkey_hash = response.responder_key_hash + ikey_hash = response.issuer_key_hash + if name is not None and name == issuer.subject or rkey_hash == ikey_hash: + _LOGGER.debug("Responder is issuer") + # Responder is the issuer + responder_cert = issuer + else: + _LOGGER.debug("Responder is a delegate") + # Responder is a delegate + # https://tools.ietf.org/html/rfc6960#section-2.6 + # RFC6960, Section 3.2, Number 3 + certs = response.certificates + if response.responder_name is not None: + responder_certs = _get_certs_by_name(certs, issuer, name) + _LOGGER.debug("Using responder name") + else: + responder_certs = _get_certs_by_key_hash(certs, issuer, rkey_hash) + _LOGGER.debug("Using key hash") + if not responder_certs: + _LOGGER.debug("No matching or valid responder certs.") + return 0 + # XXX: Can there be more than one? If so, should we try each one + # until we find one that passes signature verification? 
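+        # Take the first candidate; the checks below fail closed
+        # (return 0) if it is not a valid OCSP signing delegate.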
+ responder_cert = responder_certs[0] + + # RFC6960, Section 3.2, Number 4 + ext = _get_extension(responder_cert, _ExtendedKeyUsage) + if not ext or _ExtendedKeyUsageOID.OCSP_SIGNING not in ext.value: + _LOGGER.debug("Delegate not authorized for OCSP signing") + return 0 + if not _verify_signature( + issuer.public_key(), + responder_cert.signature, + responder_cert.signature_hash_algorithm, + responder_cert.tbs_certificate_bytes, + ): + _LOGGER.debug("Delegate signature verification failed") + return 0 + # RFC6960, Section 3.2, Number 2 + ret = _verify_signature( + responder_cert.public_key(), + response.signature, + response.signature_hash_algorithm, + response.tbs_response_bytes, + ) + if not ret: + _LOGGER.debug("Response signature verification failed") + return ret + + +def _build_ocsp_request(cert: Certificate, issuer: Certificate) -> OCSPRequest: + # https://cryptography.io/en/latest/x509/ocsp/#creating-requests + builder = _OCSPRequestBuilder() + builder = builder.add_certificate(cert, issuer, _SHA1()) + return builder.build() + + +def _verify_response(issuer: Certificate, response: OCSPResponse) -> int: + _LOGGER.debug("Verifying response") + # RFC6960, Section 3.2, Number 2, 3 and 4 happen here. + res = _verify_response_signature(issuer, response) + if not res: + return 0 + + # Note that we are not using a "tolerance period" as discussed in + # https://tools.ietf.org/rfc/rfc5019.txt? + now = _datetime.now(tz=timezone.utc).replace(tzinfo=None) + # RFC6960, Section 3.2, Number 5 + if response.this_update > now: + _LOGGER.debug("thisUpdate is in the future") + return 0 + # RFC6960, Section 3.2, Number 6 + if response.next_update and response.next_update < now: + _LOGGER.debug("nextUpdate is in the past") + return 0 + return 1 + + +def _get_ocsp_response( + cert: Certificate, issuer: Certificate, uri: Union[str, bytes], ocsp_response_cache: _OCSPCache +) -> Optional[OCSPResponse]: + ocsp_request = _build_ocsp_request(cert, issuer) + try: + ocsp_response = ocsp_response_cache[ocsp_request] + _LOGGER.debug("Using cached OCSP response.") + except KeyError: + # CSOT: use the configured timeout or 5 seconds, whichever is smaller. + # Note that request's timeout works differently and does not imply an absolute + # deadline: https://requests.readthedocs.io/en/stable/user/quickstart/#timeouts + timeout = max(_csot.clamp_remaining(5), 0.001) + try: + response = _post( + uri, + data=ocsp_request.public_bytes(_Encoding.DER), + headers={"Content-Type": "application/ocsp-request"}, + timeout=timeout, + ) + except _RequestException as exc: + _LOGGER.debug("HTTP request failed: %s", exc) + return None + if response.status_code != 200: + _LOGGER.debug("HTTP request returned %d", response.status_code) + return None + ocsp_response = _load_der_ocsp_response(response.content) + _LOGGER.debug("OCSP response status: %r", ocsp_response.response_status) + if ocsp_response.response_status != _OCSPResponseStatus.SUCCESSFUL: + return None + # RFC6960, Section 3.2, Number 1. Only relevant if we need to + # talk to the responder directly. + # Accessing response.serial_number raises if response status is not + # SUCCESSFUL. + if ocsp_response.serial_number != ocsp_request.serial_number: + _LOGGER.debug("Response serial number does not match request") + return None + if not _verify_response(issuer, ocsp_response): + # The response failed verification. 
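+            # A None return means "no usable answer from this URI"; the
+            # caller in _ocsp_callback just moves on to the next
+            # responder rather than hard-failing.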
+ return None + _LOGGER.debug("Caching OCSP response.") + ocsp_response_cache[ocsp_request] = ocsp_response + + return ocsp_response + + +def _ocsp_callback(conn: Connection, ocsp_bytes: bytes, user_data: Optional[_CallbackData]) -> bool: + """Callback for use with OpenSSL.SSL.Context.set_ocsp_client_callback.""" + # always pass in user_data but OpenSSL requires it be optional + assert user_data + pycert = conn.get_peer_certificate() + if pycert is None: + _LOGGER.debug("No peer cert?") + return False + cert = pycert.to_cryptography() + # Use the verified chain when available (pyopenssl>=20.0). + if hasattr(conn, "get_verified_chain"): + pychain = conn.get_verified_chain() + trusted_ca_certs = None + else: + pychain = conn.get_peer_cert_chain() + trusted_ca_certs = user_data.trusted_ca_certs + if not pychain: + _LOGGER.debug("No peer cert chain?") + return False + chain = [cer.to_cryptography() for cer in pychain] + issuer = _get_issuer_cert(cert, chain, trusted_ca_certs) + must_staple = False + # https://tools.ietf.org/html/rfc7633#section-4.2.3.1 + ext_tls = _get_extension(cert, _TLSFeature) + if ext_tls is not None: + for feature in ext_tls.value: + if feature == _TLSFeatureType.status_request: + _LOGGER.debug("Peer presented a must-staple cert") + must_staple = True + break + ocsp_response_cache = user_data.ocsp_response_cache + + # No stapled OCSP response + if ocsp_bytes == b"": + _LOGGER.debug("Peer did not staple an OCSP response") + if must_staple: + _LOGGER.debug("Must-staple cert with no stapled response, hard fail.") + return False + if not user_data.check_ocsp_endpoint: + _LOGGER.debug("OCSP endpoint checking is disabled, soft fail.") + # No stapled OCSP response, checking responder URI disabled, soft fail. + return True + # https://tools.ietf.org/html/rfc6960#section-3.1 + ext_aia = _get_extension(cert, _AuthorityInformationAccess) + if ext_aia is None: + _LOGGER.debug("No authority access information, soft fail") + # No stapled OCSP response, no responder URI, soft fail. + return True + uris = [ + desc.access_location.value + for desc in ext_aia.value + if desc.access_method == _AuthorityInformationAccessOID.OCSP + ] + if not uris: + _LOGGER.debug("No OCSP URI, soft fail") + # No responder URI, soft fail. + return True + if issuer is None: + _LOGGER.debug("No issuer cert?") + return False + _LOGGER.debug("Requesting OCSP data") + # When requesting data from an OCSP endpoint we only fail on + # successful, valid responses with a certificate status of REVOKED. + for uri in uris: + _LOGGER.debug("Trying %s", uri) + response = _get_ocsp_response(cert, issuer, uri, ocsp_response_cache) + if response is None: + # The endpoint didn't respond in time, or the response was + # unsuccessful or didn't match the request, or the response + # failed verification. + continue + _LOGGER.debug("OCSP cert status: %r", response.certificate_status) + if response.certificate_status == _OCSPCertStatus.GOOD: + return True + if response.certificate_status == _OCSPCertStatus.REVOKED: + return False + # Soft fail if we couldn't get a definitive status. 
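+        # An UNKNOWN certificate status falls through to here: only a
+        # definitive REVOKED answer fails the handshake under the
+        # driver's soft-fail policy.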
+ _LOGGER.debug("No definitive OCSP cert status, soft fail") + return True + + _LOGGER.debug("Peer stapled an OCSP response") + if issuer is None: + _LOGGER.debug("No issuer cert?") + return False + response = _load_der_ocsp_response(ocsp_bytes) + _LOGGER.debug("OCSP response status: %r", response.response_status) + # This happens in _request_ocsp when there is no stapled response so + # we know if we can compare serial numbers for the request and response. + if response.response_status != _OCSPResponseStatus.SUCCESSFUL: + return False + if not _verify_response(issuer, response): + return False + # Cache the verified, stapled response. + ocsp_response_cache[_build_ocsp_request(cert, issuer)] = response + _LOGGER.debug("OCSP cert status: %r", response.certificate_status) + if response.certificate_status == _OCSPCertStatus.REVOKED: + return False + return True diff --git a/backend/test/lib/python3.8/site-packages/pymongo/operations.py b/backend/test/lib/python3.8/site-packages/pymongo/operations.py new file mode 100644 index 0000000000000000000000000000000000000000..a72dd523ba7dcda63d3b518c1872145f042d5c3c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/operations.py @@ -0,0 +1,573 @@ +# Copyright 2015-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Operation class definitions.""" +from __future__ import annotations + +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Generic, + List, + Mapping, + Optional, + Sequence, + Tuple, + Union, +) + +from bson.raw_bson import RawBSONDocument +from pymongo import helpers +from pymongo.collation import validate_collation_or_none +from pymongo.common import validate_boolean, validate_is_mapping, validate_list +from pymongo.helpers import _gen_index_name, _index_document, _index_list +from pymongo.typings import _CollationIn, _DocumentType, _Pipeline + +if TYPE_CHECKING: + from bson.son import SON + from pymongo.bulk import _Bulk + +# Hint supports index name, "myIndex", or list of either strings or index pairs: [('x', 1), ('y', -1), 'z''] +_IndexList = Sequence[Union[str, Tuple[str, Union[int, str, Mapping[str, Any]]]]] +_IndexKeyHint = Union[str, _IndexList] + + +class InsertOne(Generic[_DocumentType]): + """Represents an insert_one operation.""" + + __slots__ = ("_doc",) + + def __init__(self, document: _DocumentType) -> None: + """Create an InsertOne instance. + + For use with :meth:`~pymongo.collection.Collection.bulk_write`. + + :Parameters: + - `document`: The document to insert. If the document is missing an + _id field one will be added. 
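+
+        A minimal sketch of intended use (the database and collection
+        names are illustrative)::
+
+            from pymongo import InsertOne, MongoClient
+
+            coll = MongoClient().db.test
+            coll.bulk_write([InsertOne({"x": 1})])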
+ """ + self._doc = document + + def _add_to_bulk(self, bulkobj: _Bulk) -> None: + """Add this operation to the _Bulk instance `bulkobj`.""" + bulkobj.add_insert(self._doc) # type: ignore[arg-type] + + def __repr__(self) -> str: + return f"InsertOne({self._doc!r})" + + def __eq__(self, other: Any) -> bool: + if type(other) == type(self): + return other._doc == self._doc + return NotImplemented + + def __ne__(self, other: Any) -> bool: + return not self == other + + +class DeleteOne: + """Represents a delete_one operation.""" + + __slots__ = ("_filter", "_collation", "_hint") + + def __init__( + self, + filter: Mapping[str, Any], + collation: Optional[_CollationIn] = None, + hint: Optional[_IndexKeyHint] = None, + ) -> None: + """Create a DeleteOne instance. + + For use with :meth:`~pymongo.collection.Collection.bulk_write`. + + :Parameters: + - `filter`: A query that matches the document to delete. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `hint` (optional): An index to use to support the query + predicate specified either by its string name, or in the same + format as passed to + :meth:`~pymongo.collection.Collection.create_index` (e.g. + ``[('field', ASCENDING)]``). This option is only supported on + MongoDB 4.4 and above. + + .. versionchanged:: 3.11 + Added the ``hint`` option. + .. versionchanged:: 3.5 + Added the `collation` option. + """ + if filter is not None: + validate_is_mapping("filter", filter) + if hint is not None and not isinstance(hint, str): + self._hint: Union[str, SON[str, Any], None] = helpers._index_document(hint) + else: + self._hint = hint + self._filter = filter + self._collation = collation + + def _add_to_bulk(self, bulkobj: _Bulk) -> None: + """Add this operation to the _Bulk instance `bulkobj`.""" + bulkobj.add_delete( + self._filter, + 1, + collation=validate_collation_or_none(self._collation), + hint=self._hint, + ) + + def __repr__(self) -> str: + return f"DeleteOne({self._filter!r}, {self._collation!r})" + + def __eq__(self, other: Any) -> bool: + if type(other) == type(self): + return (other._filter, other._collation) == (self._filter, self._collation) + return NotImplemented + + def __ne__(self, other: Any) -> bool: + return not self == other + + +class DeleteMany: + """Represents a delete_many operation.""" + + __slots__ = ("_filter", "_collation", "_hint") + + def __init__( + self, + filter: Mapping[str, Any], + collation: Optional[_CollationIn] = None, + hint: Optional[_IndexKeyHint] = None, + ) -> None: + """Create a DeleteMany instance. + + For use with :meth:`~pymongo.collection.Collection.bulk_write`. + + :Parameters: + - `filter`: A query that matches the documents to delete. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `hint` (optional): An index to use to support the query + predicate specified either by its string name, or in the same + format as passed to + :meth:`~pymongo.collection.Collection.create_index` (e.g. + ``[('field', ASCENDING)]``). This option is only supported on + MongoDB 4.4 and above. + + .. versionchanged:: 3.11 + Added the ``hint`` option. + .. versionchanged:: 3.5 + Added the `collation` option. 
+ """ + if filter is not None: + validate_is_mapping("filter", filter) + if hint is not None and not isinstance(hint, str): + self._hint: Union[str, SON[str, Any], None] = helpers._index_document(hint) + else: + self._hint = hint + self._filter = filter + self._collation = collation + + def _add_to_bulk(self, bulkobj: _Bulk) -> None: + """Add this operation to the _Bulk instance `bulkobj`.""" + bulkobj.add_delete( + self._filter, + 0, + collation=validate_collation_or_none(self._collation), + hint=self._hint, + ) + + def __repr__(self) -> str: + return f"DeleteMany({self._filter!r}, {self._collation!r})" + + def __eq__(self, other: Any) -> bool: + if type(other) == type(self): + return (other._filter, other._collation) == (self._filter, self._collation) + return NotImplemented + + def __ne__(self, other: Any) -> bool: + return not self == other + + +class ReplaceOne(Generic[_DocumentType]): + """Represents a replace_one operation.""" + + __slots__ = ("_filter", "_doc", "_upsert", "_collation", "_hint") + + def __init__( + self, + filter: Mapping[str, Any], + replacement: Union[_DocumentType, RawBSONDocument], + upsert: bool = False, + collation: Optional[_CollationIn] = None, + hint: Optional[_IndexKeyHint] = None, + ) -> None: + """Create a ReplaceOne instance. + + For use with :meth:`~pymongo.collection.Collection.bulk_write`. + + :Parameters: + - `filter`: A query that matches the document to replace. + - `replacement`: The new document. + - `upsert` (optional): If ``True``, perform an insert if no documents + match the filter. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `hint` (optional): An index to use to support the query + predicate specified either by its string name, or in the same + format as passed to + :meth:`~pymongo.collection.Collection.create_index` (e.g. + ``[('field', ASCENDING)]``). This option is only supported on + MongoDB 4.2 and above. + + .. versionchanged:: 3.11 + Added the ``hint`` option. + .. versionchanged:: 3.5 + Added the ``collation`` option. 
+ """ + if filter is not None: + validate_is_mapping("filter", filter) + if upsert is not None: + validate_boolean("upsert", upsert) + if hint is not None and not isinstance(hint, str): + self._hint: Union[str, SON[str, Any], None] = helpers._index_document(hint) + else: + self._hint = hint + self._filter = filter + self._doc = replacement + self._upsert = upsert + self._collation = collation + + def _add_to_bulk(self, bulkobj: _Bulk) -> None: + """Add this operation to the _Bulk instance `bulkobj`.""" + bulkobj.add_replace( + self._filter, + self._doc, + self._upsert, + collation=validate_collation_or_none(self._collation), + hint=self._hint, + ) + + def __eq__(self, other: Any) -> bool: + if type(other) == type(self): + return (other._filter, other._doc, other._upsert, other._collation, other._hint,) == ( + self._filter, + self._doc, + self._upsert, + self._collation, + other._hint, + ) + return NotImplemented + + def __ne__(self, other: Any) -> bool: + return not self == other + + def __repr__(self) -> str: + return "{}({!r}, {!r}, {!r}, {!r}, {!r})".format( + self.__class__.__name__, + self._filter, + self._doc, + self._upsert, + self._collation, + self._hint, + ) + + +class _UpdateOp: + """Private base class for update operations.""" + + __slots__ = ("_filter", "_doc", "_upsert", "_collation", "_array_filters", "_hint") + + def __init__( + self, + filter: Mapping[str, Any], + doc: Union[Mapping[str, Any], _Pipeline], + upsert: bool, + collation: Optional[_CollationIn], + array_filters: Optional[List[Mapping[str, Any]]], + hint: Optional[_IndexKeyHint], + ): + if filter is not None: + validate_is_mapping("filter", filter) + if upsert is not None: + validate_boolean("upsert", upsert) + if array_filters is not None: + validate_list("array_filters", array_filters) + if hint is not None and not isinstance(hint, str): + self._hint: Union[str, SON[str, Any], None] = helpers._index_document(hint) + else: + self._hint = hint + + self._filter = filter + self._doc = doc + self._upsert = upsert + self._collation = collation + self._array_filters = array_filters + + def __eq__(self, other: object) -> bool: + if isinstance(other, type(self)): + return ( + other._filter, + other._doc, + other._upsert, + other._collation, + other._array_filters, + other._hint, + ) == ( + self._filter, + self._doc, + self._upsert, + self._collation, + self._array_filters, + self._hint, + ) + return NotImplemented + + def __repr__(self) -> str: + return "{}({!r}, {!r}, {!r}, {!r}, {!r}, {!r})".format( + self.__class__.__name__, + self._filter, + self._doc, + self._upsert, + self._collation, + self._array_filters, + self._hint, + ) + + +class UpdateOne(_UpdateOp): + """Represents an update_one operation.""" + + __slots__ = () + + def __init__( + self, + filter: Mapping[str, Any], + update: Union[Mapping[str, Any], _Pipeline], + upsert: bool = False, + collation: Optional[_CollationIn] = None, + array_filters: Optional[List[Mapping[str, Any]]] = None, + hint: Optional[_IndexKeyHint] = None, + ) -> None: + """Represents an update_one operation. + + For use with :meth:`~pymongo.collection.Collection.bulk_write`. + + :Parameters: + - `filter`: A query that matches the document to update. + - `update`: The modifications to apply. + - `upsert` (optional): If ``True``, perform an insert if no documents + match the filter. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `array_filters` (optional): A list of filters specifying which + array elements an update should apply. 
+ - `hint` (optional): An index to use to support the query + predicate specified either by its string name, or in the same + format as passed to + :meth:`~pymongo.collection.Collection.create_index` (e.g. + ``[('field', ASCENDING)]``). This option is only supported on + MongoDB 4.2 and above. + + .. versionchanged:: 3.11 + Added the `hint` option. + .. versionchanged:: 3.9 + Added the ability to accept a pipeline as the `update`. + .. versionchanged:: 3.6 + Added the `array_filters` option. + .. versionchanged:: 3.5 + Added the `collation` option. + """ + super().__init__(filter, update, upsert, collation, array_filters, hint) + + def _add_to_bulk(self, bulkobj: _Bulk) -> None: + """Add this operation to the _Bulk instance `bulkobj`.""" + bulkobj.add_update( + self._filter, + self._doc, + False, + self._upsert, + collation=validate_collation_or_none(self._collation), + array_filters=self._array_filters, + hint=self._hint, + ) + + +class UpdateMany(_UpdateOp): + """Represents an update_many operation.""" + + __slots__ = () + + def __init__( + self, + filter: Mapping[str, Any], + update: Union[Mapping[str, Any], _Pipeline], + upsert: bool = False, + collation: Optional[_CollationIn] = None, + array_filters: Optional[List[Mapping[str, Any]]] = None, + hint: Optional[_IndexKeyHint] = None, + ) -> None: + """Create an UpdateMany instance. + + For use with :meth:`~pymongo.collection.Collection.bulk_write`. + + :Parameters: + - `filter`: A query that matches the documents to update. + - `update`: The modifications to apply. + - `upsert` (optional): If ``True``, perform an insert if no documents + match the filter. + - `collation` (optional): An instance of + :class:`~pymongo.collation.Collation`. + - `array_filters` (optional): A list of filters specifying which + array elements an update should apply. + - `hint` (optional): An index to use to support the query + predicate specified either by its string name, or in the same + format as passed to + :meth:`~pymongo.collection.Collection.create_index` (e.g. + ``[('field', ASCENDING)]``). This option is only supported on + MongoDB 4.2 and above. + + .. versionchanged:: 3.11 + Added the `hint` option. + .. versionchanged:: 3.9 + Added the ability to accept a pipeline as the `update`. + .. versionchanged:: 3.6 + Added the `array_filters` option. + .. versionchanged:: 3.5 + Added the `collation` option. + """ + super().__init__(filter, update, upsert, collation, array_filters, hint) + + def _add_to_bulk(self, bulkobj: _Bulk) -> None: + """Add this operation to the _Bulk instance `bulkobj`.""" + bulkobj.add_update( + self._filter, + self._doc, + True, + self._upsert, + collation=validate_collation_or_none(self._collation), + array_filters=self._array_filters, + hint=self._hint, + ) + + +class IndexModel: + """Represents an index to create.""" + + __slots__ = ("__document",) + + def __init__(self, keys: _IndexKeyHint, **kwargs: Any) -> None: + """Create an Index instance. + + For use with :meth:`~pymongo.collection.Collection.create_indexes`. + + Takes either a single key or a list containing (key, direction) pairs + or keys. If no direction is given, :data:`~pymongo.ASCENDING` will + be assumed. + The key(s) must be an instance of :class:`str`, and the direction(s) must + be one of (:data:`~pymongo.ASCENDING`, :data:`~pymongo.DESCENDING`, + :data:`~pymongo.GEO2D`, :data:`~pymongo.GEOSPHERE`, + :data:`~pymongo.HASHED`, :data:`~pymongo.TEXT`). 
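+
+ For example, a compound-index sketch (the field names are illustrative;
+ ``collection`` is assumed to be an existing
+ :class:`~pymongo.collection.Collection`)::
+
+     from pymongo import ASCENDING, DESCENDING, IndexModel
+
+     index = IndexModel([("username", ASCENDING), ("created", DESCENDING)])
+     collection.create_indexes([index])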
+ + Valid options include, but are not limited to: + + - `name`: custom name to use for this index - if none is + given, a name will be generated. + - `unique`: if ``True``, creates a uniqueness constraint on the index. + - `background`: if ``True``, this index should be created in the + background. + - `sparse`: if ``True``, omit from the index any documents that lack + the indexed field. + - `bucketSize`: for use with geoHaystack indexes. + Number of documents to group together within a certain proximity + to a given longitude and latitude. + - `min`: minimum value for keys in a :data:`~pymongo.GEO2D` + index. + - `max`: maximum value for keys in a :data:`~pymongo.GEO2D` + index. + - `expireAfterSeconds`: <int> Used to create an expiring (TTL) + collection. MongoDB will automatically delete documents from + this collection after <int> seconds. The indexed field must + be a UTC datetime or the data will not expire. + - `partialFilterExpression`: A document that specifies a filter for + a partial index. + - `collation`: An instance of :class:`~pymongo.collation.Collation` + that specifies the collation to use. + - `wildcardProjection`: Allows users to include or exclude specific + field paths from a `wildcard index`_ using the { "$**" : 1} key + pattern. Requires MongoDB >= 4.2. + - `hidden`: if ``True``, this index will be hidden from the query + planner and will not be evaluated as part of query plan + selection. Requires MongoDB >= 4.4. + + See the MongoDB documentation for a full list of supported options by + server version. + + :Parameters: + - `keys`: a single key or a list containing (key, direction) pairs + or keys specifying the index to create. + - `**kwargs` (optional): any additional index creation + options (see the above list) should be passed as keyword + arguments. + + .. versionchanged:: 3.11 + Added the ``hidden`` option. + .. versionchanged:: 3.2 + Added the ``partialFilterExpression`` option to support partial + indexes. + + .. _wildcard index: https://mongodb.com/docs/master/core/index-wildcard/ + """ + keys = _index_list(keys) + if kwargs.get("name") is None: + kwargs["name"] = _gen_index_name(keys) + kwargs["key"] = _index_document(keys) + collation = validate_collation_or_none(kwargs.pop("collation", None)) + self.__document = kwargs + if collation is not None: + self.__document["collation"] = collation + + @property + def document(self) -> Dict[str, Any]: + """An index document suitable for passing to the createIndexes + command. + """ + return self.__document + + +class SearchIndexModel: + """Represents a search index to create.""" + + __slots__ = "__document" + + def __init__(self, definition: Mapping[str, Any], name: Optional[str] = None) -> None: + """Create a Search Index instance. + + For use with :meth:`~pymongo.collection.Collection.create_search_index` and :meth:`~pymongo.collection.Collection.create_search_indexes`. + + :Parameters: + - `definition` - The definition for this index. + - `name` (optional) - The name for this index, if present. + + .. versionadded:: 4.5 + + .. note:: Search indexes require a MongoDB server version 7.0+ Atlas cluster. 
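+
+ A minimal sketch (the definition is illustrative; ``collection`` is
+ assumed to be bound to a supported Atlas cluster)::
+
+     model = SearchIndexModel({"mappings": {"dynamic": True}}, name="default")
+     collection.create_search_index(model)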
+ """ + if name is not None: + self.__document = dict(name=name, definition=definition) + else: + self.__document = dict(definition=definition) + + @property + def document(self) -> Mapping[str, Any]: + """The document for this index.""" + return self.__document diff --git a/backend/test/lib/python3.8/site-packages/pymongo/periodic_executor.py b/backend/test/lib/python3.8/site-packages/pymongo/periodic_executor.py new file mode 100644 index 0000000000000000000000000000000000000000..003b05647c86276ba4e1bed0ccdbf376712683ea --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/periodic_executor.py @@ -0,0 +1,192 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""Run a target function on a background thread.""" + +from __future__ import annotations + +import threading +import time +import weakref +from typing import Any, Callable, Optional + +from pymongo.lock import _create_lock + + +class PeriodicExecutor: + def __init__( + self, + interval: float, + min_interval: float, + target: Callable[[], bool], + name: Optional[str] = None, + ): + """ "Run a target function periodically on a background thread. + + If the target's return value is false, the executor stops. + + :Parameters: + - `interval`: Seconds between calls to `target`. + - `min_interval`: Minimum seconds between calls if `wake` is + called very often. + - `target`: A function. + - `name`: A name to give the underlying thread. + """ + # threading.Event and its internal condition variable are expensive + # in Python 2, see PYTHON-983. Use a boolean to know when to wake. + # The executor's design is constrained by several Python issues, see + # "periodic_executor.rst" in this repository. + self._event = False + self._interval = interval + self._min_interval = min_interval + self._target = target + self._stopped = False + self._thread: Optional[threading.Thread] = None + self._name = name + self._skip_sleep = False + self._thread_will_exit = False + self._lock = _create_lock() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}(name={self._name}) object at 0x{id(self):x}>" + + def open(self) -> None: + """Start. Multiple calls have no effect. + + Not safe to call from multiple threads at once. + """ + with self._lock: + if self._thread_will_exit: + # If the background thread has read self._stopped as True + # there is a chance that it has not yet exited. The call to + # join should not block indefinitely because there is no + # other work done outside the while loop in self._run. + try: + assert self._thread is not None + self._thread.join() + except ReferenceError: + # Thread terminated. + pass + self._thread_will_exit = False + self._stopped = False + started: Any = False + try: + started = self._thread and self._thread.is_alive() + except ReferenceError: + # Thread terminated. 
+ pass + + if not started: + thread = threading.Thread(target=self._run, name=self._name) + thread.daemon = True + self._thread = weakref.proxy(thread) + _register_executor(self) + thread.start() + + def close(self, dummy: Any = None) -> None: + """Stop. To restart, call open(). + + The dummy parameter allows an executor's close method to be a weakref + callback; see monitor.py. + """ + self._stopped = True + + def join(self, timeout: Optional[int] = None) -> None: + if self._thread is not None: + try: + self._thread.join(timeout) + except (ReferenceError, RuntimeError): + # Thread already terminated, or not yet started. + pass + + def wake(self) -> None: + """Execute the target function soon.""" + self._event = True + + def update_interval(self, new_interval: int) -> None: + self._interval = new_interval + + def skip_sleep(self) -> None: + self._skip_sleep = True + + def __should_stop(self) -> bool: + with self._lock: + if self._stopped: + self._thread_will_exit = True + return True + return False + + def _run(self) -> None: + while not self.__should_stop(): + try: + if not self._target(): + self._stopped = True + break + except BaseException: + with self._lock: + self._stopped = True + self._thread_will_exit = True + + raise + + if self._skip_sleep: + self._skip_sleep = False + else: + deadline = time.monotonic() + self._interval + while not self._stopped and time.monotonic() < deadline: + time.sleep(self._min_interval) + if self._event: + break # Early wake. + + self._event = False + + +# _EXECUTORS has a weakref to each running PeriodicExecutor. Once started, +# an executor is kept alive by a strong reference from its thread and perhaps +# from other objects. When the thread dies and all other referrers are freed, +# the executor is freed and removed from _EXECUTORS. If any threads are +# running when the interpreter begins to shut down, we try to halt and join +# them to avoid spurious errors. +_EXECUTORS = set() + + +def _register_executor(executor: PeriodicExecutor) -> None: + ref = weakref.ref(executor, _on_executor_deleted) + _EXECUTORS.add(ref) + + +def _on_executor_deleted(ref: weakref.ReferenceType[PeriodicExecutor]) -> None: + _EXECUTORS.remove(ref) + + +def _shutdown_executors() -> None: + if _EXECUTORS is None: + return + + # Copy the set. Stopping threads has the side effect of removing executors. + executors = list(_EXECUTORS) + + # First signal all executors to close... + for ref in executors: + executor = ref() + if executor: + executor.close() + + # ...then try to join them. + for ref in executors: + executor = ref() + if executor: + executor.join(1) + + executor = None diff --git a/backend/test/lib/python3.8/site-packages/pymongo/pool.py b/backend/test/lib/python3.8/site-packages/pymongo/pool.py new file mode 100644 index 0000000000000000000000000000000000000000..68052f64957c7f182a6964ed3ecc6bc497b66f05 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/pool.py @@ -0,0 +1,1858 @@ +# Copyright 2011-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
See the License for the specific language governing +# permissions and limitations under the License. + +from __future__ import annotations + +import collections +import contextlib +import copy +import os +import platform +import socket +import ssl +import sys +import threading +import time +import weakref +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterator, + List, + Mapping, + MutableMapping, + NoReturn, + Optional, + Sequence, + Set, + Tuple, + Union, +) + +import bson +from bson import DEFAULT_CODEC_OPTIONS +from bson.son import SON +from pymongo import __version__, _csot, auth, helpers +from pymongo.client_session import _validate_session_write_concern +from pymongo.common import ( + MAX_BSON_SIZE, + MAX_CONNECTING, + MAX_IDLE_TIME_SEC, + MAX_MESSAGE_SIZE, + MAX_POOL_SIZE, + MAX_WIRE_VERSION, + MAX_WRITE_BATCH_SIZE, + MIN_POOL_SIZE, + ORDERED_TYPES, + WAIT_QUEUE_TIMEOUT, +) +from pymongo.errors import ( + AutoReconnect, + ConfigurationError, + ConnectionFailure, + DocumentTooLarge, + ExecutionTimeout, + InvalidOperation, + NetworkTimeout, + NotPrimaryError, + OperationFailure, + PyMongoError, + WaitQueueTimeoutError, + _CertificateError, +) +from pymongo.hello import Hello, HelloCompat +from pymongo.helpers import _handle_reauth +from pymongo.lock import _create_lock +from pymongo.monitoring import ( + ConnectionCheckOutFailedReason, + ConnectionClosedReason, + _EventListeners, +) +from pymongo.network import command, receive_message +from pymongo.read_preferences import ReadPreference +from pymongo.server_api import _add_to_command +from pymongo.server_type import SERVER_TYPE +from pymongo.socket_checker import SocketChecker +from pymongo.ssl_support import HAS_SNI, SSLError + +if TYPE_CHECKING: + from bson import CodecOptions + from bson.objectid import ObjectId + from pymongo.auth import MongoCredential, _AuthContext + from pymongo.client_session import ClientSession + from pymongo.compression_support import ( + CompressionSettings, + SnappyContext, + ZlibContext, + ZstdContext, + ) + from pymongo.driver_info import DriverInfo + from pymongo.message import _OpMsg, _OpReply + from pymongo.mongo_client import MongoClient, _MongoClientErrorHandler + from pymongo.pyopenssl_context import SSLContext, _sslConn + from pymongo.read_concern import ReadConcern + from pymongo.read_preferences import _ServerMode + from pymongo.server_api import ServerApi + from pymongo.typings import ClusterTime, _Address, _CollationIn + from pymongo.write_concern import WriteConcern + +try: + from fcntl import F_GETFD, F_SETFD, FD_CLOEXEC, fcntl + + def _set_non_inheritable_non_atomic(fd: int) -> None: + """Set the close-on-exec flag on the given file descriptor.""" + flags = fcntl(fd, F_GETFD) + fcntl(fd, F_SETFD, flags | FD_CLOEXEC) + +except ImportError: + # Windows, various platforms we don't claim to support + # (Jython, IronPython, ...), systems that don't provide + # everything we need from fcntl, etc. + def _set_non_inheritable_non_atomic(fd: int) -> None: + """Dummy function for platforms that don't provide fcntl.""" + + +_MAX_TCP_KEEPIDLE = 120 +_MAX_TCP_KEEPINTVL = 10 +_MAX_TCP_KEEPCNT = 9 + +if sys.platform == "win32": + try: + import _winreg as winreg + except ImportError: + import winreg + + def _query(key, name, default): + try: + value, _ = winreg.QueryValueEx(key, name) + # Ensure the value is a number or raise ValueError. + return int(value) + except (OSError, ValueError): + # QueryValueEx raises OSError when the key does not exist (i.e. 
+ # the system is using the Windows default value). + return default + + try: + with winreg.OpenKey( + winreg.HKEY_LOCAL_MACHINE, r"SYSTEM\CurrentControlSet\Services\Tcpip\Parameters" + ) as key: + _WINDOWS_TCP_IDLE_MS = _query(key, "KeepAliveTime", 7200000) + _WINDOWS_TCP_INTERVAL_MS = _query(key, "KeepAliveInterval", 1000) + except OSError: + # We could not check the default values because winreg.OpenKey failed. + # Assume the system is using the default values. + _WINDOWS_TCP_IDLE_MS = 7200000 + _WINDOWS_TCP_INTERVAL_MS = 1000 + + def _set_keepalive_times(sock): + idle_ms = min(_WINDOWS_TCP_IDLE_MS, _MAX_TCP_KEEPIDLE * 1000) + interval_ms = min(_WINDOWS_TCP_INTERVAL_MS, _MAX_TCP_KEEPINTVL * 1000) + if idle_ms < _WINDOWS_TCP_IDLE_MS or interval_ms < _WINDOWS_TCP_INTERVAL_MS: + sock.ioctl(socket.SIO_KEEPALIVE_VALS, (1, idle_ms, interval_ms)) + +else: + + def _set_tcp_option(sock: socket.socket, tcp_option: str, max_value: int) -> None: + if hasattr(socket, tcp_option): + sockopt = getattr(socket, tcp_option) + try: + # PYTHON-1350 - NetBSD doesn't implement getsockopt for + # TCP_KEEPIDLE and friends. Don't attempt to set the + # values there. + default = sock.getsockopt(socket.IPPROTO_TCP, sockopt) + if default > max_value: + sock.setsockopt(socket.IPPROTO_TCP, sockopt, max_value) + except OSError: + pass + + def _set_keepalive_times(sock: socket.socket) -> None: + _set_tcp_option(sock, "TCP_KEEPIDLE", _MAX_TCP_KEEPIDLE) + _set_tcp_option(sock, "TCP_KEEPINTVL", _MAX_TCP_KEEPINTVL) + _set_tcp_option(sock, "TCP_KEEPCNT", _MAX_TCP_KEEPCNT) + + +_METADATA: SON[str, Any] = SON( + [ + ("driver", SON([("name", "PyMongo"), ("version", __version__)])), + ] +) + +if sys.platform.startswith("linux"): + # platform.linux_distribution was deprecated in Python 3.5 + # and removed in Python 3.8. Starting in Python 3.5 it + # raises DeprecationWarning + # DeprecationWarning: dist() and linux_distribution() functions are deprecated in Python 3.5 + _name = platform.system() + _METADATA["os"] = SON( + [ + ("type", _name), + ("name", _name), + ("architecture", platform.machine()), + # Kernel version (e.g. 4.4.0-17-generic). + ("version", platform.release()), + ] + ) +elif sys.platform == "darwin": + _METADATA["os"] = SON( + [ + ("type", platform.system()), + ("name", platform.system()), + ("architecture", platform.machine()), + # (mac|i|tv)OS(X) version (e.g. 10.11.6) instead of darwin + # kernel version. + ("version", platform.mac_ver()[0]), + ] + ) +elif sys.platform == "win32": + _METADATA["os"] = SON( + [ + ("type", platform.system()), + # "Windows XP", "Windows 7", "Windows 10", etc. + ("name", " ".join((platform.system(), platform.release()))), + ("architecture", platform.machine()), + # Windows patch level (e.g. 5.1.2600-SP3) + ("version", "-".join(platform.win32_ver()[1:3])), + ] + ) +elif sys.platform.startswith("java"): + _name, _ver, _arch = platform.java_ver()[-1] + _METADATA["os"] = SON( + [ + # Linux, Windows 7, Mac OS X, etc. + ("type", _name), + ("name", _name), + # x86, x86_64, AMD64, etc. + ("architecture", _arch), + # Linux kernel version, OSX version, etc. + ("version", _ver), + ] + ) +else: + # Get potential alias (e.g. 
SunOS 5.11 becomes Solaris 2.11) + _aliased = platform.system_alias(platform.system(), platform.release(), platform.version()) + _METADATA["os"] = SON( + [ + ("type", platform.system()), + ("name", " ".join([part for part in _aliased[:2] if part])), + ("architecture", platform.machine()), + ("version", _aliased[2]), + ] + ) + +if platform.python_implementation().startswith("PyPy"): + _METADATA["platform"] = " ".join( + ( + platform.python_implementation(), + ".".join(map(str, sys.pypy_version_info)), # type: ignore + "(Python %s)" % ".".join(map(str, sys.version_info)), + ) + ) +elif sys.platform.startswith("java"): + _METADATA["platform"] = " ".join( + ( + platform.python_implementation(), + ".".join(map(str, sys.version_info)), + "(%s)" % " ".join((platform.system(), platform.release())), + ) + ) +else: + _METADATA["platform"] = " ".join( + (platform.python_implementation(), ".".join(map(str, sys.version_info))) + ) + + +def _is_lambda() -> bool: + if os.getenv("AWS_LAMBDA_RUNTIME_API"): + return True + env = os.getenv("AWS_EXECUTION_ENV") + if env: + return env.startswith("AWS_Lambda_") + return False + + +def _is_azure_func() -> bool: + return bool(os.getenv("FUNCTIONS_WORKER_RUNTIME")) + + +def _is_gcp_func() -> bool: + return bool(os.getenv("K_SERVICE") or os.getenv("FUNCTION_NAME")) + + +def _is_vercel() -> bool: + return bool(os.getenv("VERCEL")) + + +def _getenv_int(key: str) -> Optional[int]: + """Like os.getenv but returns an int, or None if the value is missing/malformed.""" + val = os.getenv(key) + if not val: + return None + try: + return int(val) + except ValueError: + return None + + +def _metadata_env() -> Dict[str, Any]: + env: Dict[str, Any] = {} + # Skip if multiple (or no) envs are matched. + if (_is_lambda(), _is_azure_func(), _is_gcp_func(), _is_vercel()).count(True) != 1: + return env + if _is_lambda(): + env["name"] = "aws.lambda" + region = os.getenv("AWS_REGION") + if region: + env["region"] = region + memory_mb = _getenv_int("AWS_LAMBDA_FUNCTION_MEMORY_SIZE") + if memory_mb is not None: + env["memory_mb"] = memory_mb + elif _is_azure_func(): + env["name"] = "azure.func" + elif _is_gcp_func(): + env["name"] = "gcp.func" + region = os.getenv("FUNCTION_REGION") + if region: + env["region"] = region + memory_mb = _getenv_int("FUNCTION_MEMORY_MB") + if memory_mb is not None: + env["memory_mb"] = memory_mb + timeout_sec = _getenv_int("FUNCTION_TIMEOUT_SEC") + if timeout_sec is not None: + env["timeout_sec"] = timeout_sec + elif _is_vercel(): + env["name"] = "vercel" + region = os.getenv("VERCEL_REGION") + if region: + env["region"] = region + return env + + +_MAX_METADATA_SIZE = 512 + + +# See: https://github.com/mongodb/specifications/blob/5112bcc/source/mongodb-handshake/handshake.rst#limitations +def _truncate_metadata(metadata: MutableMapping[str, Any]) -> None: + """Perform metadata truncation.""" + if len(bson.encode(metadata)) <= _MAX_METADATA_SIZE: + return + # 1. Omit fields from env except env.name. + env_name = metadata.get("env", {}).get("name") + if env_name: + metadata["env"] = {"name": env_name} + if len(bson.encode(metadata)) <= _MAX_METADATA_SIZE: + return + # 2. Omit fields from os except os.type. + os_type = metadata.get("os", {}).get("type") + if os_type: + metadata["os"] = {"type": os_type} + if len(bson.encode(metadata)) <= _MAX_METADATA_SIZE: + return + # 3. Omit the env document entirely. + metadata.pop("env", None) + encoded_size = len(bson.encode(metadata)) + if encoded_size <= _MAX_METADATA_SIZE: + return + # 4. Truncate platform. 
+ overflow = encoded_size - _MAX_METADATA_SIZE + plat = metadata.get("platform", "") + if plat: + plat = plat[:-overflow] + if plat: + metadata["platform"] = plat + else: + metadata.pop("platform", None) + + +# If the first getaddrinfo call of this interpreter's life is on a thread, +# while the main thread holds the import lock, getaddrinfo deadlocks trying +# to import the IDNA codec. Import it here, where presumably we're on the +# main thread, to avoid the deadlock. See PYTHON-607. +"foo".encode("idna") + + +def _raise_connection_failure( + address: Any, error: Exception, msg_prefix: Optional[str] = None +) -> NoReturn: + """Convert a socket.error to ConnectionFailure and raise it.""" + host, port = address + # If connecting to a Unix socket, port will be None. + if port is not None: + msg = "%s:%d: %s" % (host, port, error) + else: + msg = f"{host}: {error}" + if msg_prefix: + msg = msg_prefix + msg + if isinstance(error, socket.timeout): + raise NetworkTimeout(msg) from error + elif isinstance(error, SSLError) and "timed out" in str(error): + # Eventlet does not distinguish TLS network timeouts from other + # SSLErrors (https://github.com/eventlet/eventlet/issues/692). + # Luckily, we can work around this limitation because the phrase + # 'timed out' appears in all the timeout related SSLErrors raised. + raise NetworkTimeout(msg) from error + else: + raise AutoReconnect(msg) from error + + +def _cond_wait(condition: threading.Condition, deadline: Optional[float]) -> bool: + timeout = deadline - time.monotonic() if deadline else None + return condition.wait(timeout) + + +class PoolOptions: + """Read only connection pool options for a MongoClient. + + Should not be instantiated directly by application developers. Access + a client's pool options via + :attr:`~pymongo.client_options.ClientOptions.pool_options` instead:: + + pool_opts = client.options.pool_options + pool_opts.max_pool_size + pool_opts.min_pool_size + + """ + + __slots__ = ( + "__max_pool_size", + "__min_pool_size", + "__max_idle_time_seconds", + "__connect_timeout", + "__socket_timeout", + "__wait_queue_timeout", + "__ssl_context", + "__tls_allow_invalid_hostnames", + "__event_listeners", + "__appname", + "__driver", + "__metadata", + "__compression_settings", + "__max_connecting", + "__pause_enabled", + "__server_api", + "__load_balanced", + "__credentials", + ) + + def __init__( + self, + max_pool_size: int = MAX_POOL_SIZE, + min_pool_size: int = MIN_POOL_SIZE, + max_idle_time_seconds: Optional[int] = MAX_IDLE_TIME_SEC, + connect_timeout: Optional[float] = None, + socket_timeout: Optional[float] = None, + wait_queue_timeout: Optional[int] = WAIT_QUEUE_TIMEOUT, + ssl_context: Optional[SSLContext] = None, + tls_allow_invalid_hostnames: bool = False, + event_listeners: Optional[_EventListeners] = None, + appname: Optional[str] = None, + driver: Optional[DriverInfo] = None, + compression_settings: Optional[CompressionSettings] = None, + max_connecting: int = MAX_CONNECTING, + pause_enabled: bool = True, + server_api: Optional[ServerApi] = None, + load_balanced: Optional[bool] = None, + credentials: Optional[MongoCredential] = None, + ): + self.__max_pool_size = max_pool_size + self.__min_pool_size = min_pool_size + self.__max_idle_time_seconds = max_idle_time_seconds + self.__connect_timeout = connect_timeout + self.__socket_timeout = socket_timeout + self.__wait_queue_timeout = wait_queue_timeout + self.__ssl_context = ssl_context + self.__tls_allow_invalid_hostnames = tls_allow_invalid_hostnames + self.__event_listeners 
= event_listeners + self.__appname = appname + self.__driver = driver + self.__compression_settings = compression_settings + self.__max_connecting = max_connecting + self.__pause_enabled = pause_enabled + self.__server_api = server_api + self.__load_balanced = load_balanced + self.__credentials = credentials + self.__metadata = copy.deepcopy(_METADATA) + if appname: + self.__metadata["application"] = {"name": appname} + + # Combine the "driver" MongoClient option with PyMongo's info, like: + # { + # 'driver': { + # 'name': 'PyMongo|MyDriver', + # 'version': '4.2.0|1.2.3', + # }, + # 'platform': 'CPython 3.7.0|MyPlatform' + # } + if driver: + if driver.name: + self.__metadata["driver"]["name"] = "{}|{}".format( + _METADATA["driver"]["name"], + driver.name, + ) + if driver.version: + self.__metadata["driver"]["version"] = "{}|{}".format( + _METADATA["driver"]["version"], + driver.version, + ) + if driver.platform: + self.__metadata["platform"] = "{}|{}".format(_METADATA["platform"], driver.platform) + + env = _metadata_env() + if env: + self.__metadata["env"] = env + + _truncate_metadata(self.__metadata) + + @property + def _credentials(self) -> Optional[MongoCredential]: + """A :class:`~pymongo.auth.MongoCredentials` instance or None.""" + return self.__credentials + + @property + def non_default_options(self) -> Dict[str, Any]: + """The non-default options this pool was created with. + + Added for CMAP's :class:`PoolCreatedEvent`. + """ + opts = {} + if self.__max_pool_size != MAX_POOL_SIZE: + opts["maxPoolSize"] = self.__max_pool_size + if self.__min_pool_size != MIN_POOL_SIZE: + opts["minPoolSize"] = self.__min_pool_size + if self.__max_idle_time_seconds != MAX_IDLE_TIME_SEC: + assert self.__max_idle_time_seconds is not None + opts["maxIdleTimeMS"] = self.__max_idle_time_seconds * 1000 + if self.__wait_queue_timeout != WAIT_QUEUE_TIMEOUT: + assert self.__wait_queue_timeout is not None + opts["waitQueueTimeoutMS"] = self.__wait_queue_timeout * 1000 + if self.__max_connecting != MAX_CONNECTING: + opts["maxConnecting"] = self.__max_connecting + return opts + + @property + def max_pool_size(self) -> float: + """The maximum allowable number of concurrent connections to each + connected server. Requests to a server will block if there are + `maxPoolSize` outstanding connections to the requested server. + Defaults to 100. Cannot be 0. + + When a server's pool has reached `max_pool_size`, operations for that + server block waiting for a socket to be returned to the pool. If + ``waitQueueTimeoutMS`` is set, a blocked operation will raise + :exc:`~pymongo.errors.ConnectionFailure` after a timeout. + By default ``waitQueueTimeoutMS`` is not set. + """ + return self.__max_pool_size + + @property + def min_pool_size(self) -> int: + """The minimum required number of concurrent connections that the pool + will maintain to each connected server. Default is 0. + """ + return self.__min_pool_size + + @property + def max_connecting(self) -> int: + """The maximum number of concurrent connection creation attempts per + pool. Defaults to 2. + """ + return self.__max_connecting + + @property + def pause_enabled(self) -> bool: + return self.__pause_enabled + + @property + def max_idle_time_seconds(self) -> Optional[int]: + """The maximum number of seconds that a connection can remain + idle in the pool before being removed and replaced. Defaults to + `None` (no limit). 
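+
+ A usage sketch; ``maxIdleTimeMS`` is the corresponding
+ :class:`~pymongo.mongo_client.MongoClient` option and the value shown
+ is illustrative::
+
+     client = MongoClient(maxIdleTimeMS=60000)
+     client.options.pool_options.max_idle_time_seconds  # 60 seconds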
+ """ + return self.__max_idle_time_seconds + + @property + def connect_timeout(self) -> Optional[float]: + """How long a connection can take to be opened before timing out.""" + return self.__connect_timeout + + @property + def socket_timeout(self) -> Optional[float]: + """How long a send or receive on a socket can take before timing out.""" + return self.__socket_timeout + + @property + def wait_queue_timeout(self) -> Optional[int]: + """How long a thread will wait for a socket from the pool if the pool + has no free sockets. + """ + return self.__wait_queue_timeout + + @property + def _ssl_context(self) -> Optional[SSLContext]: + """An SSLContext instance or None.""" + return self.__ssl_context + + @property + def tls_allow_invalid_hostnames(self) -> bool: + """If True skip ssl.match_hostname.""" + return self.__tls_allow_invalid_hostnames + + @property + def _event_listeners(self) -> Optional[_EventListeners]: + """An instance of pymongo.monitoring._EventListeners.""" + return self.__event_listeners + + @property + def appname(self) -> Optional[str]: + """The application name, for sending with hello in server handshake.""" + return self.__appname + + @property + def driver(self) -> Optional[DriverInfo]: + """Driver name and version, for sending with hello in handshake.""" + return self.__driver + + @property + def _compression_settings(self) -> Optional[CompressionSettings]: + return self.__compression_settings + + @property + def metadata(self) -> SON[str, Any]: + """A dict of metadata about the application, driver, os, and platform.""" + return self.__metadata.copy() + + @property + def server_api(self) -> Optional[ServerApi]: + """A pymongo.server_api.ServerApi or None.""" + return self.__server_api + + @property + def load_balanced(self) -> Optional[bool]: + """True if this Pool is configured in load balanced mode.""" + return self.__load_balanced + + +class _CancellationContext: + def __init__(self) -> None: + self._cancelled = False + + def cancel(self) -> None: + """Cancel this context.""" + self._cancelled = True + + @property + def cancelled(self) -> bool: + """Was cancel called?""" + return self._cancelled + + +class Connection: + """Store a connection with some metadata. + + :Parameters: + - `conn`: a raw connection object + - `pool`: a Pool instance + - `address`: the server's (host, port) + - `id`: the id of this socket in it's pool + """ + + def __init__( + self, conn: Union[socket.socket, _sslConn], pool: Pool, address: Tuple[str, int], id: int + ): + self.pool_ref = weakref.ref(pool) + self.conn = conn + self.address = address + self.id = id + self.closed = False + self.last_checkin_time = time.monotonic() + self.performed_handshake = False + self.is_writable: bool = False + self.max_wire_version = MAX_WIRE_VERSION + self.max_bson_size = MAX_BSON_SIZE + self.max_message_size = MAX_MESSAGE_SIZE + self.max_write_batch_size = MAX_WRITE_BATCH_SIZE + self.supports_sessions = False + self.hello_ok: bool = False + self.is_mongos = False + self.op_msg_enabled = False + self.listeners = pool.opts._event_listeners + self.enabled_for_cmap = pool.enabled_for_cmap + self.compression_settings = pool.opts._compression_settings + self.compression_context: Union[SnappyContext, ZlibContext, ZstdContext, None] = None + self.socket_checker: SocketChecker = SocketChecker() + self.oidc_token_gen_id: Optional[int] = None + # Support for mechanism negotiation on the initial handshake. 
+ self.negotiated_mechs: Optional[List[str]] = None + self.auth_ctx: Optional[_AuthContext] = None + + # The pool's generation changes with each reset() so we can close + # sockets created before the last reset. + self.pool_gen = pool.gen + self.generation = self.pool_gen.get_overall() + self.ready = False + self.cancel_context: Optional[_CancellationContext] = None + if not pool.handshake: + # This is a Monitor connection. + self.cancel_context = _CancellationContext() + self.opts = pool.opts + self.more_to_come: bool = False + # For load balancer support. + self.service_id: Optional[ObjectId] = None + # When executing a transaction in load balancing mode, this flag is + # set to true to indicate that the session now owns the connection. + self.pinned_txn = False + self.pinned_cursor = False + self.active = False + self.last_timeout = self.opts.socket_timeout + self.connect_rtt = 0.0 + + def set_conn_timeout(self, timeout: Optional[float]) -> None: + """Cache last timeout to avoid duplicate calls to conn.settimeout.""" + if timeout == self.last_timeout: + return + self.last_timeout = timeout + self.conn.settimeout(timeout) + + def apply_timeout( + self, client: MongoClient, cmd: Optional[MutableMapping[str, Any]] + ) -> Optional[float]: + # CSOT: use remaining timeout when set. + timeout = _csot.remaining() + if timeout is None: + # Reset the socket timeout unless we're performing a streaming monitor check. + if not self.more_to_come: + self.set_conn_timeout(self.opts.socket_timeout) + return None + # RTT validation. + rtt = _csot.get_rtt() + if rtt is None: + rtt = self.connect_rtt + max_time_ms = timeout - rtt + if max_time_ms < 0: + # CSOT: raise an error without running the command since we know it will time out. + errmsg = f"operation would exceed time limit, remaining timeout:{timeout:.5f} <= network round trip time:{rtt:.5f}" + raise ExecutionTimeout( + errmsg, 50, {"ok": 0, "errmsg": errmsg, "code": 50}, self.max_wire_version + ) + if cmd is not None: + cmd["maxTimeMS"] = int(max_time_ms * 1000) + self.set_conn_timeout(timeout) + return timeout + + def pin_txn(self) -> None: + self.pinned_txn = True + assert not self.pinned_cursor + + def pin_cursor(self) -> None: + self.pinned_cursor = True + assert not self.pinned_txn + + def unpin(self) -> None: + pool = self.pool_ref() + if pool: + pool.checkin(self) + else: + self.close_conn(ConnectionClosedReason.STALE) + + def hello_cmd(self) -> SON[str, Any]: + # Handshake spec requires us to use OP_MSG+hello command for the + # initial handshake in load balanced or stable API mode. 
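+ # Otherwise, fall back to the legacy command and send ``helloOk: True``
+ # so the server can indicate, via ``helloOk`` in its reply, that the
+ # modern hello command may be used from then on.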
+ if self.opts.server_api or self.hello_ok or self.opts.load_balanced: + self.op_msg_enabled = True + return SON([(HelloCompat.CMD, 1)]) + else: + return SON([(HelloCompat.LEGACY_CMD, 1), ("helloOk", True)]) + + def hello(self) -> Hello[Dict[str, Any]]: + return self._hello(None, None, None) + + def _hello( + self, + cluster_time: Optional[ClusterTime], + topology_version: Optional[Any], + heartbeat_frequency: Optional[int], + ) -> Hello[Dict[str, Any]]: + cmd = self.hello_cmd() + performing_handshake = not self.performed_handshake + awaitable = False + if performing_handshake: + self.performed_handshake = True + cmd["client"] = self.opts.metadata + if self.compression_settings: + cmd["compression"] = self.compression_settings.compressors + if self.opts.load_balanced: + cmd["loadBalanced"] = True + elif topology_version is not None: + cmd["topologyVersion"] = topology_version + assert heartbeat_frequency is not None + cmd["maxAwaitTimeMS"] = int(heartbeat_frequency * 1000) + awaitable = True + # If connect_timeout is None there is no timeout. + if self.opts.connect_timeout: + self.set_conn_timeout(self.opts.connect_timeout + heartbeat_frequency) + + if not performing_handshake and cluster_time is not None: + cmd["$clusterTime"] = cluster_time + + creds = self.opts._credentials + if creds: + if creds.mechanism == "DEFAULT" and creds.username: + cmd["saslSupportedMechs"] = creds.source + "." + creds.username + auth_ctx = auth._AuthContext.from_credentials(creds, self.address) + if auth_ctx: + speculative_authenticate = auth_ctx.speculate_command() + if speculative_authenticate is not None: + cmd["speculativeAuthenticate"] = speculative_authenticate + else: + auth_ctx = None + + if performing_handshake: + start = time.monotonic() + doc = self.command("admin", cmd, publish_events=False, exhaust_allowed=awaitable) + if performing_handshake: + self.connect_rtt = time.monotonic() - start + hello = Hello(doc, awaitable=awaitable) + self.is_writable = hello.is_writable + self.max_wire_version = hello.max_wire_version + self.max_bson_size = hello.max_bson_size + self.max_message_size = hello.max_message_size + self.max_write_batch_size = hello.max_write_batch_size + self.supports_sessions = hello.logical_session_timeout_minutes is not None + self.hello_ok = hello.hello_ok + self.is_repl = hello.server_type in ( + SERVER_TYPE.RSPrimary, + SERVER_TYPE.RSSecondary, + SERVER_TYPE.RSArbiter, + SERVER_TYPE.RSOther, + SERVER_TYPE.RSGhost, + ) + self.is_standalone = hello.server_type == SERVER_TYPE.Standalone + self.is_mongos = hello.server_type == SERVER_TYPE.Mongos + if performing_handshake and self.compression_settings: + ctx = self.compression_settings.get_compression_context(hello.compressors) + self.compression_context = ctx + + self.op_msg_enabled = True + if creds: + self.negotiated_mechs = hello.sasl_supported_mechs + if auth_ctx: + auth_ctx.parse_response(hello) + if auth_ctx.speculate_succeeded(): + self.auth_ctx = auth_ctx + if self.opts.load_balanced: + if not hello.service_id: + raise ConfigurationError( + "Driver attempted to initialize in load balancing mode," + " but the server does not support this mode" + ) + self.service_id = hello.service_id + self.generation = self.pool_gen.get(self.service_id) + return hello + + def _next_reply(self) -> Dict[str, Any]: + reply = self.receive_message(None) + self.more_to_come = reply.more_to_come + unpacked_docs = reply.unpack_response() + response_doc = unpacked_docs[0] + helpers._check_command_response(response_doc, self.max_wire_version) + return 
response_doc + + @_handle_reauth + def command( + self, + dbname: str, + spec: MutableMapping[str, Any], + read_preference: _ServerMode = ReadPreference.PRIMARY, + codec_options: CodecOptions = DEFAULT_CODEC_OPTIONS, + check: bool = True, + allowable_errors: Optional[Sequence[Union[str, int]]] = None, + read_concern: Optional[ReadConcern] = None, + write_concern: Optional[WriteConcern] = None, + parse_write_concern_error: bool = False, + collation: Optional[_CollationIn] = None, + session: Optional[ClientSession] = None, + client: Optional[MongoClient] = None, + retryable_write: bool = False, + publish_events: bool = True, + user_fields: Optional[Mapping[str, Any]] = None, + exhaust_allowed: bool = False, + ) -> Dict[str, Any]: + """Execute a command or raise an error. + + :Parameters: + - `dbname`: name of the database on which to run the command + - `spec`: a command document as a dict, SON, or mapping object + - `read_preference`: a read preference + - `codec_options`: a CodecOptions instance + - `check`: raise OperationFailure if there are errors + - `allowable_errors`: errors to ignore if `check` is True + - `read_concern`: The read concern for this command. + - `write_concern`: The write concern for this command. + - `parse_write_concern_error`: Whether to parse the + ``writeConcernError`` field in the command response. + - `collation`: The collation for this command. + - `session`: optional ClientSession instance. + - `client`: optional MongoClient for gossipping $clusterTime. + - `retryable_write`: True if this command is a retryable write. + - `publish_events`: Should we publish events for this command? + - `user_fields` (optional): Response fields that should be decoded + using the TypeDecoders from codec_options, passed to + bson._decode_all_selective. + """ + self.validate_session(client, session) + session = _validate_session_write_concern(session, write_concern) + + # Ensure command name remains in first place. + if not isinstance(spec, ORDERED_TYPES): # type:ignore[arg-type] + spec = SON(spec) + + if not (write_concern is None or write_concern.acknowledged or collation is None): + raise ConfigurationError("Collation is unsupported for unacknowledged writes.") + + self.add_server_api(spec) + if session: + session._apply_to(spec, retryable_write, read_preference, self) + self.send_cluster_time(spec, session, client) + listeners = self.listeners if publish_events else None + unacknowledged = bool(write_concern and not write_concern.acknowledged) + if self.op_msg_enabled: + self._raise_if_not_writable(unacknowledged) + try: + return command( + self, + dbname, + spec, + self.is_mongos, + read_preference, + codec_options, + session, + client, + check, + allowable_errors, + self.address, + listeners, + self.max_bson_size, + read_concern, + parse_write_concern_error=parse_write_concern_error, + collation=collation, + compression_ctx=self.compression_context, + use_op_msg=self.op_msg_enabled, + unacknowledged=unacknowledged, + user_fields=user_fields, + exhaust_allowed=exhaust_allowed, + write_concern=write_concern, + ) + except (OperationFailure, NotPrimaryError): + raise + # Catch socket.error, KeyboardInterrupt, etc. and close ourselves. + except BaseException as error: + self._raise_connection_failure(error) + + def send_message(self, message: bytes, max_doc_size: int) -> None: + """Send a raw BSON message or raise ConnectionFailure. + + If a network exception is raised, the socket is closed. 
+ """ + if self.max_bson_size is not None and max_doc_size > self.max_bson_size: + raise DocumentTooLarge( + "BSON document too large (%d bytes) - the connected server " + "supports BSON document sizes up to %d bytes." % (max_doc_size, self.max_bson_size) + ) + + try: + self.conn.sendall(message) + except BaseException as error: + self._raise_connection_failure(error) + + def receive_message(self, request_id: Optional[int]) -> Union[_OpReply, _OpMsg]: + """Receive a raw BSON message or raise ConnectionFailure. + + If any exception is raised, the socket is closed. + """ + try: + return receive_message(self, request_id, self.max_message_size) + except BaseException as error: + self._raise_connection_failure(error) + + def _raise_if_not_writable(self, unacknowledged: bool) -> None: + """Raise NotPrimaryError on unacknowledged write if this socket is not + writable. + """ + if unacknowledged and not self.is_writable: + # Write won't succeed, bail as if we'd received a not primary error. + raise NotPrimaryError("not primary", {"ok": 0, "errmsg": "not primary", "code": 10107}) + + def unack_write(self, msg: bytes, max_doc_size: int) -> None: + """Send unack OP_MSG. + + Can raise ConnectionFailure or InvalidDocument. + + :Parameters: + - `msg`: bytes, an OP_MSG message. + - `max_doc_size`: size in bytes of the largest document in `msg`. + """ + self._raise_if_not_writable(True) + self.send_message(msg, max_doc_size) + + def write_command( + self, request_id: int, msg: bytes, codec_options: CodecOptions + ) -> Dict[str, Any]: + """Send "insert" etc. command, returning response as a dict. + + Can raise ConnectionFailure or OperationFailure. + + :Parameters: + - `request_id`: an int. + - `msg`: bytes, the command message. + """ + self.send_message(msg, 0) + reply = self.receive_message(request_id) + result = reply.command_response(codec_options) + + # Raises NotPrimaryError or OperationFailure. + helpers._check_command_response(result, self.max_wire_version) + return result + + def authenticate(self, reauthenticate: bool = False) -> None: + """Authenticate to the server if needed. + + Can raise ConnectionFailure or OperationFailure. + """ + # CMAP spec says to publish the ready event only after authenticating + # the connection. + if reauthenticate: + if self.performed_handshake: + # Existing auth_ctx is stale, remove it. + self.auth_ctx = None + self.ready = False + if not self.ready: + creds = self.opts._credentials + if creds: + auth.authenticate(creds, self, reauthenticate=reauthenticate) + self.ready = True + if self.enabled_for_cmap: + assert self.listeners is not None + self.listeners.publish_connection_ready(self.address, self.id) + + def validate_session( + self, client: Optional[MongoClient], session: Optional[ClientSession] + ) -> None: + """Validate this session before use with client. + + Raises error if the client is not the one that created the session. 
+ """ + if session: + if session._client is not client: + raise InvalidOperation("Can only use session with the MongoClient that started it") + + def close_conn(self, reason: Optional[str]) -> None: + """Close this connection with a reason.""" + if self.closed: + return + self._close_conn() + if reason and self.enabled_for_cmap: + assert self.listeners is not None + self.listeners.publish_connection_closed(self.address, self.id, reason) + + def _close_conn(self) -> None: + """Close this connection.""" + if self.closed: + return + self.closed = True + if self.cancel_context: + self.cancel_context.cancel() + # Note: We catch exceptions to avoid spurious errors on interpreter + # shutdown. + try: + self.conn.close() + except Exception: + pass + + def conn_closed(self) -> bool: + """Return True if we know socket has been closed, False otherwise.""" + return self.socket_checker.socket_closed(self.conn) + + def send_cluster_time( + self, + command: MutableMapping[str, Any], + session: Optional[ClientSession], + client: Optional[MongoClient], + ) -> None: + """Add $clusterTime.""" + if client: + client._send_cluster_time(command, session) + + def add_server_api(self, command: MutableMapping[str, Any]) -> None: + """Add server_api parameters.""" + if self.opts.server_api: + _add_to_command(command, self.opts.server_api) + + def update_last_checkin_time(self) -> None: + self.last_checkin_time = time.monotonic() + + def update_is_writable(self, is_writable: bool) -> None: + self.is_writable = is_writable + + def idle_time_seconds(self) -> float: + """Seconds since this socket was last checked into its pool.""" + return time.monotonic() - self.last_checkin_time + + def _raise_connection_failure(self, error: BaseException) -> NoReturn: + # Catch *all* exceptions from socket methods and close the socket. In + # regular Python, socket operations only raise socket.error, even if + # the underlying cause was a Ctrl-C: a signal raised during socket.recv + # is expressed as an EINTR error from poll. See internal_select_ex() in + # socketmodule.c. All error codes from poll become socket.error at + # first. Eventually in PyEval_EvalFrameEx the interpreter checks for + # signals and throws KeyboardInterrupt into the current frame on the + # main thread. + # + # But in Gevent and Eventlet, the polling mechanism (epoll, kqueue, + # ...) is called in Python code, which experiences the signal as a + # KeyboardInterrupt from the start, rather than as an initial + # socket.error, so we catch that, close the socket, and reraise it. + # + # The connection closed event will be emitted later in checkin. + if self.ready: + reason = None + else: + reason = ConnectionClosedReason.ERROR + self.close_conn(reason) + # SSLError from PyOpenSSL inherits directly from Exception. + if isinstance(error, (IOError, OSError, SSLError)): + _raise_connection_failure(self.address, error) + else: + raise + + def __eq__(self, other: Any) -> bool: + return self.conn == other.conn + + def __ne__(self, other: Any) -> bool: + return not self == other + + def __hash__(self) -> int: + return hash(self.conn) + + def __repr__(self) -> str: + return "Connection({}){} at {}".format( + repr(self.conn), + self.closed and " CLOSED" or "", + id(self), + ) + + +def _create_connection(address: _Address, options: PoolOptions) -> socket.socket: + """Given (host, port) and PoolOptions, connect and return a socket object. + + Can raise socket.error. + + This is a modified version of create_connection from CPython >= 2.7. 
+ """ + host, port = address + + # Check if dealing with a unix domain socket + if host.endswith(".sock"): + if not hasattr(socket, "AF_UNIX"): + raise ConnectionFailure("UNIX-sockets are not supported on this system") + sock = socket.socket(socket.AF_UNIX) + # SOCK_CLOEXEC not supported for Unix sockets. + _set_non_inheritable_non_atomic(sock.fileno()) + try: + sock.connect(host) + return sock + except OSError: + sock.close() + raise + + # Don't try IPv6 if we don't support it. Also skip it if host + # is 'localhost' (::1 is fine). Avoids slow connect issues + # like PYTHON-356. + family = socket.AF_INET + if socket.has_ipv6 and host != "localhost": + family = socket.AF_UNSPEC + + err = None + for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): + af, socktype, proto, dummy, sa = res + # SOCK_CLOEXEC was new in CPython 3.2, and only available on a limited + # number of platforms (newer Linux and *BSD). Starting with CPython 3.4 + # all file descriptors are created non-inheritable. See PEP 446. + try: + sock = socket.socket(af, socktype | getattr(socket, "SOCK_CLOEXEC", 0), proto) + except OSError: + # Can SOCK_CLOEXEC be defined even if the kernel doesn't support + # it? + sock = socket.socket(af, socktype, proto) + # Fallback when SOCK_CLOEXEC isn't available. + _set_non_inheritable_non_atomic(sock.fileno()) + try: + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + # CSOT: apply timeout to socket connect. + timeout = _csot.remaining() + if timeout is None: + timeout = options.connect_timeout + elif timeout <= 0: + raise socket.timeout("timed out") + sock.settimeout(timeout) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, True) + _set_keepalive_times(sock) + sock.connect(sa) + return sock + except OSError as e: + err = e + sock.close() + + if err is not None: + raise err + else: + # This likely means we tried to connect to an IPv6 only + # host with an OS/kernel or Python interpreter that doesn't + # support IPv6. The test case is Jython2.5.1 which doesn't + # support IPv6 at all. + raise OSError("getaddrinfo failed") + + +def _configured_socket(address: _Address, options: PoolOptions) -> Union[socket.socket, _sslConn]: + """Given (host, port) and PoolOptions, return a configured socket. + + Can raise socket.error, ConnectionFailure, or _CertificateError. + + Sets socket's SSL and timeout options. + """ + sock = _create_connection(address, options) + ssl_context = options._ssl_context + + if ssl_context is None: + sock.settimeout(options.socket_timeout) + return sock + + host = address[0] + try: + # We have to pass hostname / ip address to wrap_socket + # to use SSLContext.check_hostname. + if HAS_SNI: + ssl_sock = ssl_context.wrap_socket(sock, server_hostname=host) + else: + ssl_sock = ssl_context.wrap_socket(sock) + except _CertificateError: + sock.close() + # Raise _CertificateError directly like we do after match_hostname + # below. + raise + except (OSError, SSLError) as exc: # noqa: B014 + sock.close() + # We raise AutoReconnect for transient and permanent SSL handshake + # failures alike. Permanent handshake failures, like protocol + # mismatch, will be turned into ServerSelectionTimeoutErrors later. 
+ _raise_connection_failure(address, exc, "SSL handshake failed: ") + if ( + ssl_context.verify_mode + and not ssl_context.check_hostname + and not options.tls_allow_invalid_hostnames + ): + try: + ssl.match_hostname(ssl_sock.getpeercert(), hostname=host) + except _CertificateError: + ssl_sock.close() + raise + + ssl_sock.settimeout(options.socket_timeout) + return ssl_sock + + +class _PoolClosedError(PyMongoError): + """Internal error raised when a thread tries to get a connection from a + closed pool. + """ + + +class _PoolGeneration: + def __init__(self) -> None: + # Maps service_id to generation. + self._generations: Dict[ObjectId, int] = collections.defaultdict(int) + # Overall pool generation. + self._generation = 0 + + def get(self, service_id: Optional[ObjectId]) -> int: + """Get the generation for the given service_id.""" + if service_id is None: + return self._generation + return self._generations[service_id] + + def get_overall(self) -> int: + """Get the Pool's overall generation.""" + return self._generation + + def inc(self, service_id: Optional[ObjectId]) -> None: + """Increment the generation for the given service_id.""" + self._generation += 1 + if service_id is None: + for service_id in self._generations: + self._generations[service_id] += 1 + else: + self._generations[service_id] += 1 + + def stale(self, gen: int, service_id: Optional[ObjectId]) -> bool: + """Return if the given generation for a given service_id is stale.""" + return gen != self.get(service_id) + + +class PoolState: + PAUSED = 1 + READY = 2 + CLOSED = 3 + + +# Do *not* explicitly inherit from object or Jython won't call __del__ +# http://bugs.jython.org/issue1057 +class Pool: + def __init__(self, address: _Address, options: PoolOptions, handshake: bool = True): + """ + :Parameters: + - `address`: a (hostname, port) tuple + - `options`: a PoolOptions instance + - `handshake`: whether to call hello for each new Connection + """ + if options.pause_enabled: + self.state = PoolState.PAUSED + else: + self.state = PoolState.READY + # Check a socket's health with socket_closed() every once in a while. + # Can override for testing: 0 to always check, None to never check. + self._check_interval_seconds = 1 + # LIFO pool. Sockets are ordered on idle time. Sockets claimed + # and returned to pool from the left side. Stale sockets removed + # from the right side. + self.conns: collections.deque = collections.deque() + self.lock = _create_lock() + self.active_sockets = 0 + # Monotonically increasing connection ID required for CMAP Events. + self.next_connection_id = 1 + # Track whether the sockets in this pool are writeable or not. + self.is_writable: Optional[bool] = None + + # Keep track of resets, so we notice sockets created before the most + # recent reset and close them. + # self.generation = 0 + self.gen = _PoolGeneration() + self.pid = os.getpid() + self.address = address + self.opts = options + self.handshake = handshake + # Don't publish events in Monitor pools. + self.enabled_for_cmap = ( + self.handshake + and self.opts._event_listeners is not None + and self.opts._event_listeners.enabled_for_cmap + ) + + # The first portion of the wait queue. + # Enforces: maxPoolSize + # Also used for: clearing the wait queue + self.size_cond = threading.Condition(self.lock) + self.requests = 0 + self.max_pool_size = self.opts.max_pool_size + if not self.max_pool_size: + self.max_pool_size = float("inf") + # The second portion of the wait queue. 
+ # Enforces: maxConnecting + # Also used for: clearing the wait queue + self._max_connecting_cond = threading.Condition(self.lock) + self._max_connecting = self.opts.max_connecting + self._pending = 0 + if self.enabled_for_cmap: + assert self.opts._event_listeners is not None + self.opts._event_listeners.publish_pool_created( + self.address, self.opts.non_default_options + ) + # Similar to active_sockets but includes threads in the wait queue. + self.operation_count = 0 + # Retain references to pinned connections to prevent the CPython GC + # from thinking that a cursor's pinned connection can be GC'd when the + # cursor is GC'd (see PYTHON-2751). + self.__pinned_sockets: Set[Connection] = set() + self.ncursors = 0 + self.ntxns = 0 + + def ready(self) -> None: + # Take the lock to avoid the race condition described in PYTHON-2699. + with self.lock: + if self.state != PoolState.READY: + self.state = PoolState.READY + if self.enabled_for_cmap: + assert self.opts._event_listeners is not None + self.opts._event_listeners.publish_pool_ready(self.address) + + @property + def closed(self) -> bool: + return self.state == PoolState.CLOSED + + def _reset( + self, close: bool, pause: bool = True, service_id: Optional[ObjectId] = None + ) -> None: + old_state = self.state + with self.size_cond: + if self.closed: + return + if self.opts.pause_enabled and pause and not self.opts.load_balanced: + old_state, self.state = self.state, PoolState.PAUSED + self.gen.inc(service_id) + newpid = os.getpid() + if self.pid != newpid: + self.pid = newpid + self.active_sockets = 0 + self.operation_count = 0 + if service_id is None: + sockets, self.conns = self.conns, collections.deque() + else: + discard: collections.deque = collections.deque() + keep: collections.deque = collections.deque() + for conn in self.conns: + if conn.service_id == service_id: + discard.append(conn) + else: + keep.append(conn) + sockets = discard + self.conns = keep + + if close: + self.state = PoolState.CLOSED + # Clear the wait queue + self._max_connecting_cond.notify_all() + self.size_cond.notify_all() + + listeners = self.opts._event_listeners + # CMAP spec says that close() MUST close sockets before publishing the + # PoolClosedEvent but that reset() SHOULD close sockets *after* + # publishing the PoolClearedEvent. + if close: + for conn in sockets: + conn.close_conn(ConnectionClosedReason.POOL_CLOSED) + if self.enabled_for_cmap: + assert listeners is not None + listeners.publish_pool_closed(self.address) + else: + if old_state != PoolState.PAUSED and self.enabled_for_cmap: + assert listeners is not None + listeners.publish_pool_cleared(self.address, service_id=service_id) + for conn in sockets: + conn.close_conn(ConnectionClosedReason.STALE) + + def update_is_writable(self, is_writable: Optional[bool]) -> None: + """Updates the is_writable attribute on all sockets currently in the + Pool. 
+ """ + self.is_writable = is_writable + with self.lock: + for _socket in self.conns: + _socket.update_is_writable(self.is_writable) + + def reset(self, service_id: Optional[ObjectId] = None) -> None: + self._reset(close=False, service_id=service_id) + + def reset_without_pause(self) -> None: + self._reset(close=False, pause=False) + + def close(self) -> None: + self._reset(close=True) + + def stale_generation(self, gen: int, service_id: Optional[ObjectId]) -> bool: + return self.gen.stale(gen, service_id) + + def remove_stale_sockets(self, reference_generation: int) -> None: + """Removes stale sockets then adds new ones if pool is too small and + has not been reset. The `reference_generation` argument specifies the + `generation` at the point in time this operation was requested on the + pool. + """ + # Take the lock to avoid the race condition described in PYTHON-2699. + with self.lock: + if self.state != PoolState.READY: + return + + if self.opts.max_idle_time_seconds is not None: + with self.lock: + while ( + self.conns + and self.conns[-1].idle_time_seconds() > self.opts.max_idle_time_seconds + ): + conn = self.conns.pop() + conn.close_conn(ConnectionClosedReason.IDLE) + + while True: + with self.size_cond: + # There are enough sockets in the pool. + if len(self.conns) + self.active_sockets >= self.opts.min_pool_size: + return + if self.requests >= self.opts.min_pool_size: + return + self.requests += 1 + incremented = False + try: + with self._max_connecting_cond: + # If maxConnecting connections are already being created + # by this pool then try again later instead of waiting. + if self._pending >= self._max_connecting: + return + self._pending += 1 + incremented = True + conn = self.connect() + with self.lock: + # Close connection and return if the pool was reset during + # socket creation or while acquiring the pool lock. + if self.gen.get_overall() != reference_generation: + conn.close_conn(ConnectionClosedReason.STALE) + return + self.conns.appendleft(conn) + finally: + if incremented: + # Notify after adding the socket to the pool. + with self._max_connecting_cond: + self._pending -= 1 + self._max_connecting_cond.notify() + + with self.size_cond: + self.requests -= 1 + self.size_cond.notify() + + def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> Connection: + """Connect to Mongo and return a new Connection. + + Can raise ConnectionFailure. + + Note that the pool does not keep a reference to the socket -- you + must call checkin() when you're done with it. 
+ """ + with self.lock: + conn_id = self.next_connection_id + self.next_connection_id += 1 + + listeners = self.opts._event_listeners + if self.enabled_for_cmap: + assert listeners is not None + listeners.publish_connection_created(self.address, conn_id) + + try: + sock = _configured_socket(self.address, self.opts) + except BaseException as error: + if self.enabled_for_cmap: + assert listeners is not None + listeners.publish_connection_closed( + self.address, conn_id, ConnectionClosedReason.ERROR + ) + + if isinstance(error, (IOError, OSError, SSLError)): + _raise_connection_failure(self.address, error) + + raise + + conn = Connection(sock, self, self.address, conn_id) # type: ignore[arg-type] + try: + if self.handshake: + conn.hello() + self.is_writable = conn.is_writable + if handler: + handler.contribute_socket(conn, completed_handshake=False) + + conn.authenticate() + except BaseException: + conn.close_conn(ConnectionClosedReason.ERROR) + raise + + return conn + + @contextlib.contextmanager + def checkout(self, handler: Optional[_MongoClientErrorHandler] = None) -> Iterator[Connection]: + """Get a connection from the pool. Use with a "with" statement. + + Returns a :class:`Connection` object wrapping a connected + :class:`socket.socket`. + + This method should always be used in a with-statement:: + + with pool.get_conn() as connection: + connection.send_message(msg) + data = connection.receive_message(op_code, request_id) + + Can raise ConnectionFailure or OperationFailure. + + :Parameters: + - `handler` (optional): A _MongoClientErrorHandler. + """ + listeners = self.opts._event_listeners + if self.enabled_for_cmap: + assert listeners is not None + listeners.publish_connection_check_out_started(self.address) + + conn = self._get_conn(handler=handler) + + if self.enabled_for_cmap: + assert listeners is not None + listeners.publish_connection_checked_out(self.address, conn.id) + try: + yield conn + except BaseException: + # Exception in caller. Ensure the connection gets returned. + # Note that when pinned is True, the session owns the + # connection and it is responsible for checking the connection + # back into the pool. + pinned = conn.pinned_txn or conn.pinned_cursor + if handler: + # Perform SDAM error handling rules while the connection is + # still checked out. + exc_type, exc_val, _ = sys.exc_info() + handler.handle(exc_type, exc_val) + if not pinned and conn.active: + self.checkin(conn) + raise + if conn.pinned_txn: + with self.lock: + self.__pinned_sockets.add(conn) + self.ntxns += 1 + elif conn.pinned_cursor: + with self.lock: + self.__pinned_sockets.add(conn) + self.ncursors += 1 + elif conn.active: + self.checkin(conn) + + def _raise_if_not_ready(self, emit_event: bool) -> None: + if self.state != PoolState.READY: + if self.enabled_for_cmap and emit_event: + assert self.opts._event_listeners is not None + self.opts._event_listeners.publish_connection_check_out_failed( + self.address, ConnectionCheckOutFailedReason.CONN_ERROR + ) + _raise_connection_failure(self.address, AutoReconnect("connection pool paused")) + + def _get_conn(self, handler: Optional[_MongoClientErrorHandler] = None) -> Connection: + """Get or create a Connection. Can raise ConnectionFailure.""" + # We use the pid here to avoid issues with fork / multiprocessing. 
+ # See test.test_client:TestClient.test_fork for an example of + # what could go wrong otherwise + if self.pid != os.getpid(): + self.reset_without_pause() + + if self.closed: + if self.enabled_for_cmap: + assert self.opts._event_listeners is not None + self.opts._event_listeners.publish_connection_check_out_failed( + self.address, ConnectionCheckOutFailedReason.POOL_CLOSED + ) + raise _PoolClosedError( + "Attempted to check out a connection from closed connection pool" + ) + + with self.lock: + self.operation_count += 1 + + # Get a free socket or create one. + if _csot.get_timeout(): + deadline = _csot.get_deadline() + elif self.opts.wait_queue_timeout: + deadline = time.monotonic() + self.opts.wait_queue_timeout + else: + deadline = None + + with self.size_cond: + self._raise_if_not_ready(emit_event=True) + while not (self.requests < self.max_pool_size): + if not _cond_wait(self.size_cond, deadline): + # Timed out, notify the next thread to ensure a + # timeout doesn't consume the condition. + if self.requests < self.max_pool_size: + self.size_cond.notify() + self._raise_wait_queue_timeout() + self._raise_if_not_ready(emit_event=True) + self.requests += 1 + + # We've now acquired the semaphore and must release it on error. + conn = None + incremented = False + emitted_event = False + try: + with self.lock: + self.active_sockets += 1 + incremented = True + + while conn is None: + # CMAP: we MUST wait for either maxConnecting OR for a socket + # to be checked back into the pool. + with self._max_connecting_cond: + self._raise_if_not_ready(emit_event=False) + while not (self.conns or self._pending < self._max_connecting): + if not _cond_wait(self._max_connecting_cond, deadline): + # Timed out, notify the next thread to ensure a + # timeout doesn't consume the condition. + if self.conns or self._pending < self._max_connecting: + self._max_connecting_cond.notify() + emitted_event = True + self._raise_wait_queue_timeout() + self._raise_if_not_ready(emit_event=False) + + try: + conn = self.conns.popleft() + except IndexError: + self._pending += 1 + if conn: # We got a socket from the pool + if self._perished(conn): + conn = None + continue + else: # We need to create a new connection + try: + conn = self.connect(handler=handler) + finally: + with self._max_connecting_cond: + self._pending -= 1 + self._max_connecting_cond.notify() + except BaseException: + if conn: + # We checked out a socket but authentication failed. + conn.close_conn(ConnectionClosedReason.ERROR) + with self.size_cond: + self.requests -= 1 + if incremented: + self.active_sockets -= 1 + self.size_cond.notify() + + if self.enabled_for_cmap and not emitted_event: + assert self.opts._event_listeners is not None + self.opts._event_listeners.publish_connection_check_out_failed( + self.address, ConnectionCheckOutFailedReason.CONN_ERROR + ) + raise + + conn.active = True + return conn + + def checkin(self, conn: Connection) -> None: + """Return the connection to the pool, or if it's closed discard it. + + :Parameters: + - `conn`: The connection to check into the pool. 
+ """ + txn = conn.pinned_txn + cursor = conn.pinned_cursor + conn.active = False + conn.pinned_txn = False + conn.pinned_cursor = False + self.__pinned_sockets.discard(conn) + listeners = self.opts._event_listeners + if self.enabled_for_cmap: + assert listeners is not None + listeners.publish_connection_checked_in(self.address, conn.id) + if self.pid != os.getpid(): + self.reset_without_pause() + else: + if self.closed: + conn.close_conn(ConnectionClosedReason.POOL_CLOSED) + elif conn.closed: + # CMAP requires the closed event be emitted after the check in. + if self.enabled_for_cmap: + assert listeners is not None + listeners.publish_connection_closed( + self.address, conn.id, ConnectionClosedReason.ERROR + ) + else: + with self.lock: + # Hold the lock to ensure this section does not race with + # Pool.reset(). + if self.stale_generation(conn.generation, conn.service_id): + conn.close_conn(ConnectionClosedReason.STALE) + else: + conn.update_last_checkin_time() + conn.update_is_writable(bool(self.is_writable)) + self.conns.appendleft(conn) + # Notify any threads waiting to create a connection. + self._max_connecting_cond.notify() + + with self.size_cond: + if txn: + self.ntxns -= 1 + elif cursor: + self.ncursors -= 1 + self.requests -= 1 + self.active_sockets -= 1 + self.operation_count -= 1 + self.size_cond.notify() + + def _perished(self, conn: Connection) -> bool: + """Return True and close the connection if it is "perished". + + This side-effecty function checks if this socket has been idle for + for longer than the max idle time, or if the socket has been closed by + some external network error, or if the socket's generation is outdated. + + Checking sockets lets us avoid seeing *some* + :class:`~pymongo.errors.AutoReconnect` exceptions on server + hiccups, etc. We only check if the socket was closed by an external + error if it has been > 1 second since the socket was checked into the + pool, to keep performance reasonable - we can't avoid AutoReconnects + completely anyway. + """ + idle_time_seconds = conn.idle_time_seconds() + # If socket is idle, open a new one. + if ( + self.opts.max_idle_time_seconds is not None + and idle_time_seconds > self.opts.max_idle_time_seconds + ): + conn.close_conn(ConnectionClosedReason.IDLE) + return True + + if self._check_interval_seconds is not None and ( + 0 == self._check_interval_seconds or idle_time_seconds > self._check_interval_seconds + ): + if conn.conn_closed(): + conn.close_conn(ConnectionClosedReason.ERROR) + return True + + if self.stale_generation(conn.generation, conn.service_id): + conn.close_conn(ConnectionClosedReason.STALE) + return True + + return False + + def _raise_wait_queue_timeout(self) -> NoReturn: + listeners = self.opts._event_listeners + if self.enabled_for_cmap: + assert listeners is not None + listeners.publish_connection_check_out_failed( + self.address, ConnectionCheckOutFailedReason.TIMEOUT + ) + timeout = _csot.get_timeout() or self.opts.wait_queue_timeout + if self.opts.load_balanced: + other_ops = self.active_sockets - self.ncursors - self.ntxns + raise WaitQueueTimeoutError( + "Timeout waiting for connection from the connection pool. " + "maxPoolSize: {}, connections in use by cursors: {}, " + "connections in use by transactions: {}, connections in use " + "by other operations: {}, timeout: {}".format( + self.opts.max_pool_size, + self.ncursors, + self.ntxns, + other_ops, + timeout, + ) + ) + raise WaitQueueTimeoutError( + "Timed out while checking out a connection from connection pool. 
" + "maxPoolSize: {}, timeout: {}".format(self.opts.max_pool_size, timeout) + ) + + def __del__(self) -> None: + # Avoid ResourceWarnings in Python 3 + # Close all sockets without calling reset() or close() because it is + # not safe to acquire a lock in __del__. + for conn in self.conns: + conn.close_conn(None) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/py.typed b/backend/test/lib/python3.8/site-packages/pymongo/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..0f4057061a763b445a4300825a450069a96f5719 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/py.typed @@ -0,0 +1,2 @@ +# PEP-561 Support File. +# "Package maintainers who wish to support type checking of their code MUST add a marker file named py.typed to their package supporting typing". diff --git a/backend/test/lib/python3.8/site-packages/pymongo/pyopenssl_context.py b/backend/test/lib/python3.8/site-packages/pymongo/pyopenssl_context.py new file mode 100644 index 0000000000000000000000000000000000000000..c169173816d1289ecbf3489fe2f9781d6512d067 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/pyopenssl_context.py @@ -0,0 +1,414 @@ +# Copyright 2019-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""A CPython compatible SSLContext implementation wrapping PyOpenSSL's +context. 
+""" +from __future__ import annotations + +import socket as _socket +import ssl as _stdlibssl +import sys as _sys +import time as _time +from errno import EINTR as _EINTR +from ipaddress import ip_address as _ip_address +from typing import TYPE_CHECKING, Any, Callable, List, Optional, TypeVar, Union + +from cryptography.x509 import load_der_x509_certificate as _load_der_x509_certificate +from OpenSSL import SSL as _SSL +from OpenSSL import crypto as _crypto +from service_identity import CertificateError as _SICertificateError +from service_identity import VerificationError as _SIVerificationError +from service_identity.pyopenssl import verify_hostname as _verify_hostname +from service_identity.pyopenssl import verify_ip_address as _verify_ip_address + +from pymongo.errors import ConfigurationError as _ConfigurationError +from pymongo.errors import _CertificateError +from pymongo.ocsp_cache import _OCSPCache +from pymongo.ocsp_support import _load_trusted_ca_certs, _ocsp_callback +from pymongo.socket_checker import SocketChecker as _SocketChecker +from pymongo.socket_checker import _errno_from_exception +from pymongo.write_concern import validate_boolean + +if TYPE_CHECKING: + from ssl import VerifyMode + + from cryptography.x509 import Certificate + +_T = TypeVar("_T") + +try: + import certifi + + _HAVE_CERTIFI = True +except ImportError: + _HAVE_CERTIFI = False + +PROTOCOL_SSLv23 = _SSL.SSLv23_METHOD +# Always available +OP_NO_SSLv2 = _SSL.OP_NO_SSLv2 +OP_NO_SSLv3 = _SSL.OP_NO_SSLv3 +OP_NO_COMPRESSION = _SSL.OP_NO_COMPRESSION +# This isn't currently documented for PyOpenSSL +OP_NO_RENEGOTIATION = getattr(_SSL, "OP_NO_RENEGOTIATION", 0) + +# Always available +HAS_SNI = True +IS_PYOPENSSL = True + +# Base Exception class +SSLError = _SSL.Error + +# https://github.com/python/cpython/blob/v3.8.0/Modules/_ssl.c#L2995-L3002 +_VERIFY_MAP = { + _stdlibssl.CERT_NONE: _SSL.VERIFY_NONE, + _stdlibssl.CERT_OPTIONAL: _SSL.VERIFY_PEER, + _stdlibssl.CERT_REQUIRED: _SSL.VERIFY_PEER | _SSL.VERIFY_FAIL_IF_NO_PEER_CERT, +} + +_REVERSE_VERIFY_MAP = {value: key for key, value in _VERIFY_MAP.items()} + + +# For SNI support. According to RFC6066, section 3, IPv4 and IPv6 literals are +# not permitted for SNI hostname. +def _is_ip_address(address: Any) -> bool: + try: + _ip_address(address) + return True + except (ValueError, UnicodeError): # noqa: B014 + return False + + +# According to the docs for socket.send it can raise +# WantX509LookupError and should be retried. +BLOCKING_IO_ERRORS = (_SSL.WantReadError, _SSL.WantWriteError, _SSL.WantX509LookupError) + + +def _ragged_eof(exc: BaseException) -> bool: + """Return True if the OpenSSL.SSL.SysCallError is a ragged EOF.""" + return exc.args == (-1, "Unexpected EOF") + + +# https://github.com/pyca/pyopenssl/issues/168 +# https://github.com/pyca/pyopenssl/issues/176 +# https://docs.python.org/3/library/ssl.html#notes-on-non-blocking-sockets +class _sslConn(_SSL.Connection): + def __init__( + self, ctx: _SSL.Context, sock: Optional[_socket.socket], suppress_ragged_eofs: bool + ): + self.socket_checker = _SocketChecker() + self.suppress_ragged_eofs = suppress_ragged_eofs + super().__init__(ctx, sock) + + def _call(self, call: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: + timeout = self.gettimeout() + if timeout: + start = _time.monotonic() + while True: + try: + return call(*args, **kwargs) + except BLOCKING_IO_ERRORS as exc: + # Check for closed socket. 
+ if self.fileno() == -1: + if timeout and _time.monotonic() - start > timeout: + raise _socket.timeout("timed out") + raise SSLError("Underlying socket has been closed") + if isinstance(exc, _SSL.WantReadError): + want_read = True + want_write = False + elif isinstance(exc, _SSL.WantWriteError): + want_read = False + want_write = True + else: + want_read = True + want_write = True + self.socket_checker.select(self, want_read, want_write, timeout) + if timeout and _time.monotonic() - start > timeout: + raise _socket.timeout("timed out") + continue + + def do_handshake(self, *args: Any, **kwargs: Any) -> None: + return self._call(super().do_handshake, *args, **kwargs) + + def recv(self, *args: Any, **kwargs: Any) -> bytes: + try: + return self._call(super().recv, *args, **kwargs) + except _SSL.SysCallError as exc: + # Suppress ragged EOFs to match the stdlib. + if self.suppress_ragged_eofs and _ragged_eof(exc): + return b"" + raise + + def recv_into(self, *args: Any, **kwargs: Any) -> int: + try: + return self._call(super().recv_into, *args, **kwargs) + except _SSL.SysCallError as exc: + # Suppress ragged EOFs to match the stdlib. + if self.suppress_ragged_eofs and _ragged_eof(exc): + return 0 + raise + + def sendall(self, buf: bytes, flags: int = 0) -> None: # type: ignore[override] + view = memoryview(buf) + total_length = len(buf) + total_sent = 0 + while total_sent < total_length: + try: + sent = self._call(super().send, view[total_sent:], flags) + # XXX: It's not clear if this can actually happen. PyOpenSSL + # doesn't appear to have any interrupt handling, nor any interrupt + # errors for OpenSSL connections. + except OSError as exc: # noqa: B014 + if _errno_from_exception(exc) == _EINTR: + continue + raise + # https://github.com/pyca/pyopenssl/blob/19.1.0/src/OpenSSL/SSL.py#L1756 + # https://www.openssl.org/docs/man1.0.2/man3/SSL_write.html + if sent <= 0: + raise OSError("connection closed") + total_sent += sent + + +class _CallbackData: + """Data class which is passed to the OCSP callback.""" + + def __init__(self) -> None: + self.trusted_ca_certs: Optional[List[Certificate]] = None + self.check_ocsp_endpoint: Optional[bool] = None + self.ocsp_response_cache = _OCSPCache() + + +class SSLContext: + """A CPython compatible SSLContext implementation wrapping PyOpenSSL's + context. + """ + + __slots__ = ("_protocol", "_ctx", "_callback_data", "_check_hostname") + + def __init__(self, protocol: int): + self._protocol = protocol + self._ctx = _SSL.Context(self._protocol) + self._callback_data = _CallbackData() + self._check_hostname = True + # OCSP + # XXX: Find a better place to do this someday, since this is client + # side configuration and wrap_socket tries to support both client and + # server side sockets. + self._callback_data.check_ocsp_endpoint = True + self._ctx.set_ocsp_client_callback(callback=_ocsp_callback, data=self._callback_data) + + @property + def protocol(self) -> int: + """The protocol version chosen when constructing the context. + This attribute is read-only. + """ + return self._protocol + + def __get_verify_mode(self) -> VerifyMode: + """Whether to try to verify other peers' certificates and how to + behave if verification fails. This attribute must be one of + ssl.CERT_NONE, ssl.CERT_OPTIONAL or ssl.CERT_REQUIRED. 
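+
+ (The PyOpenSSL flags are mapped back to the stdlib constants via
+ _REVERSE_VERIFY_MAP; for example VERIFY_PEER | VERIFY_FAIL_IF_NO_PEER_CERT
+ corresponds to ssl.CERT_REQUIRED.)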
+ """ + return _REVERSE_VERIFY_MAP[self._ctx.get_verify_mode()] + + def __set_verify_mode(self, value: VerifyMode) -> None: + """Setter for verify_mode.""" + + def _cb( + connobj: _SSL.Connection, + x509obj: _crypto.X509, + errnum: int, + errdepth: int, + retcode: int, + ) -> bool: + # It seems we don't need to do anything here. Twisted doesn't, + # and OpenSSL's SSL_CTX_set_verify let's you pass NULL + # for the callback option. It's weird that PyOpenSSL requires + # this. + # This is optional in pyopenssl >= 20 and can be removed once minimum + # supported version is bumped + # See: pyopenssl.org/en/latest/changelog.html#id47 + return bool(retcode) + + self._ctx.set_verify(_VERIFY_MAP[value], _cb) + + verify_mode = property(__get_verify_mode, __set_verify_mode) + + def __get_check_hostname(self) -> bool: + return self._check_hostname + + def __set_check_hostname(self, value: Any) -> None: + validate_boolean("check_hostname", value) + self._check_hostname = value + + check_hostname = property(__get_check_hostname, __set_check_hostname) + + def __get_check_ocsp_endpoint(self) -> Optional[bool]: + return self._callback_data.check_ocsp_endpoint + + def __set_check_ocsp_endpoint(self, value: bool) -> None: + validate_boolean("check_ocsp", value) + self._callback_data.check_ocsp_endpoint = value + + check_ocsp_endpoint = property(__get_check_ocsp_endpoint, __set_check_ocsp_endpoint) + + def __get_options(self) -> None: + # Calling set_options adds the option to the existing bitmask and + # returns the new bitmask. + # https://www.pyopenssl.org/en/stable/api/ssl.html#OpenSSL.SSL.Context.set_options + return self._ctx.set_options(0) + + def __set_options(self, value: int) -> None: + # Explcitly convert to int, since newer CPython versions + # use enum.IntFlag for options. The values are the same + # regardless of implementation. + self._ctx.set_options(int(value)) + + options = property(__get_options, __set_options) + + def load_cert_chain( + self, + certfile: Union[str, bytes], + keyfile: Union[str, bytes, None] = None, + password: Optional[str] = None, + ) -> None: + """Load a private key and the corresponding certificate. The certfile + string must be the path to a single file in PEM format containing the + certificate as well as any number of CA certificates needed to + establish the certificate's authenticity. The keyfile string, if + present, must point to a file containing the private key. Otherwise + the private key will be taken from certfile as well. + """ + # Match CPython behavior + # https://github.com/python/cpython/blob/v3.8.0/Modules/_ssl.c#L3930-L3971 + # Password callback MUST be set first or it will be ignored. + if password: + + def _pwcb(max_length: int, prompt_twice: bool, user_data: bytes) -> bytes: + # XXX:We could check the password length against what OpenSSL + # tells us is the max, but we can't raise an exception, so... + # warn? + assert password is not None + return password.encode("utf-8") + + self._ctx.set_passwd_cb(_pwcb) + self._ctx.use_certificate_chain_file(certfile) + self._ctx.use_privatekey_file(keyfile or certfile) + self._ctx.check_privatekey() + + def load_verify_locations( + self, cafile: Optional[str] = None, capath: Optional[str] = None + ) -> None: + """Load a set of "certification authority"(CA) certificates used to + validate other peers' certificates when `~verify_mode` is other than + ssl.CERT_NONE. + """ + self._ctx.load_verify_locations(cafile, capath) + # Manually load the CA certs when get_verified_chain is not available (pyopenssl<20). 
+ if not hasattr(_SSL.Connection, "get_verified_chain"):
+ assert cafile is not None
+ self._callback_data.trusted_ca_certs = _load_trusted_ca_certs(cafile)
+
+ def _load_certifi(self) -> None:
+ """Attempt to load CA certs from certifi."""
+ if _HAVE_CERTIFI:
+ self.load_verify_locations(certifi.where())
+ else:
+ raise _ConfigurationError(
+ "tlsAllowInvalidCertificates is False but no system "
+ "CA certificates could be loaded. Please install the "
+ "certifi package, or provide a path to a CA file using "
+ "the tlsCAFile option"
+ )
+
+ def _load_wincerts(self, store: str) -> None:
+ """Attempt to load CA certs from Windows trust store."""
+ cert_store = self._ctx.get_cert_store()
+ oid = _stdlibssl.Purpose.SERVER_AUTH.oid
+ for cert, encoding, trust in _stdlibssl.enum_certificates(store):  # type: ignore
+ if encoding == "x509_asn":
+ if trust is True or oid in trust:
+ cert_store.add_cert(
+ _crypto.X509.from_cryptography(_load_der_x509_certificate(cert))
+ )
+
+ def load_default_certs(self) -> None:
+ """A PyOpenSSL version of load_default_certs from CPython."""
+ # PyOpenSSL is incapable of loading CA certs from Windows, and mostly
+ # incapable on macOS.
+ # https://www.pyopenssl.org/en/stable/api/ssl.html#OpenSSL.SSL.Context.set_default_verify_paths
+ if _sys.platform == "win32":
+ try:
+ for storename in ("CA", "ROOT"):
+ self._load_wincerts(storename)
+ except PermissionError:
+ # Fall back to certifi
+ self._load_certifi()
+ elif _sys.platform == "darwin":
+ self._load_certifi()
+ self._ctx.set_default_verify_paths()
+
+ def set_default_verify_paths(self) -> None:
+ """Specify that the platform provided CA certificates are to be used
+ for verification purposes.
+ """
+ # Note: See PyOpenSSL's docs for limitations, which are similar
+ # but not the same as CPython's.
+ self._ctx.set_default_verify_paths()
+
+ def wrap_socket(
+ self,
+ sock: _socket.socket,
+ server_side: bool = False,
+ do_handshake_on_connect: bool = True,
+ suppress_ragged_eofs: bool = True,
+ server_hostname: Optional[str] = None,
+ session: Optional[_SSL.Session] = None,
+ ) -> _sslConn:
+ """Wrap an existing Python socket connection and return a TLS socket
+ object.
+ """
+ ssl_conn = _sslConn(self._ctx, sock, suppress_ragged_eofs)
+ if session:
+ ssl_conn.set_session(session)
+ if server_side is True:
+ ssl_conn.set_accept_state()
+ else:
+ # SNI
+ if server_hostname and not _is_ip_address(server_hostname):
+ # XXX: Do this in a callback registered with
+ # SSLContext.set_info_callback? See Twisted for an example.
+ ssl_conn.set_tlsext_host_name(server_hostname.encode("idna"))
+ if self.verify_mode != _stdlibssl.CERT_NONE:
+ # Request a stapled OCSP response.
+ ssl_conn.request_ocsp()
+ ssl_conn.set_connect_state()
+ # If this wasn't true the caller of wrap_socket would call
+ # do_handshake()
+ if do_handshake_on_connect:
+ # XXX: If we do hostname checking in a callback we can get rid
+ # of this call to do_handshake() since the handshake
+ # will happen automatically later.
+ ssl_conn.do_handshake()
+ # XXX: Do this in a callback registered with
+ # SSLContext.set_info_callback? See Twisted for an example.
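+ # (The check below verifies the peer certificate against the hostname
+ # or IP address via service_identity, mirroring CPython's
+ # post-handshake check_hostname behavior.)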
+ if self.check_hostname and server_hostname is not None: + try: + if _is_ip_address(server_hostname): + _verify_ip_address(ssl_conn, server_hostname) + else: + _verify_hostname(ssl_conn, server_hostname) + except (_SICertificateError, _SIVerificationError) as exc: + raise _CertificateError(str(exc)) + return ssl_conn diff --git a/backend/test/lib/python3.8/site-packages/pymongo/read_concern.py b/backend/test/lib/python3.8/site-packages/pymongo/read_concern.py new file mode 100644 index 0000000000000000000000000000000000000000..ddc90a817ab0fc483642462b9627749d80e669a1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/read_concern.py @@ -0,0 +1,76 @@ +# Copyright 2015 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License", +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for working with read concerns.""" + +from typing import Any, Dict, Optional + + +class ReadConcern: + """ReadConcern + + :Parameters: + - `level`: (string) The read concern level specifies the level of + isolation for read operations. For example, a read operation using a + read concern level of ``majority`` will only return data that has been + written to a majority of nodes. If the level is left unspecified, the + server default will be used. + + .. versionadded:: 3.2 + + """ + + def __init__(self, level: Optional[str] = None) -> None: + if level is None or isinstance(level, str): + self.__level = level + else: + raise TypeError("level must be a string or None.") + + @property + def level(self) -> Optional[str]: + """The read concern level.""" + return self.__level + + @property + def ok_for_legacy(self) -> bool: + """Return ``True`` if this read concern is compatible with + old wire protocol versions. + """ + return self.level is None or self.level == "local" + + @property + def document(self) -> Dict[str, Any]: + """The document representation of this read concern. + + .. note:: + :class:`ReadConcern` is immutable. Mutating the value of + :attr:`document` does not mutate this :class:`ReadConcern`. + """ + doc = {} + if self.__level: + doc["level"] = self.level + return doc + + def __eq__(self, other: Any) -> bool: + if isinstance(other, ReadConcern): + return self.document == other.document + return NotImplemented + + def __repr__(self) -> str: + if self.level: + return "ReadConcern(%s)" % self.level + return "ReadConcern()" + + +DEFAULT_READ_CONCERN = ReadConcern() diff --git a/backend/test/lib/python3.8/site-packages/pymongo/read_preferences.py b/backend/test/lib/python3.8/site-packages/pymongo/read_preferences.py new file mode 100644 index 0000000000000000000000000000000000000000..477efeda3263b9b26acebf8ca69e11c5663844ee --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/read_preferences.py @@ -0,0 +1,627 @@ +# Copyright 2012-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License", +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for choosing which member of a replica set to read from."""
+
+from __future__ import annotations
+
+from collections import abc
+from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional, Sequence
+
+from pymongo import max_staleness_selectors
+from pymongo.errors import ConfigurationError
+from pymongo.server_selectors import (
+ member_with_tags_server_selector,
+ secondary_with_tags_server_selector,
+)
+
+if TYPE_CHECKING:
+ from pymongo.server_selectors import Selection
+ from pymongo.topology_description import TopologyDescription
+
+_PRIMARY = 0
+_PRIMARY_PREFERRED = 1
+_SECONDARY = 2
+_SECONDARY_PREFERRED = 3
+_NEAREST = 4
+
+
+_MONGOS_MODES = (
+ "primary",
+ "primaryPreferred",
+ "secondary",
+ "secondaryPreferred",
+ "nearest",
+)
+
+_Hedge = Mapping[str, Any]
+_TagSets = Sequence[Mapping[str, Any]]
+
+
+def _validate_tag_sets(tag_sets: Optional[_TagSets]) -> Optional[_TagSets]:
+ """Validate tag sets for a MongoClient."""
+ if tag_sets is None:
+ return tag_sets
+
+ if not isinstance(tag_sets, (list, tuple)):
+ raise TypeError(f"Tag sets {tag_sets!r} invalid, must be a sequence")
+ if len(tag_sets) == 0:
+ raise ValueError(
+ f"Tag sets {tag_sets!r} invalid, must be None or contain at least one set of tags"
+ )
+
+ for tags in tag_sets:
+ if not isinstance(tags, abc.Mapping):
+ raise TypeError(
+ "Tag set {!r} invalid, must be an instance of dict, "
+ "bson.son.SON or other type that inherits from "
+ "collections.abc.Mapping".format(tags)
+ )
+
+ return list(tag_sets)
+
+
+def _invalid_max_staleness_msg(max_staleness: Any) -> str:
+ return "maxStalenessSeconds must be a positive integer, not %s" % max_staleness
+
+
+# Some duplication with common.py to avoid import cycle.
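+# For example (illustrative values): _validate_max_staleness(-1) returns -1,
+# meaning "no maximum"; _validate_max_staleness(90) returns 90; 0 raises
+# ValueError and "90" raises TypeError.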
+def _validate_max_staleness(max_staleness: Any) -> int: + """Validate max_staleness.""" + if max_staleness == -1: + return -1 + + if not isinstance(max_staleness, int): + raise TypeError(_invalid_max_staleness_msg(max_staleness)) + + if max_staleness <= 0: + raise ValueError(_invalid_max_staleness_msg(max_staleness)) + + return max_staleness + + +def _validate_hedge(hedge: Optional[_Hedge]) -> Optional[_Hedge]: + """Validate hedge.""" + if hedge is None: + return None + + if not isinstance(hedge, dict): + raise TypeError(f"hedge must be a dictionary, not {hedge!r}") + + return hedge + + +class _ServerMode: + """Base class for all read preferences.""" + + __slots__ = ("__mongos_mode", "__mode", "__tag_sets", "__max_staleness", "__hedge") + + def __init__( + self, + mode: int, + tag_sets: Optional[_TagSets] = None, + max_staleness: int = -1, + hedge: Optional[_Hedge] = None, + ) -> None: + self.__mongos_mode = _MONGOS_MODES[mode] + self.__mode = mode + self.__tag_sets = _validate_tag_sets(tag_sets) + self.__max_staleness = _validate_max_staleness(max_staleness) + self.__hedge = _validate_hedge(hedge) + + @property + def name(self) -> str: + """The name of this read preference.""" + return self.__class__.__name__ + + @property + def mongos_mode(self) -> str: + """The mongos mode of this read preference.""" + return self.__mongos_mode + + @property + def document(self) -> Dict[str, Any]: + """Read preference as a document.""" + doc: Dict[str, Any] = {"mode": self.__mongos_mode} + if self.__tag_sets not in (None, [{}]): + doc["tags"] = self.__tag_sets + if self.__max_staleness != -1: + doc["maxStalenessSeconds"] = self.__max_staleness + if self.__hedge not in (None, {}): + doc["hedge"] = self.__hedge + return doc + + @property + def mode(self) -> int: + """The mode of this read preference instance.""" + return self.__mode + + @property + def tag_sets(self) -> _TagSets: + """Set ``tag_sets`` to a list of dictionaries like [{'dc': 'ny'}] to + read only from members whose ``dc`` tag has the value ``"ny"``. + To specify a priority-order for tag sets, provide a list of + tag sets: ``[{'dc': 'ny'}, {'dc': 'la'}, {}]``. A final, empty tag + set, ``{}``, means "read from any member that matches the mode, + ignoring tags." MongoClient tries each set of tags in turn + until it finds a set of tags with at least one matching member. + For example, to only send a query to an analytic node:: + + Nearest(tag_sets=[{"node":"analytics"}]) + + Or using :class:`SecondaryPreferred`:: + + SecondaryPreferred(tag_sets=[{"node":"analytics"}]) + + .. seealso:: `Data-Center Awareness + <https://www.mongodb.com/docs/manual/data-center-awareness/>`_ + """ + return list(self.__tag_sets) if self.__tag_sets else [{}] + + @property + def max_staleness(self) -> int: + """The maximum estimated length of time (in seconds) a replica set + secondary can fall behind the primary in replication before it will + no longer be selected for operations, or -1 for no maximum. + """ + return self.__max_staleness + + @property + def hedge(self) -> Optional[_Hedge]: + """The read preference ``hedge`` parameter. + + A dictionary that configures how the server will perform hedged reads. + It consists of the following keys: + + - ``enabled``: Enables or disables hedged reads in sharded clusters. + + Hedged reads are automatically enabled in MongoDB 4.4+ when using a + ``nearest`` read preference. 
To explicitly enable hedged reads, set + the ``enabled`` key to ``true``:: + + >>> Nearest(hedge={'enabled': True}) + + To explicitly disable hedged reads, set the ``enabled`` key to + ``False``:: + + >>> Nearest(hedge={'enabled': False}) + + .. versionadded:: 3.11 + """ + return self.__hedge + + @property + def min_wire_version(self) -> int: + """The wire protocol version the server must support. + + Some read preferences impose version requirements on all servers (e.g. + maxStalenessSeconds requires MongoDB 3.4 / maxWireVersion 5). + + All servers' maxWireVersion must be at least this read preference's + `min_wire_version`, or the driver raises + :exc:`~pymongo.errors.ConfigurationError`. + """ + return 0 if self.__max_staleness == -1 else 5 + + def __repr__(self) -> str: + return "{}(tag_sets={!r}, max_staleness={!r}, hedge={!r})".format( + self.name, + self.__tag_sets, + self.__max_staleness, + self.__hedge, + ) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, _ServerMode): + return ( + self.mode == other.mode + and self.tag_sets == other.tag_sets + and self.max_staleness == other.max_staleness + and self.hedge == other.hedge + ) + return NotImplemented + + def __ne__(self, other: Any) -> bool: + return not self == other + + def __getstate__(self) -> Dict[str, Any]: + """Return value of object for pickling. + + Needed explicitly because __slots__() defined. + """ + return { + "mode": self.__mode, + "tag_sets": self.__tag_sets, + "max_staleness": self.__max_staleness, + "hedge": self.__hedge, + } + + def __setstate__(self, value: Mapping[str, Any]) -> None: + """Restore from pickling.""" + self.__mode = value["mode"] + self.__mongos_mode = _MONGOS_MODES[self.__mode] + self.__tag_sets = _validate_tag_sets(value["tag_sets"]) + self.__max_staleness = _validate_max_staleness(value["max_staleness"]) + self.__hedge = _validate_hedge(value["hedge"]) + + def __call__(self, selection: Selection) -> Selection: + return selection + + +class Primary(_ServerMode): + """Primary read preference. + + * When directly connected to one mongod queries are allowed if the server + is standalone or a replica set primary. + * When connected to a mongos queries are sent to the primary of a shard. + * When connected to a replica set queries are sent to the primary of + the replica set. + """ + + __slots__ = () + + def __init__(self) -> None: + super().__init__(_PRIMARY) + + def __call__(self, selection: Selection) -> Selection: + """Apply this read preference to a Selection.""" + return selection.primary_selection + + def __repr__(self) -> str: + return "Primary()" + + def __eq__(self, other: Any) -> bool: + if isinstance(other, _ServerMode): + return other.mode == _PRIMARY + return NotImplemented + + +class PrimaryPreferred(_ServerMode): + """PrimaryPreferred read preference. + + * When directly connected to one mongod queries are allowed to standalone + servers, to a replica set primary, or to replica set secondaries. + * When connected to a mongos queries are sent to the primary of a shard if + available, otherwise a shard secondary. + * When connected to a replica set queries are sent to the primary if + available, otherwise a secondary. + + .. note:: When a :class:`~pymongo.mongo_client.MongoClient` is first + created reads will be routed to an available secondary until the + primary of the replica set is discovered. + + :Parameters: + - `tag_sets`: The :attr:`~tag_sets` to use if the primary is not + available. 
+ - `max_staleness`: (integer, in seconds) The maximum estimated + length of time a replica set secondary can fall behind the primary in + replication before it will no longer be selected for operations. + Default -1, meaning no maximum. If it is set, it must be at least + 90 seconds. + - `hedge`: The :attr:`~hedge` to use if the primary is not available. + + .. versionchanged:: 3.11 + Added ``hedge`` parameter. + """ + + __slots__ = () + + def __init__( + self, + tag_sets: Optional[_TagSets] = None, + max_staleness: int = -1, + hedge: Optional[_Hedge] = None, + ) -> None: + super().__init__(_PRIMARY_PREFERRED, tag_sets, max_staleness, hedge) + + def __call__(self, selection: Selection) -> Selection: + """Apply this read preference to Selection.""" + if selection.primary: + return selection.primary_selection + else: + return secondary_with_tags_server_selector( + self.tag_sets, max_staleness_selectors.select(self.max_staleness, selection) + ) + + +class Secondary(_ServerMode): + """Secondary read preference. + + * When directly connected to one mongod queries are allowed to standalone + servers, to a replica set primary, or to replica set secondaries. + * When connected to a mongos queries are distributed among shard + secondaries. An error is raised if no secondaries are available. + * When connected to a replica set queries are distributed among + secondaries. An error is raised if no secondaries are available. + + :Parameters: + - `tag_sets`: The :attr:`~tag_sets` for this read preference. + - `max_staleness`: (integer, in seconds) The maximum estimated + length of time a replica set secondary can fall behind the primary in + replication before it will no longer be selected for operations. + Default -1, meaning no maximum. If it is set, it must be at least + 90 seconds. + - `hedge`: The :attr:`~hedge` for this read preference. + + .. versionchanged:: 3.11 + Added ``hedge`` parameter. + """ + + __slots__ = () + + def __init__( + self, + tag_sets: Optional[_TagSets] = None, + max_staleness: int = -1, + hedge: Optional[_Hedge] = None, + ) -> None: + super().__init__(_SECONDARY, tag_sets, max_staleness, hedge) + + def __call__(self, selection: Selection) -> Selection: + """Apply this read preference to Selection.""" + return secondary_with_tags_server_selector( + self.tag_sets, max_staleness_selectors.select(self.max_staleness, selection) + ) + + +class SecondaryPreferred(_ServerMode): + """SecondaryPreferred read preference. + + * When directly connected to one mongod queries are allowed to standalone + servers, to a replica set primary, or to replica set secondaries. + * When connected to a mongos queries are distributed among shard + secondaries, or the shard primary if no secondary is available. + * When connected to a replica set queries are distributed among + secondaries, or the primary if no secondary is available. + + .. note:: When a :class:`~pymongo.mongo_client.MongoClient` is first + created reads will be routed to the primary of the replica set until + an available secondary is discovered. + + :Parameters: + - `tag_sets`: The :attr:`~tag_sets` for this read preference. + - `max_staleness`: (integer, in seconds) The maximum estimated + length of time a replica set secondary can fall behind the primary in + replication before it will no longer be selected for operations. + Default -1, meaning no maximum. If it is set, it must be at least + 90 seconds. + - `hedge`: The :attr:`~hedge` for this read preference. + + .. versionchanged:: 3.11 + Added ``hedge`` parameter. 
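+
+ For example (tag values are illustrative)::
+
+ SecondaryPreferred(tag_sets=[{"dc": "ny"}, {}], max_staleness=90)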
+ """ + + __slots__ = () + + def __init__( + self, + tag_sets: Optional[_TagSets] = None, + max_staleness: int = -1, + hedge: Optional[_Hedge] = None, + ) -> None: + super().__init__(_SECONDARY_PREFERRED, tag_sets, max_staleness, hedge) + + def __call__(self, selection: Selection) -> Selection: + """Apply this read preference to Selection.""" + secondaries = secondary_with_tags_server_selector( + self.tag_sets, max_staleness_selectors.select(self.max_staleness, selection) + ) + + if secondaries: + return secondaries + else: + return selection.primary_selection + + +class Nearest(_ServerMode): + """Nearest read preference. + + * When directly connected to one mongod queries are allowed to standalone + servers, to a replica set primary, or to replica set secondaries. + * When connected to a mongos queries are distributed among all members of + a shard. + * When connected to a replica set queries are distributed among all + members. + + :Parameters: + - `tag_sets`: The :attr:`~tag_sets` for this read preference. + - `max_staleness`: (integer, in seconds) The maximum estimated + length of time a replica set secondary can fall behind the primary in + replication before it will no longer be selected for operations. + Default -1, meaning no maximum. If it is set, it must be at least + 90 seconds. + - `hedge`: The :attr:`~hedge` for this read preference. + + .. versionchanged:: 3.11 + Added ``hedge`` parameter. + """ + + __slots__ = () + + def __init__( + self, + tag_sets: Optional[_TagSets] = None, + max_staleness: int = -1, + hedge: Optional[_Hedge] = None, + ) -> None: + super().__init__(_NEAREST, tag_sets, max_staleness, hedge) + + def __call__(self, selection: Selection) -> Selection: + """Apply this read preference to Selection.""" + return member_with_tags_server_selector( + self.tag_sets, max_staleness_selectors.select(self.max_staleness, selection) + ) + + +class _AggWritePref: + """Agg $out/$merge write preference. + + * If there are readable servers and there is any pre-5.0 server, use + primary read preference. + * Otherwise use `pref` read preference. + + :Parameters: + - `pref`: The read preference to use on MongoDB 5.0+. + """ + + __slots__ = ("pref", "effective_pref") + + def __init__(self, pref: _ServerMode): + self.pref = pref + self.effective_pref: _ServerMode = ReadPreference.PRIMARY + + def selection_hook(self, topology_description: TopologyDescription) -> None: + common_wv = topology_description.common_wire_version + if ( + topology_description.has_readable_server(ReadPreference.PRIMARY_PREFERRED) + and common_wv + and common_wv < 13 + ): + self.effective_pref = ReadPreference.PRIMARY + else: + self.effective_pref = self.pref + + def __call__(self, selection: Selection) -> Selection: + """Apply this read preference to a Selection.""" + return self.effective_pref(selection) + + def __repr__(self) -> str: + return f"_AggWritePref(pref={self.pref!r})" + + # Proxy other calls to the effective_pref so that _AggWritePref can be + # used in place of an actual read preference. 
+ def __getattr__(self, name: str) -> Any: + return getattr(self.effective_pref, name) + + +_ALL_READ_PREFERENCES = (Primary, PrimaryPreferred, Secondary, SecondaryPreferred, Nearest) + + +def make_read_preference( + mode: int, tag_sets: Optional[_TagSets], max_staleness: int = -1 +) -> _ServerMode: + if mode == _PRIMARY: + if tag_sets not in (None, [{}]): + raise ConfigurationError("Read preference primary cannot be combined with tags") + if max_staleness != -1: + raise ConfigurationError( + "Read preference primary cannot be combined with maxStalenessSeconds" + ) + return Primary() + return _ALL_READ_PREFERENCES[mode](tag_sets, max_staleness) # type: ignore + + +_MODES = ( + "PRIMARY", + "PRIMARY_PREFERRED", + "SECONDARY", + "SECONDARY_PREFERRED", + "NEAREST", +) + + +class ReadPreference: + """An enum that defines some commonly used read preference modes. + + Apps can also create a custom read preference, for example:: + + Nearest(tag_sets=[{"node":"analytics"}]) + + See :doc:`/examples/high_availability` for code examples. + + A read preference is used in three cases: + + :class:`~pymongo.mongo_client.MongoClient` connected to a single mongod: + + - ``PRIMARY``: Queries are allowed if the server is standalone or a replica + set primary. + - All other modes allow queries to standalone servers, to a replica set + primary, or to replica set secondaries. + + :class:`~pymongo.mongo_client.MongoClient` initialized with the + ``replicaSet`` option: + + - ``PRIMARY``: Read from the primary. This is the default, and provides the + strongest consistency. If no primary is available, raise + :class:`~pymongo.errors.AutoReconnect`. + + - ``PRIMARY_PREFERRED``: Read from the primary if available, or if there is + none, read from a secondary. + + - ``SECONDARY``: Read from a secondary. If no secondary is available, + raise :class:`~pymongo.errors.AutoReconnect`. + + - ``SECONDARY_PREFERRED``: Read from a secondary if available, otherwise + from the primary. + + - ``NEAREST``: Read from any member. + + :class:`~pymongo.mongo_client.MongoClient` connected to a mongos, with a + sharded cluster of replica sets: + + - ``PRIMARY``: Read from the primary of the shard, or raise + :class:`~pymongo.errors.OperationFailure` if there is none. + This is the default. + + - ``PRIMARY_PREFERRED``: Read from the primary of the shard, or if there is + none, read from a secondary of the shard. + + - ``SECONDARY``: Read from a secondary of the shard, or raise + :class:`~pymongo.errors.OperationFailure` if there is none. + + - ``SECONDARY_PREFERRED``: Read from a secondary of the shard if available, + otherwise from the shard primary. + + - ``NEAREST``: Read from any shard member. + """ + + PRIMARY = Primary() + PRIMARY_PREFERRED = PrimaryPreferred() + SECONDARY = Secondary() + SECONDARY_PREFERRED = SecondaryPreferred() + NEAREST = Nearest() + + +def read_pref_mode_from_name(name: str) -> int: + """Get the read preference mode from mongos/uri name.""" + return _MONGOS_MODES.index(name) + + +class MovingAverage: + """Tracks an exponentially-weighted moving average.""" + + average: Optional[float] + + def __init__(self) -> None: + self.average = None + + def add_sample(self, sample: float) -> None: + if sample < 0: + # Likely system time change while waiting for hello response + # and not using time.monotonic. Ignore it, the next one will + # probably be valid. + return + if self.average is None: + self.average = sample + else: + # The Server Selection Spec requires an exponentially weighted + # average with alpha = 0.2. 
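+ # That is, average = (1 - alpha) * average + alpha * sample with
+ # alpha = 0.2; e.g. an average of 10.0 updated with a sample of
+ # 20.0 becomes 0.8 * 10.0 + 0.2 * 20.0 = 12.0.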
+ self.average = 0.8 * self.average + 0.2 * sample + + def get(self) -> Optional[float]: + """Get the calculated average, or None if no samples yet.""" + return self.average + + def reset(self) -> None: + self.average = None diff --git a/backend/test/lib/python3.8/site-packages/pymongo/response.py b/backend/test/lib/python3.8/site-packages/pymongo/response.py new file mode 100644 index 0000000000000000000000000000000000000000..c236754b3ab56577fca7fd98cd28fe017da49431 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/response.py @@ -0,0 +1,133 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Represent a response from the server.""" +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, List, Mapping, Optional, Sequence, Union + +if TYPE_CHECKING: + from datetime import timedelta + + from pymongo.message import _OpMsg, _OpReply + from pymongo.pool import Connection + from pymongo.typings import _Address, _DocumentOut + + +class Response: + __slots__ = ("_data", "_address", "_request_id", "_duration", "_from_command", "_docs") + + def __init__( + self, + data: Union[_OpMsg, _OpReply], + address: _Address, + request_id: int, + duration: Optional[timedelta], + from_command: bool, + docs: Sequence[Mapping[str, Any]], + ): + """Represent a response from the server. + + :Parameters: + - `data`: A network response message. + - `address`: (host, port) of the source server. + - `request_id`: The request id of this operation. + - `duration`: The duration of the operation. + - `from_command`: if the response is the result of a db command. + """ + self._data = data + self._address = address + self._request_id = request_id + self._duration = duration + self._from_command = from_command + self._docs = docs + + @property + def data(self) -> Union[_OpMsg, _OpReply]: + """Server response's raw BSON bytes.""" + return self._data + + @property + def address(self) -> _Address: + """(host, port) of the source server.""" + return self._address + + @property + def request_id(self) -> int: + """The request id of this operation.""" + return self._request_id + + @property + def duration(self) -> Optional[timedelta]: + """The duration of the operation.""" + return self._duration + + @property + def from_command(self) -> bool: + """If the response is a result from a db command.""" + return self._from_command + + @property + def docs(self) -> Sequence[Mapping[str, Any]]: + """The decoded document(s).""" + return self._docs + + +class PinnedResponse(Response): + __slots__ = ("_conn", "_more_to_come") + + def __init__( + self, + data: Union[_OpMsg, _OpReply], + address: _Address, + conn: Connection, + request_id: int, + duration: Optional[timedelta], + from_command: bool, + docs: List[_DocumentOut], + more_to_come: bool, + ): + """Represent a response to an exhaust cursor's initial query. + + :Parameters: + - `data`: A network response message. + - `address`: (host, port) of the source server. 
+ - `conn`: The Connection used for the initial query. + - `request_id`: The request id of this operation. + - `duration`: The duration of the operation. + - `from_command`: If the response is the result of a db command. + - `docs`: List of documents. + - `more_to_come`: Bool indicating whether cursor is ready to be + exhausted. + """ + super().__init__(data, address, request_id, duration, from_command, docs) + self._conn = conn + self._more_to_come = more_to_come + + @property + def conn(self) -> Connection: + """The Connection used for the initial query. + + The server will send batches on this socket, without waiting for + getMores from the client, until the result set is exhausted or there + is an error. + """ + return self._conn + + @property + def more_to_come(self) -> bool: + """If true, server is ready to send batches on the socket until the + result set is exhausted or there is an error. + """ + return self._more_to_come diff --git a/backend/test/lib/python3.8/site-packages/pymongo/results.py b/backend/test/lib/python3.8/site-packages/pymongo/results.py new file mode 100644 index 0000000000000000000000000000000000000000..3676d4a4839a51e261fc5b760a163bec1cade7f2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/results.py @@ -0,0 +1,219 @@ +# Copyright 2015-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Result class definitions.""" +from typing import Any, Dict, List, Mapping, Optional, cast + +from pymongo.errors import InvalidOperation + + +class _WriteResult: + """Base class for write result classes.""" + + __slots__ = ("__acknowledged",) + + def __init__(self, acknowledged: bool) -> None: + self.__acknowledged = acknowledged + + def _raise_if_unacknowledged(self, property_name: str) -> None: + """Raise an exception on property access if unacknowledged.""" + if not self.__acknowledged: + raise InvalidOperation( + "A value for {} is not available when " + "the write is unacknowledged. Check the " + "acknowledged attribute to avoid this " + "error.".format(property_name) + ) + + @property + def acknowledged(self) -> bool: + """Is this the result of an acknowledged write operation? + + The :attr:`acknowledged` attribute will be ``False`` when using + ``WriteConcern(w=0)``, otherwise ``True``. + + .. note:: + If the :attr:`acknowledged` attribute is ``False`` all other + attributes of this class will raise + :class:`~pymongo.errors.InvalidOperation` when accessed. Values for + other attributes cannot be determined if the write operation was + unacknowledged. + + .. 
seealso:: + :class:`~pymongo.write_concern.WriteConcern` + """ + return self.__acknowledged + + +class InsertOneResult(_WriteResult): + """The return type for :meth:`~pymongo.collection.Collection.insert_one`.""" + + __slots__ = ("__inserted_id",) + + def __init__(self, inserted_id: Any, acknowledged: bool) -> None: + self.__inserted_id = inserted_id + super().__init__(acknowledged) + + @property + def inserted_id(self) -> Any: + """The inserted document's _id.""" + return self.__inserted_id + + +class InsertManyResult(_WriteResult): + """The return type for :meth:`~pymongo.collection.Collection.insert_many`.""" + + __slots__ = ("__inserted_ids",) + + def __init__(self, inserted_ids: List[Any], acknowledged: bool) -> None: + self.__inserted_ids = inserted_ids + super().__init__(acknowledged) + + @property + def inserted_ids(self) -> List: + """A list of _ids of the inserted documents, in the order provided. + + .. note:: If ``False`` is passed for the `ordered` parameter to + :meth:`~pymongo.collection.Collection.insert_many` the server + may have inserted the documents in a different order than what + is presented here. + """ + return self.__inserted_ids + + +class UpdateResult(_WriteResult): + """The return type for :meth:`~pymongo.collection.Collection.update_one`, + :meth:`~pymongo.collection.Collection.update_many`, and + :meth:`~pymongo.collection.Collection.replace_one`. + """ + + __slots__ = ("__raw_result",) + + def __init__(self, raw_result: Optional[Mapping[str, Any]], acknowledged: bool): + self.__raw_result = raw_result + super().__init__(acknowledged) + + @property + def raw_result(self) -> Optional[Mapping[str, Any]]: + """The raw result document returned by the server.""" + return self.__raw_result + + @property + def matched_count(self) -> int: + """The number of documents matched for this update.""" + self._raise_if_unacknowledged("matched_count") + if self.upserted_id is not None: + return 0 + assert self.__raw_result is not None + return self.__raw_result.get("n", 0) + + @property + def modified_count(self) -> int: + """The number of documents modified.""" + self._raise_if_unacknowledged("modified_count") + assert self.__raw_result is not None + return cast(int, self.__raw_result.get("nModified")) + + @property + def upserted_id(self) -> Any: + """The _id of the inserted document if an upsert took place. Otherwise + ``None``. + """ + self._raise_if_unacknowledged("upserted_id") + assert self.__raw_result is not None + return self.__raw_result.get("upserted") + + +class DeleteResult(_WriteResult): + """The return type for :meth:`~pymongo.collection.Collection.delete_one` + and :meth:`~pymongo.collection.Collection.delete_many` + """ + + __slots__ = ("__raw_result",) + + def __init__(self, raw_result: Mapping[str, Any], acknowledged: bool) -> None: + self.__raw_result = raw_result + super().__init__(acknowledged) + + @property + def raw_result(self) -> Mapping[str, Any]: + """The raw result document returned by the server.""" + return self.__raw_result + + @property + def deleted_count(self) -> int: + """The number of documents deleted.""" + self._raise_if_unacknowledged("deleted_count") + return self.__raw_result.get("n", 0) + + +class BulkWriteResult(_WriteResult): + """An object wrapper for bulk API write results.""" + + __slots__ = ("__bulk_api_result",) + + def __init__(self, bulk_api_result: Dict[str, Any], acknowledged: bool) -> None: + """Create a BulkWriteResult instance. 
+ + :Parameters: + - `bulk_api_result`: A result dict from the bulk API + - `acknowledged`: Was this write result acknowledged? If ``False`` + then all properties of this object will raise + :exc:`~pymongo.errors.InvalidOperation`. + """ + self.__bulk_api_result = bulk_api_result + super().__init__(acknowledged) + + @property + def bulk_api_result(self) -> Dict[str, Any]: + """The raw bulk API result.""" + return self.__bulk_api_result + + @property + def inserted_count(self) -> int: + """The number of documents inserted.""" + self._raise_if_unacknowledged("inserted_count") + return cast(int, self.__bulk_api_result.get("nInserted")) + + @property + def matched_count(self) -> int: + """The number of documents matched for an update.""" + self._raise_if_unacknowledged("matched_count") + return cast(int, self.__bulk_api_result.get("nMatched")) + + @property + def modified_count(self) -> int: + """The number of documents modified.""" + self._raise_if_unacknowledged("modified_count") + return cast(int, self.__bulk_api_result.get("nModified")) + + @property + def deleted_count(self) -> int: + """The number of documents deleted.""" + self._raise_if_unacknowledged("deleted_count") + return cast(int, self.__bulk_api_result.get("nRemoved")) + + @property + def upserted_count(self) -> int: + """The number of documents upserted.""" + self._raise_if_unacknowledged("upserted_count") + return cast(int, self.__bulk_api_result.get("nUpserted")) + + @property + def upserted_ids(self) -> Optional[Dict[int, Any]]: + """A map of operation index to the _id of the upserted document.""" + self._raise_if_unacknowledged("upserted_ids") + if self.__bulk_api_result: + return {upsert["index"]: upsert["_id"] for upsert in self.bulk_api_result["upserted"]} + return None diff --git a/backend/test/lib/python3.8/site-packages/pymongo/saslprep.py b/backend/test/lib/python3.8/site-packages/pymongo/saslprep.py new file mode 100644 index 0000000000000000000000000000000000000000..34c0182a53a1198f62928d0c775d72a56448b8db --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/saslprep.py @@ -0,0 +1,113 @@ +# Copyright 2016-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""An implementation of RFC4013 SASLprep.""" +from typing import Any, Optional + +try: + import stringprep +except ImportError: + HAVE_STRINGPREP = False + + def saslprep(data: Any, prohibit_unassigned_code_points: Optional[bool] = True) -> str: + """SASLprep dummy""" + if isinstance(data, str): + raise TypeError( + "The stringprep module is not available. Usernames and " + "passwords must be instances of bytes." + ) + return data + +else: + HAVE_STRINGPREP = True + import unicodedata + + # RFC4013 section 2.3 prohibited output. + _PROHIBITED = ( + # A strict reading of RFC 4013 requires table c12 here, but + # characters from it are mapped to SPACE in the Map step. Can + # normalization reintroduce them somehow? 
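+        # Each entry below is a stringprep predicate taking one character;
+        # the Prohibit step in saslprep() rejects any character matching
+        # one of these tables (RFC 3454 step 3).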
+ stringprep.in_table_c12, + stringprep.in_table_c21_c22, + stringprep.in_table_c3, + stringprep.in_table_c4, + stringprep.in_table_c5, + stringprep.in_table_c6, + stringprep.in_table_c7, + stringprep.in_table_c8, + stringprep.in_table_c9, + ) + + def saslprep(data: Any, prohibit_unassigned_code_points: Optional[bool] = True) -> str: + """An implementation of RFC4013 SASLprep. + + :Parameters: + - `data`: The string to SASLprep. Unicode strings + (:class:`str`) are supported. Byte strings + (:class:`bytes`) are ignored. + - `prohibit_unassigned_code_points`: True / False. RFC 3454 + and RFCs for various SASL mechanisms distinguish between + `queries` (unassigned code points allowed) and + `stored strings` (unassigned code points prohibited). Defaults + to ``True`` (unassigned code points are prohibited). + + :Returns: + The SASLprep'ed version of `data`. + """ + prohibited: Any + + if not isinstance(data, str): + return data + + if prohibit_unassigned_code_points: + prohibited = (*_PROHIBITED, stringprep.in_table_a1) + else: + prohibited = _PROHIBITED + + # RFC3454 section 2, step 1 - Map + # RFC4013 section 2.1 mappings + # Map Non-ASCII space characters to SPACE (U+0020). Map + # commonly mapped to nothing characters to, well, nothing. + in_table_c12 = stringprep.in_table_c12 + in_table_b1 = stringprep.in_table_b1 + data = "".join( + ["\u0020" if in_table_c12(elt) else elt for elt in data if not in_table_b1(elt)] + ) + + # RFC3454 section 2, step 2 - Normalize + # RFC4013 section 2.2 normalization + data = unicodedata.ucd_3_2_0.normalize("NFKC", data) + + in_table_d1 = stringprep.in_table_d1 + if in_table_d1(data[0]): + if not in_table_d1(data[-1]): + # RFC3454, Section 6, #3. If a string contains any + # RandALCat character, the first and last characters + # MUST be RandALCat characters. + raise ValueError("SASLprep: failed bidirectional check") + # RFC3454, Section 6, #2. If a string contains any RandALCat + # character, it MUST NOT contain any LCat character. + prohibited = (*prohibited, stringprep.in_table_d2) + else: + # RFC3454, Section 6, #3. Following the logic of #3, if + # the first character is not a RandALCat, no other character + # can be either. + prohibited = (*prohibited, in_table_d1) + + # RFC3454 section 2, step 3 and 4 - Prohibit and check bidi + for char in data: + if any(in_table(char) for in_table in prohibited): + raise ValueError("SASLprep: failed prohibited character check") + + return data diff --git a/backend/test/lib/python3.8/site-packages/pymongo/server.py b/backend/test/lib/python3.8/site-packages/pymongo/server.py new file mode 100644 index 0000000000000000000000000000000000000000..2fe2443eecac07d780352dad965aebf038423e65 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/server.py @@ -0,0 +1,293 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. 
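(Illustrative aside, not part of the diff: a quick sanity check of the saslprep hunk above, assuming the stringprep module is importable so the real implementation, not the dummy fallback, is in use.)

    from pymongo.saslprep import saslprep

    # RFC 4013 mapping step: the soft hyphen (table B.1) maps to nothing,
    # and non-ASCII spaces (table C.1.2) map to U+0020.
    assert saslprep("I\u00adX") == "IX"
    assert saslprep("user\u2003name") == "user name"

    # RFC 4013 prohibit step: control characters raise ValueError.
    try:
        saslprep("\u0007")
    except ValueError as exc:
        print(exc)  # SASLprep: failed prohibited character check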
+ +"""Communicate with one MongoDB server in a topology.""" +from __future__ import annotations + +from datetime import datetime +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ContextManager, + List, + Optional, + Tuple, + Union, +) + +from bson import _decode_all_selective +from pymongo.errors import NotPrimaryError, OperationFailure +from pymongo.helpers import _check_command_response, _handle_reauth +from pymongo.message import _convert_exception, _GetMore, _OpMsg, _Query +from pymongo.response import PinnedResponse, Response + +if TYPE_CHECKING: + from queue import Queue + from weakref import ReferenceType + + from bson.objectid import ObjectId + from pymongo.mongo_client import _MongoClientErrorHandler + from pymongo.monitor import Monitor + from pymongo.monitoring import _EventListeners + from pymongo.pool import Connection, Pool + from pymongo.read_preferences import _ServerMode + from pymongo.server_description import ServerDescription + from pymongo.typings import _DocumentOut + +_CURSOR_DOC_FIELDS = {"cursor": {"firstBatch": 1, "nextBatch": 1}} + + +class Server: + def __init__( + self, + server_description: ServerDescription, + pool: Pool, + monitor: Monitor, + topology_id: Optional[ObjectId] = None, + listeners: Optional[_EventListeners] = None, + events: Optional[ReferenceType[Queue]] = None, + ) -> None: + """Represent one MongoDB server.""" + self._description = server_description + self._pool = pool + self._monitor = monitor + self._topology_id = topology_id + self._publish = listeners is not None and listeners.enabled_for_server + self._listener = listeners + self._events = None + if self._publish: + self._events = events() # type: ignore[misc] + + def open(self) -> None: + """Start monitoring, or restart after a fork. + + Multiple calls have no effect. + """ + if not self._pool.opts.load_balanced: + self._monitor.open() + + def reset(self, service_id: Optional[ObjectId] = None) -> None: + """Clear the connection pool.""" + self.pool.reset(service_id) + + def close(self) -> None: + """Clear the connection pool and stop the monitor. + + Reconnect with open(). + """ + if self._publish: + assert self._listener is not None + assert self._events is not None + self._events.put( + ( + self._listener.publish_server_closed, + (self._description.address, self._topology_id), + ) + ) + self._monitor.close() + self._pool.reset_without_pause() + + def request_check(self) -> None: + """Check the server's state soon.""" + self._monitor.request_check() + + @_handle_reauth + def run_operation( + self, + conn: Connection, + operation: Union[_Query, _GetMore], + read_preference: _ServerMode, + listeners: Optional[_EventListeners], + unpack_res: Callable[..., List[_DocumentOut]], + ) -> Response: + """Run a _Query or _GetMore operation and return a Response object. + + This method is used only to run _Query/_GetMore operations from + cursors. + Can raise ConnectionFailure, OperationFailure, etc. + + :Parameters: + - `conn`: A Connection instance. + - `operation`: A _Query or _GetMore object. + - `read_preference`: The read preference to use. + - `listeners`: Instance of _EventListeners or None. + - `unpack_res`: A callable that decodes the wire protocol response. 
+ """ + duration = None + assert listeners is not None + publish = listeners.enabled_for_commands + if publish: + start = datetime.now() + + use_cmd = operation.use_command(conn) + more_to_come = operation.conn_mgr and operation.conn_mgr.more_to_come + if more_to_come: + request_id = 0 + else: + message = operation.get_message(read_preference, conn, use_cmd) + request_id, data, max_doc_size = self._split_message(message) + + if publish: + cmd, dbn = operation.as_command(conn) + assert listeners is not None + listeners.publish_command_start( + cmd, dbn, request_id, conn.address, service_id=conn.service_id + ) + start = datetime.now() + + try: + if more_to_come: + reply = conn.receive_message(None) + else: + conn.send_message(data, max_doc_size) + reply = conn.receive_message(request_id) + + # Unpack and check for command errors. + if use_cmd: + user_fields = _CURSOR_DOC_FIELDS + legacy_response = False + else: + user_fields = None + legacy_response = True + docs = unpack_res( + reply, + operation.cursor_id, + operation.codec_options, + legacy_response=legacy_response, + user_fields=user_fields, + ) + if use_cmd: + first = docs[0] + operation.client._process_response(first, operation.session) + _check_command_response(first, conn.max_wire_version) + except Exception as exc: + if publish: + duration = datetime.now() - start + if isinstance(exc, (NotPrimaryError, OperationFailure)): + failure: _DocumentOut = exc.details # type: ignore[assignment] + else: + failure = _convert_exception(exc) + assert listeners is not None + listeners.publish_command_failure( + duration, + failure, + operation.name, + request_id, + conn.address, + service_id=conn.service_id, + ) + raise + + if publish: + duration = datetime.now() - start + # Must publish in find / getMore / explain command response + # format. + if use_cmd: + res: _DocumentOut = docs[0] + elif operation.name == "explain": + res = docs[0] if docs else {} + else: + res = {"cursor": {"id": reply.cursor_id, "ns": operation.namespace()}, "ok": 1} # type: ignore[union-attr] + if operation.name == "find": + res["cursor"]["firstBatch"] = docs + else: + res["cursor"]["nextBatch"] = docs + assert listeners is not None + listeners.publish_command_success( + duration, + res, + operation.name, + request_id, + conn.address, + service_id=conn.service_id, + ) + + # Decrypt response. + client = operation.client + if client and client._encrypter: + if use_cmd: + decrypted = client._encrypter.decrypt(reply.raw_command_response()) + docs = _decode_all_selective(decrypted, operation.codec_options, user_fields) + + response: Response + + if client._should_pin_cursor(operation.session) or operation.exhaust: + conn.pin_cursor() + if isinstance(reply, _OpMsg): + # In OP_MSG, the server keeps sending only if the + # more_to_come flag is set. + more_to_come = reply.more_to_come + else: + # In OP_REPLY, the server keeps sending until cursor_id is 0. 
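+                # A cursor may also be pinned without exhaust (e.g. in load
+                # balanced mode); in that case more_to_come stays False.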
+ more_to_come = bool(operation.exhaust and reply.cursor_id) + if operation.conn_mgr: + operation.conn_mgr.update_exhaust(more_to_come) + response = PinnedResponse( + data=reply, + address=self._description.address, + conn=conn, + duration=duration, + request_id=request_id, + from_command=use_cmd, + docs=docs, + more_to_come=more_to_come, + ) + else: + response = Response( + data=reply, + address=self._description.address, + duration=duration, + request_id=request_id, + from_command=use_cmd, + docs=docs, + ) + + return response + + def checkout( + self, handler: Optional[_MongoClientErrorHandler] = None + ) -> ContextManager[Connection]: + return self.pool.checkout(handler) + + @property + def description(self) -> ServerDescription: + return self._description + + @description.setter + def description(self, server_description: ServerDescription) -> None: + assert server_description.address == self._description.address + self._description = server_description + + @property + def pool(self) -> Pool: + return self._pool + + def _split_message( + self, message: Union[Tuple[int, Any], Tuple[int, Any, int]] + ) -> Tuple[int, Any, int]: + """Return request_id, data, max_doc_size. + + :Parameters: + - `message`: (request_id, data, max_doc_size) or (request_id, data) + """ + if len(message) == 3: + return message # type: ignore[return-value] + else: + # get_more and kill_cursors messages don't include BSON documents. + request_id, data = message # type: ignore[misc] + return request_id, data, 0 + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self._description!r}>" diff --git a/backend/test/lib/python3.8/site-packages/pymongo/server_api.py b/backend/test/lib/python3.8/site-packages/pymongo/server_api.py new file mode 100644 index 0000000000000000000000000000000000000000..47812818de998b95c7ba91907425e485b40609b8 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/server_api.py @@ -0,0 +1,175 @@ +# Copyright 2020-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""Support for MongoDB Stable API. + +.. _versioned-api-ref: + +MongoDB Stable API +===================== + +Starting in MongoDB 5.0, applications can specify the server API version +to use when creating a :class:`~pymongo.mongo_client.MongoClient`. Doing so +ensures that the driver behaves in a manner compatible with that server API +version, regardless of the server's actual release version. + +Declaring an API Version +```````````````````````` + +.. attention:: Stable API requires MongoDB >=5.0. 
+ +To configure MongoDB Stable API, pass the ``server_api`` keyword option to +:class:`~pymongo.mongo_client.MongoClient`:: + + >>> from pymongo.mongo_client import MongoClient + >>> from pymongo.server_api import ServerApi + >>> + >>> # Declare API version "1" for MongoClient "client" + >>> server_api = ServerApi('1') + >>> client = MongoClient(server_api=server_api) + +The declared API version is applied to all commands run through ``client``, +including those sent through the generic +:meth:`~pymongo.database.Database.command` helper. + +.. note:: Declaring an API version on the + :class:`~pymongo.mongo_client.MongoClient` **and** specifying stable + API options in :meth:`~pymongo.database.Database.command` command document + is not supported and will lead to undefined behaviour. + +To run any command without declaring a server API version or using a different +API version, create a separate :class:`~pymongo.mongo_client.MongoClient` +instance. + +Strict Mode +``````````` + +Configuring ``strict`` mode will cause the MongoDB server to reject all +commands that are not part of the declared :attr:`ServerApi.version`. This +includes command options and aggregation pipeline stages. + +For example:: + + >>> server_api = ServerApi('1', strict=True) + >>> client = MongoClient(server_api=server_api) + >>> client.test.command('count', 'test') + Traceback (most recent call last): + ... + pymongo.errors.OperationFailure: Provided apiStrict:true, but the command count is not in API Version 1, full error: {'ok': 0.0, 'errmsg': 'Provided apiStrict:true, but the command count is not in API Version 1', 'code': 323, 'codeName': 'APIStrictError' + +Detecting API Deprecations +`````````````````````````` + +The ``deprecationErrors`` option can be used to enable command failures +when using functionality that is deprecated from the configured +:attr:`ServerApi.version`. For example:: + + >>> server_api = ServerApi('1', deprecation_errors=True) + >>> client = MongoClient(server_api=server_api) + +Note that at the time of this writing, no deprecated APIs exist. + +Classes +======= +""" +from __future__ import annotations + +from typing import Any, MutableMapping, Optional + + +class ServerApiVersion: + """An enum that defines values for :attr:`ServerApi.version`. + + .. versionadded:: 3.12 + """ + + V1 = "1" + """Server API version "1".""" + + +class ServerApi: + """MongoDB Stable API.""" + + def __init__( + self, version: str, strict: Optional[bool] = None, deprecation_errors: Optional[bool] = None + ): + """Options to configure MongoDB Stable API. + + :Parameters: + - `version`: The API version string. Must be one of the values in + :class:`ServerApiVersion`. + - `strict` (optional): Set to ``True`` to enable API strict mode. + Defaults to ``None`` which means "use the server's default". + - `deprecation_errors` (optional): Set to ``True`` to enable + deprecation errors. Defaults to ``None`` which means "use the + server's default". + + .. 
versionadded:: 3.12 + """ + if version != ServerApiVersion.V1: + raise ValueError(f"Unknown ServerApi version: {version}") + if strict is not None and not isinstance(strict, bool): + raise TypeError( + "Wrong type for ServerApi strict, value must be an instance " + "of bool, not {}".format(type(strict)) + ) + if deprecation_errors is not None and not isinstance(deprecation_errors, bool): + raise TypeError( + "Wrong type for ServerApi deprecation_errors, value must be " + "an instance of bool, not {}".format(type(deprecation_errors)) + ) + self._version = version + self._strict = strict + self._deprecation_errors = deprecation_errors + + @property + def version(self) -> str: + """The API version setting. + + This value is sent to the server in the "apiVersion" field. + """ + return self._version + + @property + def strict(self) -> Optional[bool]: + """The API strict mode setting. + + When set, this value is sent to the server in the "apiStrict" field. + """ + return self._strict + + @property + def deprecation_errors(self) -> Optional[bool]: + """The API deprecation errors setting. + + When set, this value is sent to the server in the + "apiDeprecationErrors" field. + """ + return self._deprecation_errors + + +def _add_to_command(cmd: MutableMapping[str, Any], server_api: Optional[ServerApi]) -> None: + """Internal helper which adds API versioning options to a command. + + :Parameters: + - `cmd`: The command. + - `server_api` (optional): A :class:`ServerApi` or ``None``. + """ + if not server_api: + return + cmd["apiVersion"] = server_api.version + if server_api.strict is not None: + cmd["apiStrict"] = server_api.strict + if server_api.deprecation_errors is not None: + cmd["apiDeprecationErrors"] = server_api.deprecation_errors diff --git a/backend/test/lib/python3.8/site-packages/pymongo/server_description.py b/backend/test/lib/python3.8/site-packages/pymongo/server_description.py new file mode 100644 index 0000000000000000000000000000000000000000..c2fa0305370492662facfaea9110cadc02a4e3d7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/server_description.py @@ -0,0 +1,300 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Represent one server the driver is connected to.""" + +import time +import warnings +from typing import Any, Dict, Mapping, Optional, Set, Tuple + +from bson import EPOCH_NAIVE +from bson.objectid import ObjectId +from pymongo.hello import Hello +from pymongo.server_type import SERVER_TYPE +from pymongo.typings import ClusterTime, _Address + + +class ServerDescription: + """Immutable representation of one server. 
+
+    :Parameters:
+      - `address`: A (host, port) pair
+      - `hello`: Optional Hello instance
+      - `round_trip_time`: Optional float
+      - `error`: Optional, the last error attempting to connect to the server
+      - `min_round_trip_time`: Optional float, the min latency from the most recent samples
+    """
+
+    __slots__ = (
+        "_address",
+        "_server_type",
+        "_all_hosts",
+        "_tags",
+        "_replica_set_name",
+        "_primary",
+        "_max_bson_size",
+        "_max_message_size",
+        "_max_write_batch_size",
+        "_min_wire_version",
+        "_max_wire_version",
+        "_round_trip_time",
+        "_min_round_trip_time",
+        "_me",
+        "_is_writable",
+        "_is_readable",
+        "_ls_timeout_minutes",
+        "_error",
+        "_set_version",
+        "_election_id",
+        "_cluster_time",
+        "_last_write_date",
+        "_last_update_time",
+        "_topology_version",
+    )
+
+    def __init__(
+        self,
+        address: _Address,
+        hello: Optional[Hello] = None,
+        round_trip_time: Optional[float] = None,
+        error: Optional[Exception] = None,
+        min_round_trip_time: float = 0.0,
+    ) -> None:
+        self._address = address
+        if not hello:
+            hello = Hello({})
+
+        self._server_type = hello.server_type
+        self._all_hosts = hello.all_hosts
+        self._tags = hello.tags
+        self._replica_set_name = hello.replica_set_name
+        self._primary = hello.primary
+        self._max_bson_size = hello.max_bson_size
+        self._max_message_size = hello.max_message_size
+        self._max_write_batch_size = hello.max_write_batch_size
+        self._min_wire_version = hello.min_wire_version
+        self._max_wire_version = hello.max_wire_version
+        self._set_version = hello.set_version
+        self._election_id = hello.election_id
+        self._cluster_time = hello.cluster_time
+        self._is_writable = hello.is_writable
+        self._is_readable = hello.is_readable
+        self._ls_timeout_minutes = hello.logical_session_timeout_minutes
+        self._round_trip_time = round_trip_time
+        self._min_round_trip_time = min_round_trip_time
+        self._me = hello.me
+        self._last_update_time = time.monotonic()
+        self._error = error
+        self._topology_version = hello.topology_version
+        if error:
+            details = getattr(error, "details", None)
+            if isinstance(details, dict):
+                self._topology_version = details.get("topologyVersion")
+
+        self._last_write_date: Optional[float]
+        if hello.last_write_date:
+            # Convert from datetime to seconds.
+            delta = hello.last_write_date - EPOCH_NAIVE
+            self._last_write_date = delta.total_seconds()
+        else:
+            self._last_write_date = None
+
+    @property
+    def address(self) -> _Address:
+        """The address (host, port) of this server."""
+        return self._address
+
+    @property
+    def server_type(self) -> int:
+        """The type of this server."""
+        return self._server_type
+
+    @property
+    def server_type_name(self) -> str:
+        """The server type as a human readable string.
+
+        ..
versionadded:: 3.4 + """ + return SERVER_TYPE._fields[self._server_type] + + @property + def all_hosts(self) -> Set[Tuple[str, int]]: + """List of hosts, passives, and arbiters known to this server.""" + return self._all_hosts + + @property + def tags(self) -> Mapping[str, Any]: + return self._tags + + @property + def replica_set_name(self) -> Optional[str]: + """Replica set name or None.""" + return self._replica_set_name + + @property + def primary(self) -> Optional[Tuple[str, int]]: + """This server's opinion about who the primary is, or None.""" + return self._primary + + @property + def max_bson_size(self) -> int: + return self._max_bson_size + + @property + def max_message_size(self) -> int: + return self._max_message_size + + @property + def max_write_batch_size(self) -> int: + return self._max_write_batch_size + + @property + def min_wire_version(self) -> int: + return self._min_wire_version + + @property + def max_wire_version(self) -> int: + return self._max_wire_version + + @property + def set_version(self) -> Optional[int]: + return self._set_version + + @property + def election_id(self) -> Optional[ObjectId]: + return self._election_id + + @property + def cluster_time(self) -> Optional[ClusterTime]: + return self._cluster_time + + @property + def election_tuple(self) -> Tuple[Optional[int], Optional[ObjectId]]: + warnings.warn( + "'election_tuple' is deprecated, use 'set_version' and 'election_id' instead", + DeprecationWarning, + stacklevel=2, + ) + return self._set_version, self._election_id + + @property + def me(self) -> Optional[Tuple[str, int]]: + return self._me + + @property + def logical_session_timeout_minutes(self) -> Optional[int]: + return self._ls_timeout_minutes + + @property + def last_write_date(self) -> Optional[float]: + return self._last_write_date + + @property + def last_update_time(self) -> float: + return self._last_update_time + + @property + def round_trip_time(self) -> Optional[float]: + """The current average latency or None.""" + # This override is for unittesting only! 
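+        # Tests can pre-populate the class-level _host_to_round_trip_time
+        # dict (defined at the bottom of this class) to fake per-address
+        # latencies.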
+        if self._address in self._host_to_round_trip_time:
+            return self._host_to_round_trip_time[self._address]
+
+        return self._round_trip_time
+
+    @property
+    def min_round_trip_time(self) -> float:
+        """The min latency from the most recent samples."""
+        return self._min_round_trip_time
+
+    @property
+    def error(self) -> Optional[Exception]:
+        """The last error attempting to connect to the server, or None."""
+        return self._error
+
+    @property
+    def is_writable(self) -> bool:
+        return self._is_writable
+
+    @property
+    def is_readable(self) -> bool:
+        return self._is_readable
+
+    @property
+    def mongos(self) -> bool:
+        return self._server_type == SERVER_TYPE.Mongos
+
+    @property
+    def is_server_type_known(self) -> bool:
+        return self.server_type != SERVER_TYPE.Unknown
+
+    @property
+    def retryable_writes_supported(self) -> bool:
+        """Checks if this server supports retryable writes."""
+        return (
+            self._ls_timeout_minutes is not None
+            and self._server_type in (SERVER_TYPE.Mongos, SERVER_TYPE.RSPrimary)
+        ) or self._server_type == SERVER_TYPE.LoadBalancer
+
+    @property
+    def retryable_reads_supported(self) -> bool:
+        """Checks if this server supports retryable reads."""
+        return self._max_wire_version >= 6
+
+    @property
+    def topology_version(self) -> Optional[Mapping[str, Any]]:
+        return self._topology_version
+
+    def to_unknown(self, error: Optional[Exception] = None) -> "ServerDescription":
+        unknown = ServerDescription(self.address, error=error)
+        unknown._topology_version = self.topology_version
+        return unknown
+
+    def __eq__(self, other: Any) -> bool:
+        if isinstance(other, ServerDescription):
+            return (
+                (self._address == other.address)
+                and (self._server_type == other.server_type)
+                and (self._min_wire_version == other.min_wire_version)
+                and (self._max_wire_version == other.max_wire_version)
+                and (self._me == other.me)
+                and (self._all_hosts == other.all_hosts)
+                and (self._tags == other.tags)
+                and (self._replica_set_name == other.replica_set_name)
+                and (self._set_version == other.set_version)
+                and (self._election_id == other.election_id)
+                and (self._primary == other.primary)
+                and (self._ls_timeout_minutes == other.logical_session_timeout_minutes)
+                and (self._error == other.error)
+            )
+
+        return NotImplemented
+
+    def __ne__(self, other: Any) -> bool:
+        return not self == other
+
+    def __repr__(self) -> str:
+        errmsg = ""
+        if self.error:
+            errmsg = f", error={self.error!r}"
+        return "<{} {} server_type: {}, rtt: {}{}>".format(
+            self.__class__.__name__,
+            self.address,
+            self.server_type_name,
+            self.round_trip_time,
+            errmsg,
+        )
+
+    # For unittesting only. Use under no circumstances!
+    _host_to_round_trip_time: Dict = {}
diff --git a/backend/test/lib/python3.8/site-packages/pymongo/server_selectors.py b/backend/test/lib/python3.8/site-packages/pymongo/server_selectors.py
new file mode 100644
index 0000000000000000000000000000000000000000..ee6441d7de3807c98cf3a1b550d372a170192fab
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/pymongo/server_selectors.py
@@ -0,0 +1,174 @@
+# Copyright 2014-2016 MongoDB, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you
+# may not use this file except in compliance with the License. You
+# may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
See the License for the specific language governing +# permissions and limitations under the License. + +"""Criteria to select some ServerDescriptions from a TopologyDescription.""" +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, List, Mapping, Optional, Sequence, TypeVar, cast + +from pymongo.server_type import SERVER_TYPE + +if TYPE_CHECKING: + from pymongo.server_description import ServerDescription + from pymongo.topology_description import TopologyDescription + + +T = TypeVar("T") +TagSet = Mapping[str, Any] +TagSets = Sequence[TagSet] + + +class Selection: + """Input or output of a server selector function.""" + + @classmethod + def from_topology_description(cls, topology_description: TopologyDescription) -> Selection: + known_servers = topology_description.known_servers + primary = None + for sd in known_servers: + if sd.server_type == SERVER_TYPE.RSPrimary: + primary = sd + break + + return Selection( + topology_description, + topology_description.known_servers, + topology_description.common_wire_version, + primary, + ) + + def __init__( + self, + topology_description: TopologyDescription, + server_descriptions: List[ServerDescription], + common_wire_version: Optional[int], + primary: Optional[ServerDescription], + ): + self.topology_description = topology_description + self.server_descriptions = server_descriptions + self.primary = primary + self.common_wire_version = common_wire_version + + def with_server_descriptions(self, server_descriptions: List[ServerDescription]) -> Selection: + return Selection( + self.topology_description, server_descriptions, self.common_wire_version, self.primary + ) + + def secondary_with_max_last_write_date(self) -> Optional[ServerDescription]: + secondaries = secondary_server_selector(self) + if secondaries.server_descriptions: + return max( + secondaries.server_descriptions, key=lambda sd: cast(float, sd.last_write_date) + ) + return None + + @property + def primary_selection(self) -> Selection: + primaries = [self.primary] if self.primary else [] + return self.with_server_descriptions(primaries) + + @property + def heartbeat_frequency(self) -> int: + return self.topology_description.heartbeat_frequency + + @property + def topology_type(self) -> int: + return self.topology_description.topology_type + + def __bool__(self) -> bool: + return bool(self.server_descriptions) + + def __getitem__(self, item: int) -> ServerDescription: + return self.server_descriptions[item] + + +def any_server_selector(selection: T) -> T: + return selection + + +def readable_server_selector(selection: Selection) -> Selection: + return selection.with_server_descriptions( + [s for s in selection.server_descriptions if s.is_readable] + ) + + +def writable_server_selector(selection: Selection) -> Selection: + return selection.with_server_descriptions( + [s for s in selection.server_descriptions if s.is_writable] + ) + + +def secondary_server_selector(selection: Selection) -> Selection: + return selection.with_server_descriptions( + [s for s in selection.server_descriptions if s.server_type == SERVER_TYPE.RSSecondary] + ) + + +def arbiter_server_selector(selection: Selection) -> Selection: + return selection.with_server_descriptions( + [s for s in selection.server_descriptions if s.server_type == SERVER_TYPE.RSArbiter] + ) + + +def writable_preferred_server_selector(selection: Selection) -> Selection: + """Like PrimaryPreferred but doesn't use tags or latency.""" + return writable_server_selector(selection) or secondary_server_selector(selection) + 
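(Illustrative aside, not part of the diff: the selectors above compose by filtering a Selection's server_descriptions list. The sketch below builds ServerDescriptions from hand-written hello documents and passes None where a TopologyDescription is expected, an assumption that holds only because these simple filters never consult it.)

    from pymongo.hello import Hello
    from pymongo.server_description import ServerDescription
    from pymongo.server_selectors import (
        Selection,
        secondary_server_selector,
        writable_preferred_server_selector,
    )

    hosts = ["db1:27017", "db2:27017"]
    primary = ServerDescription(
        ("db1", 27017),
        Hello({"ok": 1, "ismaster": True, "setName": "rs0", "hosts": hosts}),
    )
    secondary = ServerDescription(
        ("db2", 27017),
        Hello({"ok": 1, "secondary": True, "setName": "rs0", "hosts": hosts}),
    )

    selection = Selection(None, [primary, secondary], 13, primary)

    # Each selector returns a new Selection wrapping only the matches.
    print([s.address for s in secondary_server_selector(selection)])
    # [('db2', 27017)]
    print([s.address for s in writable_preferred_server_selector(selection)])
    # [('db1', 27017)]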
+ +def apply_single_tag_set(tag_set: TagSet, selection: Selection) -> Selection: + """All servers matching one tag set. + + A tag set is a dict. A server matches if its tags are a superset: + A server tagged {'a': '1', 'b': '2'} matches the tag set {'a': '1'}. + + The empty tag set {} matches any server. + """ + + def tags_match(server_tags: Mapping[str, Any]) -> bool: + for key, value in tag_set.items(): + if key not in server_tags or server_tags[key] != value: + return False + + return True + + return selection.with_server_descriptions( + [s for s in selection.server_descriptions if tags_match(s.tags)] + ) + + +def apply_tag_sets(tag_sets: TagSets, selection: Selection) -> Selection: + """All servers match a list of tag sets. + + tag_sets is a list of dicts. The empty tag set {} matches any server, + and may be provided at the end of the list as a fallback. So + [{'a': 'value'}, {}] expresses a preference for servers tagged + {'a': 'value'}, but accepts any server if none matches the first + preference. + """ + for tag_set in tag_sets: + with_tag_set = apply_single_tag_set(tag_set, selection) + if with_tag_set: + return with_tag_set + + return selection.with_server_descriptions([]) + + +def secondary_with_tags_server_selector(tag_sets: TagSets, selection: Selection) -> Selection: + """All near-enough secondaries matching the tag sets.""" + return apply_tag_sets(tag_sets, secondary_server_selector(selection)) + + +def member_with_tags_server_selector(tag_sets: TagSets, selection: Selection) -> Selection: + """All near-enough members matching the tag sets.""" + return apply_tag_sets(tag_sets, readable_server_selector(selection)) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/server_type.py b/backend/test/lib/python3.8/site-packages/pymongo/server_type.py new file mode 100644 index 0000000000000000000000000000000000000000..ee53b6b97dc7c08aea547988865a33112a42d23a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/server_type.py @@ -0,0 +1,32 @@ +# Copyright 2014-2015 MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Type codes for MongoDB servers.""" + +from typing import NamedTuple + + +class _ServerType(NamedTuple): + Unknown: int + Mongos: int + RSPrimary: int + RSSecondary: int + RSArbiter: int + RSOther: int + RSGhost: int + Standalone: int + LoadBalancer: int + + +SERVER_TYPE = _ServerType(*range(9)) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/settings.py b/backend/test/lib/python3.8/site-packages/pymongo/settings.py new file mode 100644 index 0000000000000000000000000000000000000000..d6ef93e5c2039a8c5bcf7aef84c1da55599112a8 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/settings.py @@ -0,0 +1,160 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. 
You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""Represent MongoClient's configuration.""" + +import threading +import traceback +from typing import Any, Collection, Dict, Optional, Tuple, Type, Union + +from bson.objectid import ObjectId +from pymongo import common, monitor, pool +from pymongo.common import LOCAL_THRESHOLD_MS, SERVER_SELECTION_TIMEOUT +from pymongo.errors import ConfigurationError +from pymongo.pool import Pool, PoolOptions +from pymongo.server_description import ServerDescription +from pymongo.topology_description import TOPOLOGY_TYPE, _ServerSelector + + +class TopologySettings: + def __init__( + self, + seeds: Optional[Collection[Tuple[str, int]]] = None, + replica_set_name: Optional[str] = None, + pool_class: Optional[Type[Pool]] = None, + pool_options: Optional[PoolOptions] = None, + monitor_class: Optional[Type[monitor.Monitor]] = None, + condition_class: Optional[Type[threading.Condition]] = None, + local_threshold_ms: int = LOCAL_THRESHOLD_MS, + server_selection_timeout: int = SERVER_SELECTION_TIMEOUT, + heartbeat_frequency: int = common.HEARTBEAT_FREQUENCY, + server_selector: Optional[_ServerSelector] = None, + fqdn: Optional[str] = None, + direct_connection: Optional[bool] = False, + load_balanced: Optional[bool] = None, + srv_service_name: str = common.SRV_SERVICE_NAME, + srv_max_hosts: int = 0, + ): + """Represent MongoClient's configuration. + + Take a list of (host, port) pairs and optional replica set name. + """ + if heartbeat_frequency < common.MIN_HEARTBEAT_INTERVAL: + raise ConfigurationError( + "heartbeatFrequencyMS cannot be less than %d" + % (common.MIN_HEARTBEAT_INTERVAL * 1000,) + ) + + self._seeds: Collection[Tuple[str, int]] = seeds or [("localhost", 27017)] + self._replica_set_name = replica_set_name + self._pool_class: Type[Pool] = pool_class or pool.Pool + self._pool_options: PoolOptions = pool_options or PoolOptions() + self._monitor_class: Type[monitor.Monitor] = monitor_class or monitor.Monitor + self._condition_class: Type[threading.Condition] = condition_class or threading.Condition + self._local_threshold_ms = local_threshold_ms + self._server_selection_timeout = server_selection_timeout + self._server_selector = server_selector + self._fqdn = fqdn + self._heartbeat_frequency = heartbeat_frequency + self._direct = direct_connection + self._load_balanced = load_balanced + self._srv_service_name = srv_service_name + self._srv_max_hosts = srv_max_hosts or 0 + + self._topology_id = ObjectId() + # Store the allocation traceback to catch unclosed clients in the + # test suite. 
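+        # format_stack() is captured eagerly at construction time so a
+        # leaked client can be traced back to where it was created.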
+ self._stack = "".join(traceback.format_stack()) + + @property + def seeds(self) -> Collection[Tuple[str, int]]: + """List of server addresses.""" + return self._seeds + + @property + def replica_set_name(self) -> Optional[str]: + return self._replica_set_name + + @property + def pool_class(self) -> Type[Pool]: + return self._pool_class + + @property + def pool_options(self) -> PoolOptions: + return self._pool_options + + @property + def monitor_class(self) -> Type[monitor.Monitor]: + return self._monitor_class + + @property + def condition_class(self) -> Type[threading.Condition]: + return self._condition_class + + @property + def local_threshold_ms(self) -> int: + return self._local_threshold_ms + + @property + def server_selection_timeout(self) -> int: + return self._server_selection_timeout + + @property + def server_selector(self) -> Optional[_ServerSelector]: + return self._server_selector + + @property + def heartbeat_frequency(self) -> int: + return self._heartbeat_frequency + + @property + def fqdn(self) -> Optional[str]: + return self._fqdn + + @property + def direct(self) -> Optional[bool]: + """Connect directly to a single server, or use a set of servers? + + True if there is one seed and no replica_set_name. + """ + return self._direct + + @property + def load_balanced(self) -> Optional[bool]: + """True if the client was configured to connect to a load balancer.""" + return self._load_balanced + + @property + def srv_service_name(self) -> str: + """The srvServiceName.""" + return self._srv_service_name + + @property + def srv_max_hosts(self) -> int: + """The srvMaxHosts.""" + return self._srv_max_hosts + + def get_topology_type(self) -> int: + if self.load_balanced: + return TOPOLOGY_TYPE.LoadBalanced + elif self.direct: + return TOPOLOGY_TYPE.Single + elif self.replica_set_name is not None: + return TOPOLOGY_TYPE.ReplicaSetNoPrimary + else: + return TOPOLOGY_TYPE.Unknown + + def get_server_descriptions(self) -> Dict[Union[Tuple[str, int], Any], ServerDescription]: + """Initial dict of (address, ServerDescription) for all seeds.""" + return {address: ServerDescription(address) for address in self.seeds} diff --git a/backend/test/lib/python3.8/site-packages/pymongo/socket_checker.py b/backend/test/lib/python3.8/site-packages/pymongo/socket_checker.py new file mode 100644 index 0000000000000000000000000000000000000000..a83311cd0153cb74096e61537f2661acc6fc4807 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/socket_checker.py @@ -0,0 +1,104 @@ +# Copyright 2020-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
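(Illustrative aside, not part of the diff: a minimal usage sketch for the TopologySettings hunk above.)

    from pymongo.settings import TopologySettings
    from pymongo.topology_description import TOPOLOGY_TYPE

    settings = TopologySettings(
        seeds=[("db1", 27017), ("db2", 27017)],
        replica_set_name="rs0",
    )

    # A replica set name without directConnection implies ReplicaSetNoPrimary.
    assert settings.get_topology_type() == TOPOLOGY_TYPE.ReplicaSetNoPrimary

    # Every seed starts out as an Unknown ServerDescription.
    for address, sd in settings.get_server_descriptions().items():
        print(address, sd.server_type_name)  # e.g. ('db1', 27017) Unknown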
+ +"""Select / poll helper""" + +import errno +import select +import sys +from typing import Any, Optional + +# PYTHON-2320: Jython does not fully support poll on SSL sockets, +# https://bugs.jython.org/issue2900 +_HAVE_POLL = hasattr(select, "poll") and not sys.platform.startswith("java") +_SelectError = getattr(select, "error", OSError) + + +def _errno_from_exception(exc: BaseException) -> Optional[int]: + if hasattr(exc, "errno"): + return exc.errno + if exc.args: + return exc.args[0] + return None + + +class SocketChecker: + def __init__(self) -> None: + self._poller: Optional[select.poll] + if _HAVE_POLL: + self._poller = select.poll() + else: + self._poller = None + + def select( + self, sock: Any, read: bool = False, write: bool = False, timeout: Optional[float] = 0 + ) -> bool: + """Select for reads or writes with a timeout in seconds (or None). + + Returns True if the socket is readable/writable, False on timeout. + """ + res: Any + while True: + try: + if self._poller: + mask = select.POLLERR | select.POLLHUP + if read: + mask = mask | select.POLLIN | select.POLLPRI + if write: + mask = mask | select.POLLOUT + self._poller.register(sock, mask) + try: + # poll() timeout is in milliseconds. select() + # timeout is in seconds. + timeout_ = None if timeout is None else timeout * 1000 + res = self._poller.poll(timeout_) + # poll returns a possibly-empty list containing + # (fd, event) 2-tuples for the descriptors that have + # events or errors to report. Return True if the list + # is not empty. + return bool(res) + finally: + self._poller.unregister(sock) + else: + rlist = [sock] if read else [] + wlist = [sock] if write else [] + res = select.select(rlist, wlist, [sock], timeout) + # select returns a 3-tuple of lists of objects that are + # ready: subsets of the first three arguments. Return + # True if any of the lists are not empty. + return any(res) + except (_SelectError, OSError) as exc: # type: ignore + if _errno_from_exception(exc) in (errno.EINTR, errno.EAGAIN): + continue + raise + + def socket_closed(self, sock: Any) -> bool: + """Return True if we know socket has been closed, False otherwise.""" + try: + return self.select(sock, read=True) + except (RuntimeError, KeyError): + # RuntimeError is raised during a concurrent poll. KeyError + # is raised by unregister if the socket is not in the poller. + # These errors should not be possible since we protect the + # poller with a mutex. + raise + except ValueError: + # ValueError is raised by register/unregister/select if the + # socket file descriptor is negative or outside the range for + # select (> 1023). + return True + except Exception: + # Any other exceptions should be attributed to a closed + # or invalid socket. + return True diff --git a/backend/test/lib/python3.8/site-packages/pymongo/srv_resolver.py b/backend/test/lib/python3.8/site-packages/pymongo/srv_resolver.py new file mode 100644 index 0000000000000000000000000000000000000000..67b781cf9cb980467dc78d6a8a48ab4c528b7cca --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/srv_resolver.py @@ -0,0 +1,138 @@ +# Copyright 2019-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. 
You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""Support for resolving hosts and options from mongodb+srv:// URIs.""" +from __future__ import annotations + +import ipaddress +import random +from typing import Any, List, Optional, Tuple, Union + +try: + from dns import resolver + + _HAVE_DNSPYTHON = True +except ImportError: + _HAVE_DNSPYTHON = False + +from pymongo.common import CONNECT_TIMEOUT +from pymongo.errors import ConfigurationError + + +# dnspython can return bytes or str from various parts +# of its API depending on version. We always want str. +def maybe_decode(text: Union[str, bytes]) -> str: + if isinstance(text, bytes): + return text.decode() + return text + + +# PYTHON-2667 Lazily call dns.resolver methods for compatibility with eventlet. +def _resolve(*args: Any, **kwargs: Any) -> resolver.Answer: + if hasattr(resolver, "resolve"): + # dnspython >= 2 + return resolver.resolve(*args, **kwargs) + # dnspython 1.X + return resolver.query(*args, **kwargs) + + +_INVALID_HOST_MSG = ( + "Invalid URI host: %s is not a valid hostname for 'mongodb+srv://'. " + "Did you mean to use 'mongodb://'?" +) + + +class _SrvResolver: + def __init__( + self, + fqdn: str, + connect_timeout: Optional[float], + srv_service_name: str, + srv_max_hosts: int = 0, + ): + self.__fqdn = fqdn + self.__srv = srv_service_name + self.__connect_timeout = connect_timeout or CONNECT_TIMEOUT + self.__srv_max_hosts = srv_max_hosts or 0 + # Validate the fully qualified domain name. + try: + ipaddress.ip_address(fqdn) + raise ConfigurationError(_INVALID_HOST_MSG % ("an IP address",)) + except ValueError: + pass + + try: + self.__plist = self.__fqdn.split(".")[1:] + except Exception: + raise ConfigurationError(_INVALID_HOST_MSG % (fqdn,)) + self.__slen = len(self.__plist) + if self.__slen < 2: + raise ConfigurationError(_INVALID_HOST_MSG % (fqdn,)) + + def get_options(self) -> Optional[str]: + try: + results = _resolve(self.__fqdn, "TXT", lifetime=self.__connect_timeout) + except (resolver.NoAnswer, resolver.NXDOMAIN): + # No TXT records + return None + except Exception as exc: + raise ConfigurationError(str(exc)) + if len(results) > 1: + raise ConfigurationError("Only one TXT record is supported") + return (b"&".join([b"".join(res.strings) for res in results])).decode("utf-8") + + def _resolve_uri(self, encapsulate_errors: bool) -> resolver.Answer: + try: + results = _resolve( + "_" + self.__srv + "._tcp." + self.__fqdn, "SRV", lifetime=self.__connect_timeout + ) + except Exception as exc: + if not encapsulate_errors: + # Raise the original error. + raise + # Else, raise all errors as ConfigurationError. 
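+            # Wrapping hides dnspython's many exception types from callers
+            # that only need to know SRV resolution failed.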
+ raise ConfigurationError(str(exc)) + return results + + def _get_srv_response_and_hosts( + self, encapsulate_errors: bool + ) -> Tuple[resolver.Answer, List[Tuple[str, Any]]]: + results = self._resolve_uri(encapsulate_errors) + + # Construct address tuples + nodes = [ + (maybe_decode(res.target.to_text(omit_final_dot=True)), res.port) for res in results + ] + + # Validate hosts + for node in nodes: + try: + nlist = node[0].lower().split(".")[1:][-self.__slen :] + except Exception: + raise ConfigurationError(f"Invalid SRV host: {node[0]}") + if self.__plist != nlist: + raise ConfigurationError(f"Invalid SRV host: {node[0]}") + if self.__srv_max_hosts: + nodes = random.sample(nodes, min(self.__srv_max_hosts, len(nodes))) + return results, nodes + + def get_hosts(self) -> List[Tuple[str, Any]]: + _, nodes = self._get_srv_response_and_hosts(True) + return nodes + + def get_hosts_and_min_ttl(self) -> Tuple[List[Tuple[str, Any]], int]: + results, nodes = self._get_srv_response_and_hosts(False) + rrset = results.rrset + ttl = rrset.ttl if rrset else 0 + return nodes, ttl diff --git a/backend/test/lib/python3.8/site-packages/pymongo/ssl_context.py b/backend/test/lib/python3.8/site-packages/pymongo/ssl_context.py new file mode 100644 index 0000000000000000000000000000000000000000..63970cb5e23b4645fb770de2c1275d0a2b10d7a4 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/ssl_context.py @@ -0,0 +1,39 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""A fake SSLContext implementation.""" + +import ssl as _ssl + +# PROTOCOL_TLS_CLIENT is Python 3.6+ +PROTOCOL_SSLv23 = getattr(_ssl, "PROTOCOL_TLS_CLIENT", _ssl.PROTOCOL_SSLv23) +OP_NO_SSLv2 = getattr(_ssl, "OP_NO_SSLv2", 0) +OP_NO_SSLv3 = getattr(_ssl, "OP_NO_SSLv3", 0) +OP_NO_COMPRESSION = getattr(_ssl, "OP_NO_COMPRESSION", 0) +# Python 3.7+, OpenSSL 1.1.0h+ +OP_NO_RENEGOTIATION = getattr(_ssl, "OP_NO_RENEGOTIATION", 0) + +HAS_SNI = getattr(_ssl, "HAS_SNI", False) +IS_PYOPENSSL = False + +# Errors raised by SSL sockets when in non-blocking mode. +BLOCKING_IO_ERRORS = (_ssl.SSLWantReadError, _ssl.SSLWantWriteError) + +# Base Exception class +SSLError = _ssl.SSLError + +from ssl import SSLContext # noqa: F401,E402 + +if hasattr(_ssl, "VERIFY_CRL_CHECK_LEAF"): + from ssl import VERIFY_CRL_CHECK_LEAF # noqa: F401 diff --git a/backend/test/lib/python3.8/site-packages/pymongo/ssl_support.py b/backend/test/lib/python3.8/site-packages/pymongo/ssl_support.py new file mode 100644 index 0000000000000000000000000000000000000000..dafd88bdb80befdbce4f12234bd3232e0cd3e266 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/ssl_support.py @@ -0,0 +1,101 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. 
You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""Support for SSL in PyMongo.""" + +from typing import Optional + +from pymongo.errors import ConfigurationError + +HAVE_SSL = True + +try: + import pymongo.pyopenssl_context as _ssl +except ImportError: + try: + import pymongo.ssl_context as _ssl # type: ignore[no-redef] + except ImportError: + HAVE_SSL = False + + +if HAVE_SSL: + # Note: The validate* functions below deal with users passing + # CPython ssl module constants to configure certificate verification + # at a high level. This is legacy behavior, but requires us to + # import the ssl module even if we're only using it for this purpose. + import ssl as _stdlibssl # noqa + from ssl import CERT_NONE, CERT_REQUIRED + + HAS_SNI = _ssl.HAS_SNI + IPADDR_SAFE = True + SSLError = _ssl.SSLError + BLOCKING_IO_ERRORS = _ssl.BLOCKING_IO_ERRORS + + def get_ssl_context( + certfile: Optional[str], + passphrase: Optional[str], + ca_certs: Optional[str], + crlfile: Optional[str], + allow_invalid_certificates: bool, + allow_invalid_hostnames: bool, + disable_ocsp_endpoint_check: bool, + ) -> _ssl.SSLContext: + """Create and return an SSLContext object.""" + verify_mode = CERT_NONE if allow_invalid_certificates else CERT_REQUIRED + ctx = _ssl.SSLContext(_ssl.PROTOCOL_SSLv23) + if verify_mode != CERT_NONE: + ctx.check_hostname = not allow_invalid_hostnames + else: + ctx.check_hostname = False + if hasattr(ctx, "check_ocsp_endpoint"): + ctx.check_ocsp_endpoint = not disable_ocsp_endpoint_check + if hasattr(ctx, "options"): + # Explicitly disable SSLv2, SSLv3 and TLS compression. Note that + # up to date versions of MongoDB 2.4 and above already disable + # SSLv2 and SSLv3, python disables SSLv2 by default in >= 2.7.7 + # and >= 3.3.4 and SSLv3 in >= 3.4.3. + ctx.options |= _ssl.OP_NO_SSLv2 + ctx.options |= _ssl.OP_NO_SSLv3 + ctx.options |= _ssl.OP_NO_COMPRESSION + ctx.options |= _ssl.OP_NO_RENEGOTIATION + if certfile is not None: + try: + ctx.load_cert_chain(certfile, None, passphrase) + except _ssl.SSLError as exc: + raise ConfigurationError(f"Private key doesn't match certificate: {exc}") + if crlfile is not None: + if _ssl.IS_PYOPENSSL: + raise ConfigurationError("tlsCRLFile cannot be used with PyOpenSSL") + # Match the server's behavior. 
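+            # VERIFY_CRL_CHECK_LEAF checks revocation status for the peer
+            # (leaf) certificate only, not for the rest of the chain.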
+ setattr(ctx, "verify_flags", getattr(_ssl, "VERIFY_CRL_CHECK_LEAF", 0)) # noqa + ctx.load_verify_locations(crlfile) + if ca_certs is not None: + ctx.load_verify_locations(ca_certs) + elif verify_mode != CERT_NONE: + ctx.load_default_certs() + ctx.verify_mode = verify_mode + return ctx + +else: + + class SSLError(Exception): # type: ignore + pass + + HAS_SNI = False + IPADDR_SAFE = False + BLOCKING_IO_ERRORS = () # type: ignore + + def get_ssl_context(*dummy): # type: ignore + """No ssl module, raise ConfigurationError.""" + raise ConfigurationError("The ssl module is not available.") diff --git a/backend/test/lib/python3.8/site-packages/pymongo/topology.py b/backend/test/lib/python3.8/site-packages/pymongo/topology.py new file mode 100644 index 0000000000000000000000000000000000000000..5b4197bc169024eceef6f11686a098671d168c36 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/topology.py @@ -0,0 +1,943 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""Internal class to monitor a topology of one or more servers.""" + +from __future__ import annotations + +import os +import queue +import random +import time +import warnings +import weakref +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + List, + Mapping, + Optional, + Set, + Tuple, + cast, +) + +from pymongo import _csot, common, helpers, periodic_executor +from pymongo.client_session import _ServerSession, _ServerSessionPool +from pymongo.errors import ( + ConfigurationError, + ConnectionFailure, + InvalidOperation, + NetworkTimeout, + NotPrimaryError, + OperationFailure, + PyMongoError, + ServerSelectionTimeoutError, + WriteError, +) +from pymongo.hello import Hello +from pymongo.lock import _create_lock +from pymongo.monitor import SrvMonitor +from pymongo.pool import Pool, PoolOptions +from pymongo.server import Server +from pymongo.server_description import ServerDescription +from pymongo.server_selectors import ( + Selection, + any_server_selector, + arbiter_server_selector, + readable_server_selector, + secondary_server_selector, + writable_server_selector, +) +from pymongo.topology_description import ( + SRV_POLLING_TOPOLOGIES, + TOPOLOGY_TYPE, + TopologyDescription, + _updated_topology_description_srv_polling, + updated_topology_description, +) + +if TYPE_CHECKING: + from bson import ObjectId + from pymongo.settings import TopologySettings + from pymongo.typings import ClusterTime, _Address + + +def process_events_queue(queue_ref: weakref.ReferenceType[queue.Queue]) -> bool: + q = queue_ref() + if not q: + return False # Cancel PeriodicExecutor. + + while True: + try: + event = q.get_nowait() + except queue.Empty: + break + else: + fn, args = event + fn(*args) + + return True # Continue PeriodicExecutor. 
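+# A minimal illustration (not part of the module above) of the weakref
+# contract: the executor keeps running only while the events queue is alive.
+# Here `print` stands in for a hypothetical listener callback:
+#
+#     events: queue.Queue = queue.Queue(maxsize=100)
+#     weak_events = weakref.ref(events)
+#     events.put((print, ("topology opened",)))
+#     process_events_queue(weak_events)  # drains the queue, returns True
+#     del events                         # queue is garbage collected
+#     process_events_queue(weak_events)  # returns False; executor cancels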
+ + +class Topology: + """Monitor a topology of one or more servers.""" + + def __init__(self, topology_settings: TopologySettings): + self._topology_id = topology_settings._topology_id + self._listeners = topology_settings._pool_options._event_listeners + self._publish_server = self._listeners is not None and self._listeners.enabled_for_server + self._publish_tp = self._listeners is not None and self._listeners.enabled_for_topology + + # Create events queue if there are publishers. + self._events = None + self.__events_executor: Any = None + + if self._publish_server or self._publish_tp: + self._events = queue.Queue(maxsize=100) + + if self._publish_tp: + assert self._events is not None + self._events.put((self._listeners.publish_topology_opened, (self._topology_id,))) + self._settings = topology_settings + topology_description = TopologyDescription( + topology_settings.get_topology_type(), + topology_settings.get_server_descriptions(), + topology_settings.replica_set_name, + None, + None, + topology_settings, + ) + + self._description = topology_description + if self._publish_tp: + assert self._events is not None + initial_td = TopologyDescription( + TOPOLOGY_TYPE.Unknown, {}, None, None, None, self._settings + ) + self._events.put( + ( + self._listeners.publish_topology_description_changed, + (initial_td, self._description, self._topology_id), + ) + ) + + for seed in topology_settings.seeds: + if self._publish_server: + assert self._events is not None + self._events.put((self._listeners.publish_server_opened, (seed, self._topology_id))) + + # Store the seed list to help diagnose errors in _error_message(). + self._seed_addresses = list(topology_description.server_descriptions()) + self._opened = False + self._closed = False + self._lock = _create_lock() + self._condition = self._settings.condition_class(self._lock) + self._servers: Dict[_Address, Server] = {} + self._pid: Optional[int] = None + self._max_cluster_time: Optional[ClusterTime] = None + self._session_pool = _ServerSessionPool() + + if self._publish_server or self._publish_tp: + assert self._events is not None + weak: weakref.ReferenceType[queue.Queue] + + def target() -> bool: + return process_events_queue(weak) + + executor = periodic_executor.PeriodicExecutor( + interval=common.EVENTS_QUEUE_FREQUENCY, + min_interval=common.MIN_HEARTBEAT_INTERVAL, + target=target, + name="pymongo_events_thread", + ) + + # We strongly reference the executor and it weakly references + # the queue via this closure. When the topology is freed, stop + # the executor soon. + weak = weakref.ref(self._events, executor.close) + self.__events_executor = executor + executor.open() + + self._srv_monitor = None + if self._settings.fqdn is not None and not self._settings.load_balanced: + self._srv_monitor = SrvMonitor(self, self._settings) + + def open(self) -> None: + """Start monitoring, or restart after a fork. + + No effect if called multiple times. + + .. warning:: Topology is shared among multiple threads and is protected + by mutual exclusion. Using Topology from a process other than the one + that initialized it will emit a warning and may result in deadlock. To + prevent this from happening, MongoClient must be created after any + forking. + + """ + pid = os.getpid() + if self._pid is None: + self._pid = pid + elif pid != self._pid: + self._pid = pid + warnings.warn( + "MongoClient opened before fork. May not be entirely fork-safe, " + "proceed with caution. 
See PyMongo's documentation for details: " + "https://pymongo.readthedocs.io/en/stable/faq.html#" + "is-pymongo-fork-safe" + ) + with self._lock: + # Close servers and clear the pools. + for server in self._servers.values(): + server.close() + # Reset the session pool to avoid duplicate sessions in + # the child process. + self._session_pool.reset() + + with self._lock: + self._ensure_opened() + + def get_server_selection_timeout(self) -> float: + # CSOT: use remaining timeout when set. + timeout = _csot.remaining() + if timeout is None: + return self._settings.server_selection_timeout + return timeout + + def select_servers( + self, + selector: Callable[[Selection], Selection], + server_selection_timeout: Optional[float] = None, + address: Optional[_Address] = None, + ) -> List[Server]: + """Return a list of Servers matching selector, or time out. + + :Parameters: + - `selector`: function that takes a list of Servers and returns + a subset of them. + - `server_selection_timeout` (optional): maximum seconds to wait. + If not provided, the default value common.SERVER_SELECTION_TIMEOUT + is used. + - `address`: optional server address to select. + + Calls self.open() if needed. + + Raises exc:`ServerSelectionTimeoutError` after + `server_selection_timeout` if no matching servers are found. + """ + if server_selection_timeout is None: + server_timeout = self.get_server_selection_timeout() + else: + server_timeout = server_selection_timeout + + with self._lock: + server_descriptions = self._select_servers_loop(selector, server_timeout, address) + + return [ + cast(Server, self.get_server_by_address(sd.address)) for sd in server_descriptions + ] + + def _select_servers_loop( + self, + selector: Callable[[Selection], Selection], + timeout: float, + address: Optional[_Address], + ) -> List[ServerDescription]: + """select_servers() guts. Hold the lock when calling this.""" + now = time.monotonic() + end_time = now + timeout + server_descriptions = self._description.apply_selector( + selector, address, custom_selector=self._settings.server_selector + ) + + while not server_descriptions: + # No suitable servers. + if timeout == 0 or now > end_time: + raise ServerSelectionTimeoutError( + f"{self._error_message(selector)}, Timeout: {timeout}s, Topology Description: {self.description!r}" + ) + + self._ensure_opened() + self._request_check_all() + + # Release the lock and wait for the topology description to + # change, or for a timeout. We won't miss any changes that + # came after our most recent apply_selector call, since we've + # held the lock until now. 
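+            # Waking at least every MIN_HEARTBEAT_INTERVAL keeps the deadline
+            # check above fresh even when no topology change arrives.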
+ self._condition.wait(common.MIN_HEARTBEAT_INTERVAL) + self._description.check_compatible() + now = time.monotonic() + server_descriptions = self._description.apply_selector( + selector, address, custom_selector=self._settings.server_selector + ) + + self._description.check_compatible() + return server_descriptions + + def _select_server( + self, + selector: Callable[[Selection], Selection], + server_selection_timeout: Optional[float] = None, + address: Optional[_Address] = None, + ) -> Server: + servers = self.select_servers(selector, server_selection_timeout, address) + if len(servers) == 1: + return servers[0] + server1, server2 = random.sample(servers, 2) + if server1.pool.operation_count <= server2.pool.operation_count: + return server1 + else: + return server2 + + def select_server( + self, + selector: Callable[[Selection], Selection], + server_selection_timeout: Optional[float] = None, + address: Optional[_Address] = None, + ) -> Server: + """Like select_servers, but choose a random server if several match.""" + server = self._select_server(selector, server_selection_timeout, address) + if _csot.get_timeout(): + _csot.set_rtt(server.description.min_round_trip_time) + return server + + def select_server_by_address( + self, address: _Address, server_selection_timeout: Optional[int] = None + ) -> Server: + """Return a Server for "address", reconnecting if necessary. + + If the server's type is not known, request an immediate check of all + servers. Time out after "server_selection_timeout" if the server + cannot be reached. + + :Parameters: + - `address`: A (host, port) pair. + - `server_selection_timeout` (optional): maximum seconds to wait. + If not provided, the default value + common.SERVER_SELECTION_TIMEOUT is used. + + Calls self.open() if needed. + + Raises exc:`ServerSelectionTimeoutError` after + `server_selection_timeout` if no matching servers are found. + """ + return self.select_server(any_server_selector, server_selection_timeout, address) + + def _process_change( + self, server_description: ServerDescription, reset_pool: bool = False + ) -> None: + """Process a new ServerDescription on an opened topology. + + Hold the lock when calling this. + """ + td_old = self._description + sd_old = td_old._server_descriptions[server_description.address] + if _is_stale_server_description(sd_old, server_description): + # This is a stale hello response. Ignore it. + return + + new_td = updated_topology_description(self._description, server_description) + # CMAP: Ensure the pool is "ready" when the server is selectable. + if server_description.is_readable or ( + server_description.is_server_type_known and new_td.topology_type == TOPOLOGY_TYPE.Single + ): + server = self._servers.get(server_description.address) + if server: + server.pool.ready() + + suppress_event = (self._publish_server or self._publish_tp) and sd_old == server_description + if self._publish_server and not suppress_event: + assert self._events is not None + self._events.put( + ( + self._listeners.publish_server_description_changed, + (sd_old, server_description, server_description.address, self._topology_id), + ) + ) + + self._description = new_td + self._update_servers() + self._receive_cluster_time_no_lock(server_description.cluster_time) + + if self._publish_tp and not suppress_event: + assert self._events is not None + self._events.put( + ( + self._listeners.publish_topology_description_changed, + (td_old, self._description, self._topology_id), + ) + ) + + # Shutdown SRV polling for unsupported cluster types. 
+ # This is only applicable if the old topology was Unknown, and the + # new one is something other than Unknown or Sharded. + if self._srv_monitor and ( + td_old.topology_type == TOPOLOGY_TYPE.Unknown + and self._description.topology_type not in SRV_POLLING_TOPOLOGIES + ): + self._srv_monitor.close() + + # Clear the pool from a failed heartbeat. + if reset_pool: + server = self._servers.get(server_description.address) + if server: + server.pool.reset() + + # Wake waiters in select_servers(). + self._condition.notify_all() + + def on_change(self, server_description: ServerDescription, reset_pool: bool = False) -> None: + """Process a new ServerDescription after an hello call completes.""" + # We do no I/O holding the lock. + with self._lock: + # Monitors may continue working on hello calls for some time + # after a call to Topology.close, so this method may be called at + # any time. Ensure the topology is open before processing the + # change. + # Any monitored server was definitely in the topology description + # once. Check if it's still in the description or if some state- + # change removed it. E.g., we got a host list from the primary + # that didn't include this server. + if self._opened and self._description.has_server(server_description.address): + self._process_change(server_description, reset_pool) + + def _process_srv_update(self, seedlist: List[Tuple[str, Any]]) -> None: + """Process a new seedlist on an opened topology. + Hold the lock when calling this. + """ + td_old = self._description + if td_old.topology_type not in SRV_POLLING_TOPOLOGIES: + return + self._description = _updated_topology_description_srv_polling(self._description, seedlist) + + self._update_servers() + + if self._publish_tp: + assert self._events is not None + self._events.put( + ( + self._listeners.publish_topology_description_changed, + (td_old, self._description, self._topology_id), + ) + ) + + def on_srv_update(self, seedlist: List[Tuple[str, Any]]) -> None: + """Process a new list of nodes obtained from scanning SRV records.""" + # We do no I/O holding the lock. + with self._lock: + if self._opened: + self._process_srv_update(seedlist) + + def get_server_by_address(self, address: _Address) -> Optional[Server]: + """Get a Server or None. + + Returns the current version of the server immediately, even if it's + Unknown or absent from the topology. Only use this in unittests. + In driver code, use select_server_by_address, since then you're + assured a recent view of the server's type and wire protocol version. + """ + return self._servers.get(address) + + def has_server(self, address: _Address) -> bool: + return address in self._servers + + def get_primary(self) -> Optional[_Address]: + """Return primary's address or None.""" + # Implemented here in Topology instead of MongoClient, so it can lock. + with self._lock: + topology_type = self._description.topology_type + if topology_type != TOPOLOGY_TYPE.ReplicaSetWithPrimary: + return None + + return writable_server_selector(self._new_selection())[0].address + + def _get_replica_set_members(self, selector: Callable[[Selection], Selection]) -> Set[_Address]: + """Return set of replica set member addresses.""" + # Implemented here in Topology instead of MongoClient, so it can lock. 
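+        # Same pattern as get_primary above: snapshot the description while
+        # holding the lock, then select from that snapshot.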
+ with self._lock: + topology_type = self._description.topology_type + if topology_type not in ( + TOPOLOGY_TYPE.ReplicaSetWithPrimary, + TOPOLOGY_TYPE.ReplicaSetNoPrimary, + ): + return set() + + return {sd.address for sd in iter(selector(self._new_selection()))} + + def get_secondaries(self) -> Set[_Address]: + """Return set of secondary addresses.""" + return self._get_replica_set_members(secondary_server_selector) + + def get_arbiters(self) -> Set[_Address]: + """Return set of arbiter addresses.""" + return self._get_replica_set_members(arbiter_server_selector) + + def max_cluster_time(self) -> Optional[ClusterTime]: + """Return a document, the highest seen $clusterTime.""" + return self._max_cluster_time + + def _receive_cluster_time_no_lock(self, cluster_time: Optional[Mapping[str, Any]]) -> None: + # Driver Sessions Spec: "Whenever a driver receives a cluster time from + # a server it MUST compare it to the current highest seen cluster time + # for the deployment. If the new cluster time is higher than the + # highest seen cluster time it MUST become the new highest seen cluster + # time. Two cluster times are compared using only the BsonTimestamp + # value of the clusterTime embedded field." + if cluster_time: + # ">" uses bson.timestamp.Timestamp's comparison operator. + if ( + not self._max_cluster_time + or cluster_time["clusterTime"] > self._max_cluster_time["clusterTime"] + ): + self._max_cluster_time = cluster_time + + def receive_cluster_time(self, cluster_time: Optional[Mapping[str, Any]]) -> None: + with self._lock: + self._receive_cluster_time_no_lock(cluster_time) + + def request_check_all(self, wait_time: int = 5) -> None: + """Wake all monitors, wait for at least one to check its server.""" + with self._lock: + self._request_check_all() + self._condition.wait(wait_time) + + def data_bearing_servers(self) -> List[ServerDescription]: + """Return a list of all data-bearing servers. + + This includes any server that might be selected for an operation. + """ + if self._description.topology_type == TOPOLOGY_TYPE.Single: + return self._description.known_servers + return self._description.readable_servers + + def update_pool(self) -> None: + # Remove any stale sockets and add new sockets if pool is too small. + servers = [] + with self._lock: + # Only update pools for data-bearing servers. + for sd in self.data_bearing_servers(): + server = self._servers[sd.address] + servers.append((server, server.pool.gen.get_overall())) + + for server, generation in servers: + try: + server.pool.remove_stale_sockets(generation) + except PyMongoError as exc: + ctx = _ErrorContext(exc, 0, generation, False, None) + self.handle_error(server.description.address, ctx) + raise + + def close(self) -> None: + """Clear pools and terminate monitors. Topology does not reopen on + demand. Any further operations will raise + :exc:`~.errors.InvalidOperation`. + """ + with self._lock: + for server in self._servers.values(): + server.close() + + # Mark all servers Unknown. + self._description = self._description.reset() + for address, sd in self._description.server_descriptions().items(): + if address in self._servers: + self._servers[address].description = sd + + # Stop SRV polling thread. + if self._srv_monitor: + self._srv_monitor.close() + + self._opened = False + self._closed = True + + # Publish only after releasing the lock. 
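+        # The puts below only enqueue events; the events executor thread is
+        # what actually invokes the listeners.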
+ if self._publish_tp: + assert self._events is not None + self._events.put((self._listeners.publish_topology_closed, (self._topology_id,))) + if self._publish_server or self._publish_tp: + self.__events_executor.close() + + @property + def description(self) -> TopologyDescription: + return self._description + + def pop_all_sessions(self) -> List[_ServerSession]: + """Pop all session ids from the pool.""" + with self._lock: + return self._session_pool.pop_all() + + def _check_implicit_session_support(self) -> None: + with self._lock: + self._check_session_support() + + def _check_session_support(self) -> float: + """Internal check for session support on clusters.""" + if self._settings.load_balanced: + # Sessions never time out in load balanced mode. + return float("inf") + session_timeout = self._description.logical_session_timeout_minutes + if session_timeout is None: + # Maybe we need an initial scan? Can raise ServerSelectionError. + if self._description.topology_type == TOPOLOGY_TYPE.Single: + if not self._description.has_known_servers: + self._select_servers_loop( + any_server_selector, self.get_server_selection_timeout(), None + ) + elif not self._description.readable_servers: + self._select_servers_loop( + readable_server_selector, self.get_server_selection_timeout(), None + ) + + session_timeout = self._description.logical_session_timeout_minutes + if session_timeout is None: + raise ConfigurationError("Sessions are not supported by this MongoDB deployment") + return session_timeout + + def get_server_session(self) -> _ServerSession: + """Start or resume a server session, or raise ConfigurationError.""" + with self._lock: + session_timeout = self._check_session_support() + return self._session_pool.get_server_session(session_timeout) + + def return_server_session(self, server_session: _ServerSession, lock: bool) -> None: + if lock: + with self._lock: + self._session_pool.return_server_session( + server_session, self._description.logical_session_timeout_minutes + ) + else: + # Called from a __del__ method, can't use a lock. + self._session_pool.return_server_session_no_lock(server_session) + + def _new_selection(self) -> Selection: + """A Selection object, initially including all known servers. + + Hold the lock when calling this. + """ + return Selection.from_topology_description(self._description) + + def _ensure_opened(self) -> None: + """Start monitors, or restart after a fork. + + Hold the lock when calling this. + """ + if self._closed: + raise InvalidOperation("Cannot use MongoClient after close") + + if not self._opened: + self._opened = True + self._update_servers() + + # Start or restart the events publishing thread. + if self._publish_tp or self._publish_server: + self.__events_executor.open() + + # Start the SRV polling thread. + if self._srv_monitor and (self.description.topology_type in SRV_POLLING_TOPOLOGIES): + self._srv_monitor.open() + + if self._settings.load_balanced: + # Emit initial SDAM events for load balancer mode. + self._process_change( + ServerDescription( + self._seed_addresses[0], + Hello({"ok": 1, "serviceId": self._topology_id, "maxWireVersion": 13}), + ) + ) + + # Ensure that the monitors are open. + for server in self._servers.values(): + server.open() + + def _is_stale_error(self, address: _Address, err_ctx: _ErrorContext) -> bool: + server = self._servers.get(address) + if server is None: + # Another thread removed this server from the topology. 
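+            # The error context refers to a server we no longer monitor, so
+            # treat the error as stale.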
+ return True + + if server._pool.stale_generation(err_ctx.sock_generation, err_ctx.service_id): + # This is an outdated error from a previous pool version. + return True + + # topologyVersion check, ignore error when cur_tv >= error_tv: + cur_tv = server.description.topology_version + error = err_ctx.error + error_tv = None + if error and hasattr(error, "details"): + if isinstance(error.details, dict): + error_tv = error.details.get("topologyVersion") + + return _is_stale_error_topology_version(cur_tv, error_tv) + + def _handle_error(self, address: _Address, err_ctx: _ErrorContext) -> None: + if self._is_stale_error(address, err_ctx): + return + + server = self._servers[address] + error = err_ctx.error + service_id = err_ctx.service_id + + # Ignore a handshake error if the server is behind a load balancer but + # the service ID is unknown. This indicates that the error happened + # when dialing the connection or during the MongoDB handshake, so we + # don't know the service ID to use for clearing the pool. + if self._settings.load_balanced and not service_id and not err_ctx.completed_handshake: + return + + if isinstance(error, NetworkTimeout) and err_ctx.completed_handshake: + # The socket has been closed. Don't reset the server. + # Server Discovery And Monitoring Spec: "When an application + # operation fails because of any network error besides a socket + # timeout...." + return + elif isinstance(error, WriteError): + # Ignore writeErrors. + return + elif isinstance(error, (NotPrimaryError, OperationFailure)): + # As per the SDAM spec if: + # - the server sees a "not primary" error, and + # - the server is not shutting down, and + # - the server version is >= 4.2, then + # we keep the existing connection pool, but mark the server type + # as Unknown and request an immediate check of the server. + # Otherwise, we clear the connection pool, mark the server as + # Unknown and request an immediate check of the server. + if hasattr(error, "code"): + err_code = error.code + else: + # Default error code if one does not exist. + default = 10107 if isinstance(error, NotPrimaryError) else None + err_code = error.details.get("code", default) # type: ignore[union-attr] + if err_code in helpers._NOT_PRIMARY_CODES: + is_shutting_down = err_code in helpers._SHUTDOWN_CODES + # Mark server Unknown, clear the pool, and request check. + if not self._settings.load_balanced: + self._process_change(ServerDescription(address, error=error)) + if is_shutting_down or (err_ctx.max_wire_version <= 7): + # Clear the pool. + server.reset(service_id) + server.request_check() + elif not err_ctx.completed_handshake: + # Unknown command error during the connection handshake. + if not self._settings.load_balanced: + self._process_change(ServerDescription(address, error=error)) + # Clear the pool. + server.reset(service_id) + elif isinstance(error, ConnectionFailure): + # "Client MUST replace the server's description with type Unknown + # ... MUST NOT request an immediate check of the server." + if not self._settings.load_balanced: + self._process_change(ServerDescription(address, error=error)) + # Clear the pool. + server.reset(service_id) + # "When a client marks a server Unknown from `Network error when + # reading or writing`_, clients MUST cancel the hello check on + # that server and close the current monitoring connection." + server._monitor.cancel_check() + + def handle_error(self, address: _Address, err_ctx: _ErrorContext) -> None: + """Handle an application error. 
+ + May reset the server to Unknown, clear the pool, and request an + immediate check depending on the error and the context. + """ + with self._lock: + self._handle_error(address, err_ctx) + + def _request_check_all(self) -> None: + """Wake all monitors. Hold the lock when calling this.""" + for server in self._servers.values(): + server.request_check() + + def _update_servers(self) -> None: + """Sync our Servers from TopologyDescription.server_descriptions. + + Hold the lock while calling this. + """ + for address, sd in self._description.server_descriptions().items(): + if address not in self._servers: + monitor = self._settings.monitor_class( + server_description=sd, + topology=self, + pool=self._create_pool_for_monitor(address), + topology_settings=self._settings, + ) + + weak = None + if self._publish_server and self._events is not None: + weak = weakref.ref(self._events) + server = Server( + server_description=sd, + pool=self._create_pool_for_server(address), + monitor=monitor, + topology_id=self._topology_id, + listeners=self._listeners, + events=weak, + ) + + self._servers[address] = server + server.open() + else: + # Cache old is_writable value. + was_writable = self._servers[address].description.is_writable + # Update server description. + self._servers[address].description = sd + # Update is_writable value of the pool, if it changed. + if was_writable != sd.is_writable: + self._servers[address].pool.update_is_writable(sd.is_writable) + + for address, server in list(self._servers.items()): + if not self._description.has_server(address): + server.close() + self._servers.pop(address) + + def _create_pool_for_server(self, address: _Address) -> Pool: + return self._settings.pool_class(address, self._settings.pool_options) + + def _create_pool_for_monitor(self, address: _Address) -> Pool: + options = self._settings.pool_options + + # According to the Server Discovery And Monitoring Spec, monitors use + # connect_timeout for both connect_timeout and socket_timeout. The + # pool only has one socket so maxPoolSize and so on aren't needed. + monitor_pool_options = PoolOptions( + connect_timeout=options.connect_timeout, + socket_timeout=options.connect_timeout, + ssl_context=options._ssl_context, + tls_allow_invalid_hostnames=options.tls_allow_invalid_hostnames, + event_listeners=options._event_listeners, + appname=options.appname, + driver=options.driver, + pause_enabled=False, + server_api=options.server_api, + ) + + return self._settings.pool_class(address, monitor_pool_options, handshake=False) + + def _error_message(self, selector: Callable[[Selection], Selection]) -> str: + """Format an error message if server selection fails. + + Hold the lock when calling this. + """ + is_replica_set = self._description.topology_type in ( + TOPOLOGY_TYPE.ReplicaSetWithPrimary, + TOPOLOGY_TYPE.ReplicaSetNoPrimary, + ) + + if is_replica_set: + server_plural = "replica set members" + elif self._description.topology_type == TOPOLOGY_TYPE.Sharded: + server_plural = "mongoses" + else: + server_plural = "servers" + + if self._description.known_servers: + # We've connected, but no servers match the selector. 
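+            # Known servers exist, so report a selector-specific failure
+            # rather than a connectivity failure.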
+ if selector is writable_server_selector: + if is_replica_set: + return "No primary available for writes" + else: + return "No %s available for writes" % server_plural + else: + return f'No {server_plural} match selector "{selector}"' + else: + addresses = list(self._description.server_descriptions()) + servers = list(self._description.server_descriptions().values()) + if not servers: + if is_replica_set: + # We removed all servers because of the wrong setName? + return 'No {} available for replica set name "{}"'.format( + server_plural, + self._settings.replica_set_name, + ) + else: + return "No %s available" % server_plural + + # 1 or more servers, all Unknown. Are they unknown for one reason? + error = servers[0].error + same = all(server.error == error for server in servers[1:]) + if same: + if error is None: + # We're still discovering. + return "No %s found yet" % server_plural + + if is_replica_set and not set(addresses).intersection(self._seed_addresses): + # We replaced our seeds with new hosts but can't reach any. + return ( + "Could not reach any servers in %s. Replica set is" + " configured with internal hostnames or IPs?" % addresses + ) + + return str(error) + else: + return ",".join(str(server.error) for server in servers if server.error) + + def __repr__(self) -> str: + msg = "" + if not self._opened: + msg = "CLOSED " + return f"<{self.__class__.__name__} {msg}{self._description!r}>" + + def eq_props(self) -> Tuple[Tuple[_Address, ...], Optional[str], Optional[str], str]: + """The properties to use for MongoClient/Topology equality checks.""" + ts = self._settings + return (tuple(sorted(ts.seeds)), ts.replica_set_name, ts.fqdn, ts.srv_service_name) + + def __eq__(self, other: object) -> bool: + if isinstance(other, self.__class__): + return self.eq_props() == other.eq_props() + return NotImplemented + + def __hash__(self) -> int: + return hash(self.eq_props()) + + +class _ErrorContext: + """An error with context for SDAM error handling.""" + + def __init__( + self, + error: BaseException, + max_wire_version: int, + sock_generation: int, + completed_handshake: bool, + service_id: Optional[ObjectId], + ): + self.error = error + self.max_wire_version = max_wire_version + self.sock_generation = sock_generation + self.completed_handshake = completed_handshake + self.service_id = service_id + + +def _is_stale_error_topology_version( + current_tv: Optional[Mapping[str, Any]], error_tv: Optional[Mapping[str, Any]] +) -> bool: + """Return True if the error's topologyVersion is <= current.""" + if current_tv is None or error_tv is None: + return False + if current_tv["processId"] != error_tv["processId"]: + return False + return current_tv["counter"] >= error_tv["counter"] + + +def _is_stale_server_description(current_sd: ServerDescription, new_sd: ServerDescription) -> bool: + """Return True if the new topologyVersion is < current.""" + current_tv, new_tv = current_sd.topology_version, new_sd.topology_version + if current_tv is None or new_tv is None: + return False + if current_tv["processId"] != new_tv["processId"]: + return False + return current_tv["counter"] > new_tv["counter"] diff --git a/backend/test/lib/python3.8/site-packages/pymongo/topology_description.py b/backend/test/lib/python3.8/site-packages/pymongo/topology_description.py new file mode 100644 index 0000000000000000000000000000000000000000..21d47a531cab6df944db815788fc57610d3ce1ce --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/topology_description.py @@ -0,0 +1,677 @@ +# Copyright 
2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + +"""Represent a deployment of MongoDB servers.""" + +from random import sample +from typing import ( + Any, + Callable, + Dict, + List, + Mapping, + MutableMapping, + NamedTuple, + Optional, + Tuple, + cast, +) + +from bson.min_key import MinKey +from bson.objectid import ObjectId +from pymongo import common +from pymongo.errors import ConfigurationError +from pymongo.read_preferences import ReadPreference, _AggWritePref, _ServerMode +from pymongo.server_description import ServerDescription +from pymongo.server_selectors import Selection +from pymongo.server_type import SERVER_TYPE +from pymongo.typings import _Address + + +# Enumeration for various kinds of MongoDB cluster topologies. +class _TopologyType(NamedTuple): + Single: int + ReplicaSetNoPrimary: int + ReplicaSetWithPrimary: int + Sharded: int + Unknown: int + LoadBalanced: int + + +TOPOLOGY_TYPE = _TopologyType(*range(6)) + +# Topologies compatible with SRV record polling. +SRV_POLLING_TOPOLOGIES: Tuple[int, int] = (TOPOLOGY_TYPE.Unknown, TOPOLOGY_TYPE.Sharded) + + +_ServerSelector = Callable[[List[ServerDescription]], List[ServerDescription]] + + +class TopologyDescription: + def __init__( + self, + topology_type: int, + server_descriptions: Dict[_Address, ServerDescription], + replica_set_name: Optional[str], + max_set_version: Optional[int], + max_election_id: Optional[ObjectId], + topology_settings: Any, + ) -> None: + """Representation of a deployment of MongoDB servers. + + :Parameters: + - `topology_type`: initial type + - `server_descriptions`: dict of (address, ServerDescription) for + all seeds + - `replica_set_name`: replica set name or None + - `max_set_version`: greatest setVersion seen from a primary, or None + - `max_election_id`: greatest electionId seen from a primary, or None + - `topology_settings`: a TopologySettings + """ + self._topology_type = topology_type + self._replica_set_name = replica_set_name + self._server_descriptions = server_descriptions + self._max_set_version = max_set_version + self._max_election_id = max_election_id + + # The heartbeat_frequency is used in staleness estimates. + self._topology_settings = topology_settings + + # Is PyMongo compatible with all servers' wire protocols? + self._incompatible_err = None + if self._topology_type != TOPOLOGY_TYPE.LoadBalanced: + self._init_incompatible_err() + + # Server Discovery And Monitoring Spec: Whenever a client updates the + # TopologyDescription from an hello response, it MUST set + # TopologyDescription.logicalSessionTimeoutMinutes to the smallest + # logicalSessionTimeoutMinutes value among ServerDescriptions of all + # data-bearing server types. If any have a null + # logicalSessionTimeoutMinutes, then + # TopologyDescription.logicalSessionTimeoutMinutes MUST be set to null. 
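+        # For example: timeouts of (30, 30) yield 30; (30, None) yields None;
+        # and no data-bearing servers also yields None.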
+ readable_servers = self.readable_servers + if not readable_servers: + self._ls_timeout_minutes = None + elif any(s.logical_session_timeout_minutes is None for s in readable_servers): + self._ls_timeout_minutes = None + else: + self._ls_timeout_minutes = min( # type: ignore[type-var] + s.logical_session_timeout_minutes for s in readable_servers + ) + + def _init_incompatible_err(self) -> None: + """Internal compatibility check for non-load balanced topologies.""" + for s in self._server_descriptions.values(): + if not s.is_server_type_known: + continue + + # s.min/max_wire_version is the server's wire protocol. + # MIN/MAX_SUPPORTED_WIRE_VERSION is what PyMongo supports. + server_too_new = ( + # Server too new. + s.min_wire_version is not None + and s.min_wire_version > common.MAX_SUPPORTED_WIRE_VERSION + ) + + server_too_old = ( + # Server too old. + s.max_wire_version is not None + and s.max_wire_version < common.MIN_SUPPORTED_WIRE_VERSION + ) + + if server_too_new: + self._incompatible_err = ( + "Server at %s:%d requires wire version %d, but this " # type: ignore + "version of PyMongo only supports up to %d." + % ( + s.address[0], + s.address[1] or 0, + s.min_wire_version, + common.MAX_SUPPORTED_WIRE_VERSION, + ) + ) + + elif server_too_old: + self._incompatible_err = ( + "Server at %s:%d reports wire version %d, but this " # type: ignore + "version of PyMongo requires at least %d (MongoDB %s)." + % ( + s.address[0], + s.address[1] or 0, + s.max_wire_version, + common.MIN_SUPPORTED_WIRE_VERSION, + common.MIN_SUPPORTED_SERVER_VERSION, + ) + ) + + break + + def check_compatible(self) -> None: + """Raise ConfigurationError if any server is incompatible. + + A server is incompatible if its wire protocol version range does not + overlap with PyMongo's. + """ + if self._incompatible_err: + raise ConfigurationError(self._incompatible_err) + + def has_server(self, address: _Address) -> bool: + return address in self._server_descriptions + + def reset_server(self, address: _Address) -> "TopologyDescription": + """A copy of this description, with one server marked Unknown.""" + unknown_sd = self._server_descriptions[address].to_unknown() + return updated_topology_description(self, unknown_sd) + + def reset(self) -> "TopologyDescription": + """A copy of this description, with all servers marked Unknown.""" + if self._topology_type == TOPOLOGY_TYPE.ReplicaSetWithPrimary: + topology_type = TOPOLOGY_TYPE.ReplicaSetNoPrimary + else: + topology_type = self._topology_type + + # The default ServerDescription's type is Unknown. + sds = {address: ServerDescription(address) for address in self._server_descriptions} + + return TopologyDescription( + topology_type, + sds, + self._replica_set_name, + self._max_set_version, + self._max_election_id, + self._topology_settings, + ) + + def server_descriptions(self) -> Dict[_Address, ServerDescription]: + """Dict of (address, + :class:`~pymongo.server_description.ServerDescription`). + """ + return self._server_descriptions.copy() + + @property + def topology_type(self) -> int: + """The type of this topology.""" + return self._topology_type + + @property + def topology_type_name(self) -> str: + """The topology type as a human readable string. + + .. 
versionadded:: 3.4 + """ + return TOPOLOGY_TYPE._fields[self._topology_type] + + @property + def replica_set_name(self) -> Optional[str]: + """The replica set name.""" + return self._replica_set_name + + @property + def max_set_version(self) -> Optional[int]: + """Greatest setVersion seen from a primary, or None.""" + return self._max_set_version + + @property + def max_election_id(self) -> Optional[ObjectId]: + """Greatest electionId seen from a primary, or None.""" + return self._max_election_id + + @property + def logical_session_timeout_minutes(self) -> Optional[int]: + """Minimum logical session timeout, or None.""" + return self._ls_timeout_minutes + + @property + def known_servers(self) -> List[ServerDescription]: + """List of Servers of types besides Unknown.""" + return [s for s in self._server_descriptions.values() if s.is_server_type_known] + + @property + def has_known_servers(self) -> bool: + """Whether there are any Servers of types besides Unknown.""" + return any(s for s in self._server_descriptions.values() if s.is_server_type_known) + + @property + def readable_servers(self) -> List[ServerDescription]: + """List of readable Servers.""" + return [s for s in self._server_descriptions.values() if s.is_readable] + + @property + def common_wire_version(self) -> Optional[int]: + """Minimum of all servers' max wire versions, or None.""" + servers = self.known_servers + if servers: + return min(s.max_wire_version for s in self.known_servers) + + return None + + @property + def heartbeat_frequency(self) -> int: + return self._topology_settings.heartbeat_frequency + + @property + def srv_max_hosts(self) -> int: + return self._topology_settings._srv_max_hosts + + def _apply_local_threshold(self, selection: Optional[Selection]) -> List[ServerDescription]: + if not selection: + return [] + # Round trip time in seconds. + fastest = min(cast(float, s.round_trip_time) for s in selection.server_descriptions) + threshold = self._topology_settings.local_threshold_ms / 1000.0 + return [ + s + for s in selection.server_descriptions + if (cast(float, s.round_trip_time) - fastest) <= threshold + ] + + def apply_selector( + self, + selector: Any, + address: Optional[_Address] = None, + custom_selector: Optional[_ServerSelector] = None, + ) -> List[ServerDescription]: + """List of servers matching the provided selector(s). + + :Parameters: + - `selector`: a callable that takes a Selection as input and returns + a Selection as output. For example, an instance of a read + preference from :mod:`~pymongo.read_preferences`. + - `address` (optional): A server address to select. + - `custom_selector` (optional): A callable that augments server + selection rules. Accepts a list of + :class:`~pymongo.server_description.ServerDescription` objects and + return a list of server descriptions that should be considered + suitable for the desired operation. + + .. versionadded:: 3.4 + """ + if getattr(selector, "min_wire_version", 0): + common_wv = self.common_wire_version + if common_wv and common_wv < selector.min_wire_version: + raise ConfigurationError( + "%s requires min wire version %d, but topology's min" + " wire version is %d" % (selector, selector.min_wire_version, common_wv) + ) + + if isinstance(selector, _AggWritePref): + selector.selection_hook(self) + + if self.topology_type == TOPOLOGY_TYPE.Unknown: + return [] + elif self.topology_type in (TOPOLOGY_TYPE.Single, TOPOLOGY_TYPE.LoadBalanced): + # Ignore selectors for standalone and load balancer mode. 
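+            # There is at most one candidate server in these modes, so
+            # filtering would add nothing.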
+ return self.known_servers + if address: + # Ignore selectors when explicit address is requested. + description = self.server_descriptions().get(address) + return [description] if description else [] + + selection = Selection.from_topology_description(self) + # Ignore read preference for sharded clusters. + if self.topology_type != TOPOLOGY_TYPE.Sharded: + selection = selector(selection) + + # Apply custom selector followed by localThresholdMS. + if custom_selector is not None and selection: + selection = selection.with_server_descriptions( + custom_selector(selection.server_descriptions) + ) + return self._apply_local_threshold(selection) + + def has_readable_server(self, read_preference: _ServerMode = ReadPreference.PRIMARY) -> bool: + """Does this topology have any readable servers available matching the + given read preference? + + :Parameters: + - `read_preference`: an instance of a read preference from + :mod:`~pymongo.read_preferences`. Defaults to + :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`. + + .. note:: When connected directly to a single server this method + always returns ``True``. + + .. versionadded:: 3.4 + """ + common.validate_read_preference("read_preference", read_preference) + return any(self.apply_selector(read_preference)) + + def has_writable_server(self) -> bool: + """Does this topology have a writable server available? + + .. note:: When connected directly to a single server this method + always returns ``True``. + + .. versionadded:: 3.4 + """ + return self.has_readable_server(ReadPreference.PRIMARY) + + def __repr__(self) -> str: + # Sort the servers by address. + servers = sorted(self._server_descriptions.values(), key=lambda sd: sd.address) + return "<{} id: {}, topology_type: {}, servers: {!r}>".format( + self.__class__.__name__, + self._topology_settings._topology_id, + self.topology_type_name, + servers, + ) + + +# If topology type is Unknown and we receive a hello response, what should +# the new topology type be? +_SERVER_TYPE_TO_TOPOLOGY_TYPE = { + SERVER_TYPE.Mongos: TOPOLOGY_TYPE.Sharded, + SERVER_TYPE.RSPrimary: TOPOLOGY_TYPE.ReplicaSetWithPrimary, + SERVER_TYPE.RSSecondary: TOPOLOGY_TYPE.ReplicaSetNoPrimary, + SERVER_TYPE.RSArbiter: TOPOLOGY_TYPE.ReplicaSetNoPrimary, + SERVER_TYPE.RSOther: TOPOLOGY_TYPE.ReplicaSetNoPrimary, + # Note: SERVER_TYPE.LoadBalancer and Unknown are intentionally left out. +} + + +def updated_topology_description( + topology_description: TopologyDescription, server_description: ServerDescription +) -> "TopologyDescription": + """Return an updated copy of a TopologyDescription. + + :Parameters: + - `topology_description`: the current TopologyDescription + - `server_description`: a new ServerDescription that resulted from + a hello call + + Called after attempting (successfully or not) to call hello on the + server at server_description.address. Does not modify topology_description. + """ + address = server_description.address + + # These values will be updated, if necessary, to form the new + # TopologyDescription. + topology_type = topology_description.topology_type + set_name = topology_description.replica_set_name + max_set_version = topology_description.max_set_version + max_election_id = topology_description.max_election_id + server_type = server_description.server_type + + # Don't mutate the original dict of server descriptions; copy it. + sds = topology_description.server_descriptions() + + # Replace this server's description with the new one. 
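+    # From here on, only the topology type and replica set metadata are
+    # reconciled against the new server's type.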
+ sds[address] = server_description + + if topology_type == TOPOLOGY_TYPE.Single: + # Set server type to Unknown if replica set name does not match. + if set_name is not None and set_name != server_description.replica_set_name: + error = ConfigurationError( + "client is configured to connect to a replica set named " + "'{}' but this node belongs to a set named '{}'".format( + set_name, server_description.replica_set_name + ) + ) + sds[address] = server_description.to_unknown(error=error) + # Single type never changes. + return TopologyDescription( + TOPOLOGY_TYPE.Single, + sds, + set_name, + max_set_version, + max_election_id, + topology_description._topology_settings, + ) + + if topology_type == TOPOLOGY_TYPE.Unknown: + if server_type in (SERVER_TYPE.Standalone, SERVER_TYPE.LoadBalancer): + if len(topology_description._topology_settings.seeds) == 1: + topology_type = TOPOLOGY_TYPE.Single + else: + # Remove standalone from Topology when given multiple seeds. + sds.pop(address) + elif server_type not in (SERVER_TYPE.Unknown, SERVER_TYPE.RSGhost): + topology_type = _SERVER_TYPE_TO_TOPOLOGY_TYPE[server_type] + + if topology_type == TOPOLOGY_TYPE.Sharded: + if server_type not in (SERVER_TYPE.Mongos, SERVER_TYPE.Unknown): + sds.pop(address) + + elif topology_type == TOPOLOGY_TYPE.ReplicaSetNoPrimary: + if server_type in (SERVER_TYPE.Standalone, SERVER_TYPE.Mongos): + sds.pop(address) + + elif server_type == SERVER_TYPE.RSPrimary: + (topology_type, set_name, max_set_version, max_election_id) = _update_rs_from_primary( + sds, set_name, server_description, max_set_version, max_election_id + ) + + elif server_type in (SERVER_TYPE.RSSecondary, SERVER_TYPE.RSArbiter, SERVER_TYPE.RSOther): + topology_type, set_name = _update_rs_no_primary_from_member( + sds, set_name, server_description + ) + + elif topology_type == TOPOLOGY_TYPE.ReplicaSetWithPrimary: + if server_type in (SERVER_TYPE.Standalone, SERVER_TYPE.Mongos): + sds.pop(address) + topology_type = _check_has_primary(sds) + + elif server_type == SERVER_TYPE.RSPrimary: + (topology_type, set_name, max_set_version, max_election_id) = _update_rs_from_primary( + sds, set_name, server_description, max_set_version, max_election_id + ) + + elif server_type in (SERVER_TYPE.RSSecondary, SERVER_TYPE.RSArbiter, SERVER_TYPE.RSOther): + topology_type = _update_rs_with_primary_from_member(sds, set_name, server_description) + + else: + # Server type is Unknown or RSGhost: did we just lose the primary? + topology_type = _check_has_primary(sds) + + # Return updated copy. + return TopologyDescription( + topology_type, + sds, + set_name, + max_set_version, + max_election_id, + topology_description._topology_settings, + ) + + +def _updated_topology_description_srv_polling( + topology_description: TopologyDescription, seedlist: List[Tuple[str, Any]] +) -> TopologyDescription: + """Return an updated copy of a TopologyDescription. + + :Parameters: + - `topology_description`: the current TopologyDescription + - `seedlist`: a list of new seeds new ServerDescription that resulted from + a hello call + """ + assert topology_description.topology_type in SRV_POLLING_TOPOLOGIES + # Create a copy of the server descriptions. + sds = topology_description.server_descriptions() + + # If seeds haven't changed, don't do anything. + if set(sds.keys()) == set(seedlist): + return topology_description + + # Remove SDs corresponding to servers no longer part of the SRV record. 
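+    # For example, with old hosts {a, b} and a new seedlist {b, c}: 'a' is
+    # removed here and 'c' is added below, subject to srv_max_hosts.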
+ for address in list(sds.keys()): + if address not in seedlist: + sds.pop(address) + + if topology_description.srv_max_hosts != 0: + new_hosts = set(seedlist) - set(sds.keys()) + n_to_add = topology_description.srv_max_hosts - len(sds) + if n_to_add > 0: + seedlist = sample(sorted(new_hosts), min(n_to_add, len(new_hosts))) + else: + seedlist = [] + # Add SDs corresponding to servers recently added to the SRV record. + for address in seedlist: + if address not in sds: + sds[address] = ServerDescription(address) + return TopologyDescription( + topology_description.topology_type, + sds, + topology_description.replica_set_name, + topology_description.max_set_version, + topology_description.max_election_id, + topology_description._topology_settings, + ) + + +def _update_rs_from_primary( + sds: MutableMapping[_Address, ServerDescription], + replica_set_name: Optional[str], + server_description: ServerDescription, + max_set_version: Optional[int], + max_election_id: Optional[ObjectId], +) -> Tuple[int, Optional[str], Optional[int], Optional[ObjectId]]: + """Update topology description from a primary's hello response. + + Pass in a dict of ServerDescriptions, current replica set name, the + ServerDescription we are processing, and the TopologyDescription's + max_set_version and max_election_id if any. + + Returns (new topology type, new replica_set_name, new max_set_version, + new max_election_id). + """ + if replica_set_name is None: + replica_set_name = server_description.replica_set_name + + elif replica_set_name != server_description.replica_set_name: + # We found a primary but it doesn't have the replica_set_name + # provided by the user. + sds.pop(server_description.address) + return _check_has_primary(sds), replica_set_name, max_set_version, max_election_id + + if server_description.max_wire_version is None or server_description.max_wire_version < 17: + new_election_tuple: Tuple = (server_description.set_version, server_description.election_id) + max_election_tuple: Tuple = (max_set_version, max_election_id) + if None not in new_election_tuple: + if None not in max_election_tuple and new_election_tuple < max_election_tuple: + # Stale primary, set to type Unknown. + sds[server_description.address] = server_description.to_unknown() + return _check_has_primary(sds), replica_set_name, max_set_version, max_election_id + max_election_id = server_description.election_id + + if server_description.set_version is not None and ( + max_set_version is None or server_description.set_version > max_set_version + ): + max_set_version = server_description.set_version + else: + new_election_tuple = server_description.election_id, server_description.set_version + max_election_tuple = max_election_id, max_set_version + new_election_safe = tuple(MinKey() if i is None else i for i in new_election_tuple) + max_election_safe = tuple(MinKey() if i is None else i for i in max_election_tuple) + if new_election_safe < max_election_safe: + # Stale primary, set to type Unknown. + sds[server_description.address] = server_description.to_unknown() + return _check_has_primary(sds), replica_set_name, max_set_version, max_election_id + else: + max_election_id = server_description.election_id + max_set_version = server_description.set_version + + # We've heard from the primary. Is it the same primary as before? + for server in sds.values(): + if ( + server.server_type is SERVER_TYPE.RSPrimary + and server.address != server_description.address + ): + + # Reset old primary's type to Unknown. 
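+            # Invariant: a description holds at most one RSPrimary, hence
+            # the break below.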
+ sds[server.address] = server.to_unknown() + + # There can be only one prior primary. + break + + # Discover new hosts from this primary's response. + for new_address in server_description.all_hosts: + if new_address not in sds: + sds[new_address] = ServerDescription(new_address) + + # Remove hosts not in the response. + for addr in set(sds) - server_description.all_hosts: + sds.pop(addr) + + # If the host list differs from the seed list, we may not have a primary + # after all. + return (_check_has_primary(sds), replica_set_name, max_set_version, max_election_id) + + +def _update_rs_with_primary_from_member( + sds: MutableMapping[_Address, ServerDescription], + replica_set_name: Optional[str], + server_description: ServerDescription, +) -> int: + """RS with known primary. Process a response from a non-primary. + + Pass in a dict of ServerDescriptions, current replica set name, and the + ServerDescription we are processing. + + Returns new topology type. + """ + assert replica_set_name is not None + + if replica_set_name != server_description.replica_set_name: + sds.pop(server_description.address) + elif server_description.me and server_description.address != server_description.me: + sds.pop(server_description.address) + + # Had this member been the primary? + return _check_has_primary(sds) + + +def _update_rs_no_primary_from_member( + sds: MutableMapping[_Address, ServerDescription], + replica_set_name: Optional[str], + server_description: ServerDescription, +) -> Tuple[int, Optional[str]]: + """RS without known primary. Update from a non-primary's response. + + Pass in a dict of ServerDescriptions, current replica set name, and the + ServerDescription we are processing. + + Returns (new topology type, new replica_set_name). + """ + topology_type = TOPOLOGY_TYPE.ReplicaSetNoPrimary + if replica_set_name is None: + replica_set_name = server_description.replica_set_name + + elif replica_set_name != server_description.replica_set_name: + sds.pop(server_description.address) + return topology_type, replica_set_name + + # This isn't the primary's response, so don't remove any servers + # it doesn't report. Only add new servers. + for address in server_description.all_hosts: + if address not in sds: + sds[address] = ServerDescription(address) + + if server_description.me and server_description.address != server_description.me: + sds.pop(server_description.address) + + return topology_type, replica_set_name + + +def _check_has_primary(sds: Mapping[_Address, ServerDescription]) -> int: + """Current topology type is ReplicaSetWithPrimary. Is primary still known? + + Pass in a dict of ServerDescriptions. + + Returns new topology type. + """ + for s in sds.values(): + if s.server_type == SERVER_TYPE.RSPrimary: + return TOPOLOGY_TYPE.ReplicaSetWithPrimary + else: + return TOPOLOGY_TYPE.ReplicaSetNoPrimary diff --git a/backend/test/lib/python3.8/site-packages/pymongo/typings.py b/backend/test/lib/python3.8/site-packages/pymongo/typings.py new file mode 100644 index 0000000000000000000000000000000000000000..3464c92945e81b731bd4dd15cf5d6208e56567c6 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/typings.py @@ -0,0 +1,58 @@ +# Copyright 2022-Present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Type aliases used by PyMongo""" +from typing import ( + TYPE_CHECKING, + Any, + Mapping, + Optional, + Sequence, + Tuple, + TypeVar, + Union, +) + +from bson.typings import _DocumentOut, _DocumentType, _DocumentTypeArg + +if TYPE_CHECKING: + from pymongo.collation import Collation + + +# Common Shared Types. +_Address = Tuple[str, Optional[int]] +_CollationIn = Union[Mapping[str, Any], "Collation"] +_Pipeline = Sequence[Mapping[str, Any]] +ClusterTime = Mapping[str, Any] + +_T = TypeVar("_T") + + +def strip_optional(elem: Optional[_T]) -> _T: + """This function is to allow us to cast all of the elements of an iterator from Optional[_T] to _T + while inside a list comprehension. + """ + assert elem is not None + return elem + + +__all__ = [ + "_DocumentOut", + "_DocumentType", + "_DocumentTypeArg", + "_Address", + "_CollationIn", + "_Pipeline", + "strip_optional", +] diff --git a/backend/test/lib/python3.8/site-packages/pymongo/uri_parser.py b/backend/test/lib/python3.8/site-packages/pymongo/uri_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..c0dcc4f8b1f7c2463bc43abec983b842b3e3eb28 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/uri_parser.py @@ -0,0 +1,637 @@ +# Copyright 2011-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you +# may not use this file except in compliance with the License. You +# may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. See the License for the specific language governing +# permissions and limitations under the License. + + +"""Tools to parse and validate a MongoDB URI.""" +from __future__ import annotations + +import re +import sys +import warnings +from typing import ( + TYPE_CHECKING, + Any, + Dict, + List, + Mapping, + MutableMapping, + Optional, + Sized, + Tuple, + Union, + cast, +) +from urllib.parse import unquote_plus + +from pymongo.client_options import _parse_ssl_options +from pymongo.common import ( + INTERNAL_URI_OPTION_NAME_MAP, + SRV_SERVICE_NAME, + URI_OPTIONS_DEPRECATION_MAP, + _CaseInsensitiveDictionary, + get_validated_options, +) +from pymongo.errors import ConfigurationError, InvalidURI +from pymongo.srv_resolver import _HAVE_DNSPYTHON, _SrvResolver +from pymongo.typings import _Address + +if TYPE_CHECKING: + from pymongo.pyopenssl_context import SSLContext + +SCHEME = "mongodb://" +SCHEME_LEN = len(SCHEME) +SRV_SCHEME = "mongodb+srv://" +SRV_SCHEME_LEN = len(SRV_SCHEME) +DEFAULT_PORT = 27017 + + +def _unquoted_percent(s: str) -> bool: + """Check for unescaped percent signs. + + :Parameters: + - `s`: A string. `s` can have things like '%25', '%2525', + and '%E2%85%A8' but cannot have unquoted percent like '%foo'. + """ + for i in range(len(s)): + if s[i] == "%": + sub = s[i : i + 3] + # If unquoting yields the same string this means there was an + # unquoted %. 
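+            # For example, unquote_plus("%25") returns "%", so "%25" is a
+            # valid escape, while unquote_plus("%fo") returns "%fo" unchanged,
+            # revealing an unquoted percent sign.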
+            if unquote_plus(sub) == sub:
+                return True
+    return False
+
+
+def parse_userinfo(userinfo: str) -> Tuple[str, str]:
+    """Validates the format of user information in a MongoDB URI.
+    Reserved characters that are gen-delimiters (":", "/", "?", "#", "[",
+    "]", "@") as per RFC 3986 must be escaped.
+
+    Returns a 2-tuple containing the unescaped username followed
+    by the unescaped password.
+
+    :Parameters:
+      - `userinfo`: A string of the form <username>:<password>
+    """
+    if "@" in userinfo or userinfo.count(":") > 1 or _unquoted_percent(userinfo):
+        raise InvalidURI(
+            "Username and password must be escaped according to "
+            "RFC 3986, use urllib.parse.quote_plus"
+        )
+
+    user, _, passwd = userinfo.partition(":")
+    # No password is expected with GSSAPI authentication.
+    if not user:
+        raise InvalidURI("The empty string is not a valid username.")
+
+    return unquote_plus(user), unquote_plus(passwd)
+
+
+def parse_ipv6_literal_host(
+    entity: str, default_port: Optional[int]
+) -> Tuple[str, Optional[Union[str, int]]]:
+    """Validates an IPv6 literal host:port string.
+
+    Returns a 2-tuple of IPv6 literal followed by port where
+    port is default_port if it wasn't specified in entity.
+
+    :Parameters:
+      - `entity`: A string that represents an IPv6 literal enclosed
+        in braces (e.g. '[::1]' or '[::1]:27017').
+      - `default_port`: The port number to use when one wasn't
+        specified in entity.
+    """
+    if entity.find("]") == -1:
+        raise ValueError(
+            "an IPv6 address literal must be enclosed in '[' and ']' according to RFC 2732."
+        )
+    i = entity.find("]:")
+    if i == -1:
+        return entity[1:-1], default_port
+    return entity[1:i], entity[i + 2 :]
+
+
+def parse_host(entity: str, default_port: Optional[int] = DEFAULT_PORT) -> _Address:
+    """Validates a host string.
+
+    Returns a 2-tuple of host followed by port where port is default_port
+    if it wasn't specified in the string.
+
+    :Parameters:
+      - `entity`: A host or host:port string where host could be a
+        hostname or IP address.
+      - `default_port`: The port number to use when one wasn't
+        specified in entity.
+    """
+    host = entity
+    port: Optional[Union[str, int]] = default_port
+    if entity[0] == "[":
+        host, port = parse_ipv6_literal_host(entity, default_port)
+    elif entity.endswith(".sock"):
+        return entity, default_port
+    elif entity.find(":") != -1:
+        if entity.count(":") > 1:
+            raise ValueError(
+                "Reserved characters such as ':' must be "
+                "escaped according to RFC 2396. An IPv6 "
+                "address literal must be enclosed in '[' "
+                "and ']' according to RFC 2732."
+            )
+        host, port = host.split(":", 1)
+    if isinstance(port, str):
+        if not port.isdigit() or int(port) > 65535 or int(port) <= 0:
+            raise ValueError(f"Port must be an integer between 1 and 65535: {port!r}")
+        port = int(port)
+
+    # Normalize hostname to lowercase, since DNS is case-insensitive:
+    # http://tools.ietf.org/html/rfc4343
+    # This prevents useless rediscovery if "foo.com" is in the seed list but
+    # "FOO.com" is in the hello response.
+    return host.lower(), port
+
+
+# Options whose values are implicitly determined by tlsInsecure.
+_IMPLICIT_TLSINSECURE_OPTS = {
+    "tlsallowinvalidcertificates",
+    "tlsallowinvalidhostnames",
+    "tlsdisableocspendpointcheck",
+}
+
+
+def _parse_options(opts: str, delim: Optional[str]) -> _CaseInsensitiveDictionary:
+    """Helper method for split_options which creates the options dict.
+    Also handles the creation of a list for the URI tag_sets/
+    readpreferencetags portion, and the use of a unicode options string.
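+
+    For example, given ``opts`` of ``'w=majority&readPreferenceTags=dc:ny'``
+    and ``delim`` of ``'&'``, the returned dictionary maps ``w`` to
+    ``'majority'`` and collects ``readPreferenceTags`` into the list
+    ``['dc:ny']``.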
+ """ + options = _CaseInsensitiveDictionary() + for uriopt in opts.split(delim): + key, value = uriopt.split("=") + if key.lower() == "readpreferencetags": + options.setdefault(key, []).append(value) + else: + if key in options: + warnings.warn(f"Duplicate URI option '{key}'.") + if key.lower() == "authmechanismproperties": + val = value + else: + val = unquote_plus(value) + options[key] = val + + return options + + +def _handle_security_options(options: _CaseInsensitiveDictionary) -> _CaseInsensitiveDictionary: + """Raise appropriate errors when conflicting TLS options are present in + the options dictionary. + + :Parameters: + - `options`: Instance of _CaseInsensitiveDictionary containing + MongoDB URI options. + """ + # Implicitly defined options must not be explicitly specified. + tlsinsecure = options.get("tlsinsecure") + if tlsinsecure is not None: + for opt in _IMPLICIT_TLSINSECURE_OPTS: + if opt in options: + err_msg = "URI options %s and %s cannot be specified simultaneously." + raise InvalidURI( + err_msg % (options.cased_key("tlsinsecure"), options.cased_key(opt)) + ) + + # Handle co-occurence of OCSP & tlsAllowInvalidCertificates options. + tlsallowinvalidcerts = options.get("tlsallowinvalidcertificates") + if tlsallowinvalidcerts is not None: + if "tlsdisableocspendpointcheck" in options: + err_msg = "URI options %s and %s cannot be specified simultaneously." + raise InvalidURI( + err_msg + % ("tlsallowinvalidcertificates", options.cased_key("tlsdisableocspendpointcheck")) + ) + if tlsallowinvalidcerts is True: + options["tlsdisableocspendpointcheck"] = True + + # Handle co-occurence of CRL and OCSP-related options. + tlscrlfile = options.get("tlscrlfile") + if tlscrlfile is not None: + for opt in ("tlsinsecure", "tlsallowinvalidcertificates", "tlsdisableocspendpointcheck"): + if options.get(opt) is True: + err_msg = "URI option %s=True cannot be specified when CRL checking is enabled." + raise InvalidURI(err_msg % (opt,)) + + if "ssl" in options and "tls" in options: + + def truth_value(val: Any) -> Any: + if val in ("true", "false"): + return val == "true" + if isinstance(val, bool): + return val + return val + + if truth_value(options.get("ssl")) != truth_value(options.get("tls")): + err_msg = "Can not specify conflicting values for URI options %s and %s." + raise InvalidURI(err_msg % (options.cased_key("ssl"), options.cased_key("tls"))) + + return options + + +def _handle_option_deprecations(options: _CaseInsensitiveDictionary) -> _CaseInsensitiveDictionary: + """Issue appropriate warnings when deprecated options are present in the + options dictionary. Removes deprecated option key, value pairs if the + options dictionary is found to also have the renamed option. + + :Parameters: + - `options`: Instance of _CaseInsensitiveDictionary containing + MongoDB URI options. + """ + for optname in list(options): + if optname in URI_OPTIONS_DEPRECATION_MAP: + mode, message = URI_OPTIONS_DEPRECATION_MAP[optname] + if mode == "renamed": + newoptname = message + if newoptname in options: + warn_msg = "Deprecated option '%s' ignored in favor of '%s'." + warnings.warn( + warn_msg % (options.cased_key(optname), options.cased_key(newoptname)), + DeprecationWarning, + stacklevel=2, + ) + options.pop(optname) + continue + warn_msg = "Option '%s' is deprecated, use '%s' instead." + warnings.warn( + warn_msg % (options.cased_key(optname), newoptname), + DeprecationWarning, + stacklevel=2, + ) + elif mode == "removed": + warn_msg = "Option '%s' is deprecated. %s." 
+                warnings.warn(
+                    warn_msg % (options.cased_key(optname), message),
+                    DeprecationWarning,
+                    stacklevel=2,
+                )
+
+    return options
+
+
+def _normalize_options(options: _CaseInsensitiveDictionary) -> _CaseInsensitiveDictionary:
+    """Normalizes option names in the options dictionary by converting them to
+    their internally-used names.
+
+    :Parameters:
+      - `options`: Instance of _CaseInsensitiveDictionary containing
+        MongoDB URI options.
+    """
+    # Expand the tlsInsecure option.
+    tlsinsecure = options.get("tlsinsecure")
+    if tlsinsecure is not None:
+        for opt in _IMPLICIT_TLSINSECURE_OPTS:
+            # Implicit options are logically the same as tlsInsecure.
+            options[opt] = tlsinsecure
+
+    for optname in list(options):
+        intname = INTERNAL_URI_OPTION_NAME_MAP.get(optname, None)
+        if intname is not None:
+            options[intname] = options.pop(optname)
+
+    return options
+
+
+def validate_options(opts: Mapping[str, Any], warn: bool = False) -> MutableMapping[str, Any]:
+    """Validates and normalizes options passed in a MongoDB URI.
+
+    Returns a new dictionary of validated and normalized options. If warn is
+    False then errors will be raised for invalid options, otherwise they will
+    be ignored and a warning will be issued.
+
+    :Parameters:
+      - `opts`: A dict of MongoDB URI options.
+      - `warn` (optional): If ``True`` then warnings will be logged and
+        invalid options will be ignored. Otherwise invalid options will
+        cause errors.
+    """
+    return get_validated_options(opts, warn)
+
+
+def split_options(
+    opts: str, validate: bool = True, warn: bool = False, normalize: bool = True
+) -> MutableMapping[str, Any]:
+    """Takes the options portion of a MongoDB URI, validates each option
+    and returns the options in a dictionary.
+
+    :Parameters:
+      - `opts`: A string representing MongoDB URI options.
+      - `validate`: If ``True`` (the default), validate and normalize all
+        options.
+      - `warn`: If ``False`` (the default), invalid options raise errors
+        during validation; if ``True``, they are logged as warnings and
+        ignored.
+      - `normalize`: If ``True`` (the default), renames all options to their
+        internally-used names.
+    """
+    and_idx = opts.find("&")
+    semi_idx = opts.find(";")
+    try:
+        if and_idx >= 0 and semi_idx >= 0:
+            raise InvalidURI("Can not mix '&' and ';' for option separators.")
+        elif and_idx >= 0:
+            options = _parse_options(opts, "&")
+        elif semi_idx >= 0:
+            options = _parse_options(opts, ";")
+        elif opts.find("=") != -1:
+            options = _parse_options(opts, None)
+        else:
+            raise ValueError
+    except ValueError:
+        raise InvalidURI("MongoDB URI options are key=value pairs.")
+
+    options = _handle_security_options(options)
+
+    options = _handle_option_deprecations(options)
+
+    if normalize:
+        options = _normalize_options(options)
+
+    if validate:
+        options = cast(_CaseInsensitiveDictionary, validate_options(options, warn))
+        if options.get("authsource") == "":
+            raise InvalidURI("the authSource database cannot be an empty string")
+
+    return options
+
+
+def split_hosts(hosts: str, default_port: Optional[int] = DEFAULT_PORT) -> List[_Address]:
+    """Takes a string of the form host1[:port],host2[:port]... and
+    splits it into (host, port) tuples. If [:port] isn't present the
+    default_port is used.
+
+    Returns a list of 2-tuples containing the host name (or IP) followed by
+    port number.
+
+    :Parameters:
+      - `hosts`: A string of the form host1[:port],host2[:port],...
+      - `default_port`: The port number to use when one wasn't specified
+        for a host.
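+
+    For example::
+
+        >>> split_hosts('host1,host2:27018')
+        [('host1', 27017), ('host2', 27018)]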
+ """ + nodes = [] + for entity in hosts.split(","): + if not entity: + raise ConfigurationError("Empty host (or extra comma in host list).") + port = default_port + # Unix socket entities don't have ports + if entity.endswith(".sock"): + port = None + nodes.append(parse_host(entity, port)) + return nodes + + +# Prohibited characters in database name. DB names also can't have ".", but for +# backward-compat we allow "db.collection" in URI. +_BAD_DB_CHARS = re.compile("[" + re.escape(r'/ "$') + "]") + +_ALLOWED_TXT_OPTS = frozenset( + ["authsource", "authSource", "replicaset", "replicaSet", "loadbalanced", "loadBalanced"] +) + + +def _check_options(nodes: Sized, options: Mapping[str, Any]) -> None: + # Ensure directConnection was not True if there are multiple seeds. + if len(nodes) > 1 and options.get("directconnection"): + raise ConfigurationError("Cannot specify multiple hosts with directConnection=true") + + if options.get("loadbalanced"): + if len(nodes) > 1: + raise ConfigurationError("Cannot specify multiple hosts with loadBalanced=true") + if options.get("directconnection"): + raise ConfigurationError("Cannot specify directConnection=true with loadBalanced=true") + if options.get("replicaset"): + raise ConfigurationError("Cannot specify replicaSet with loadBalanced=true") + + +def parse_uri( + uri: str, + default_port: Optional[int] = DEFAULT_PORT, + validate: bool = True, + warn: bool = False, + normalize: bool = True, + connect_timeout: Optional[float] = None, + srv_service_name: Optional[str] = None, + srv_max_hosts: Optional[int] = None, +) -> Dict[str, Any]: + """Parse and validate a MongoDB URI. + + Returns a dict of the form:: + + { + 'nodelist': <list of (host, port) tuples>, + 'username': <username> or None, + 'password': <password> or None, + 'database': <database name> or None, + 'collection': <collection name> or None, + 'options': <dict of MongoDB URI options>, + 'fqdn': <fqdn of the MongoDB+SRV URI> or None + } + + If the URI scheme is "mongodb+srv://" DNS SRV and TXT lookups will be done + to build nodelist and options. + + :Parameters: + - `uri`: The MongoDB URI to parse. + - `default_port`: The port number to use when one wasn't specified + for a host in the URI. + - `validate` (optional): If ``True`` (the default), validate and + normalize all options. Default: ``True``. + - `warn` (optional): When validating, if ``True`` then will warn + the user then ignore any invalid options or values. If ``False``, + validation will error when options are unsupported or values are + invalid. Default: ``False``. + - `normalize` (optional): If ``True``, convert names of URI options + to their internally-used names. Default: ``True``. + - `connect_timeout` (optional): The maximum time in milliseconds to + wait for a response from the DNS server. + - 'srv_service_name` (optional): A custom SRV service name + + .. versionchanged:: 4.0 + To better follow RFC 3986, unquoted percent signs ("%") are no longer + supported. + + .. versionchanged:: 3.9 + Added the ``normalize`` parameter. + + .. versionchanged:: 3.6 + Added support for mongodb+srv:// URIs. + + .. versionchanged:: 3.5 + Return the original value of the ``readPreference`` MongoDB URI option + instead of the validated read preference mode. + + .. versionchanged:: 3.1 + ``warn`` added so invalid options can be ignored. 
+ """ + if uri.startswith(SCHEME): + is_srv = False + scheme_free = uri[SCHEME_LEN:] + elif uri.startswith(SRV_SCHEME): + if not _HAVE_DNSPYTHON: + python_path = sys.executable or "python" + raise ConfigurationError( + 'The "dnspython" module must be ' + "installed to use mongodb+srv:// URIs. " + "To fix this error install pymongo again:\n " + "%s -m pip install pymongo>=4.3" % (python_path) + ) + is_srv = True + scheme_free = uri[SRV_SCHEME_LEN:] + else: + raise InvalidURI(f"Invalid URI scheme: URI must begin with '{SCHEME}' or '{SRV_SCHEME}'") + + if not scheme_free: + raise InvalidURI("Must provide at least one hostname or IP.") + + user = None + passwd = None + dbase = None + collection = None + options = _CaseInsensitiveDictionary() + + host_part, _, path_part = scheme_free.partition("/") + if not host_part: + host_part = path_part + path_part = "" + + if not path_part and "?" in host_part: + raise InvalidURI("A '/' is required between the host list and any options.") + + if path_part: + dbase, _, opts = path_part.partition("?") + if dbase: + dbase = unquote_plus(dbase) + if "." in dbase: + dbase, collection = dbase.split(".", 1) + if _BAD_DB_CHARS.search(dbase): + raise InvalidURI('Bad database name "%s"' % dbase) + else: + dbase = None + + if opts: + options.update(split_options(opts, validate, warn, normalize)) + if srv_service_name is None: + srv_service_name = options.get("srvServiceName", SRV_SERVICE_NAME) + if "@" in host_part: + userinfo, _, hosts = host_part.rpartition("@") + user, passwd = parse_userinfo(userinfo) + else: + hosts = host_part + + if "/" in hosts: + raise InvalidURI("Any '/' in a unix domain socket must be percent-encoded: %s" % host_part) + + hosts = unquote_plus(hosts) + fqdn = None + srv_max_hosts = srv_max_hosts or options.get("srvMaxHosts") + if is_srv: + if options.get("directConnection"): + raise ConfigurationError(f"Cannot specify directConnection=true with {SRV_SCHEME} URIs") + nodes = split_hosts(hosts, default_port=None) + if len(nodes) != 1: + raise InvalidURI(f"{SRV_SCHEME} URIs must include one, and only one, hostname") + fqdn, port = nodes[0] + if port is not None: + raise InvalidURI(f"{SRV_SCHEME} URIs must not include a port number") + + # Use the connection timeout. connectTimeoutMS passed as a keyword + # argument overrides the same option passed in the connection string. 
+ connect_timeout = connect_timeout or options.get("connectTimeoutMS") + dns_resolver = _SrvResolver(fqdn, connect_timeout, srv_service_name, srv_max_hosts) + nodes = dns_resolver.get_hosts() + dns_options = dns_resolver.get_options() + if dns_options: + parsed_dns_options = split_options(dns_options, validate, warn, normalize) + if set(parsed_dns_options) - _ALLOWED_TXT_OPTS: + raise ConfigurationError( + "Only authSource, replicaSet, and loadBalanced are supported from DNS" + ) + for opt, val in parsed_dns_options.items(): + if opt not in options: + options[opt] = val + if options.get("loadBalanced") and srv_max_hosts: + raise InvalidURI("You cannot specify loadBalanced with srvMaxHosts") + if options.get("replicaSet") and srv_max_hosts: + raise InvalidURI("You cannot specify replicaSet with srvMaxHosts") + if "tls" not in options and "ssl" not in options: + options["tls"] = True if validate else "true" + elif not is_srv and options.get("srvServiceName") is not None: + raise ConfigurationError( + "The srvServiceName option is only allowed with 'mongodb+srv://' URIs" + ) + elif not is_srv and srv_max_hosts: + raise ConfigurationError( + "The srvMaxHosts option is only allowed with 'mongodb+srv://' URIs" + ) + else: + nodes = split_hosts(hosts, default_port=default_port) + + _check_options(nodes, options) + + return { + "nodelist": nodes, + "username": user, + "password": passwd, + "database": dbase, + "collection": collection, + "options": options, + "fqdn": fqdn, + } + + +def _parse_kms_tls_options(kms_tls_options: Optional[Mapping[str, Any]]) -> Dict[str, SSLContext]: + """Parse KMS TLS connection options.""" + if not kms_tls_options: + return {} + if not isinstance(kms_tls_options, dict): + raise TypeError("kms_tls_options must be a dict") + contexts = {} + for provider, opts in kms_tls_options.items(): + if not isinstance(opts, dict): + raise TypeError(f'kms_tls_options["{provider}"] must be a dict') + opts.setdefault("tls", True) + opts = _CaseInsensitiveDictionary(opts) + opts = _handle_security_options(opts) + opts = _normalize_options(opts) + opts = validate_options(opts) + ssl_context, allow_invalid_hostnames = _parse_ssl_options(opts) + if ssl_context is None: + raise ConfigurationError("TLS is required for KMS providers") + if allow_invalid_hostnames: + raise ConfigurationError("Insecure TLS options prohibited") + + for n in [ + "tlsInsecure", + "tlsAllowInvalidCertificates", + "tlsAllowInvalidHostnames", + "tlsDisableCertificateRevocationCheck", + ]: + if n in opts: + raise ConfigurationError(f"Insecure TLS options prohibited: {n}") + contexts[provider] = ssl_context + return contexts + + +if __name__ == "__main__": + import pprint + + try: + pprint.pprint(parse_uri(sys.argv[1])) + except InvalidURI as exc: + print(exc) + sys.exit(0) diff --git a/backend/test/lib/python3.8/site-packages/pymongo/write_concern.py b/backend/test/lib/python3.8/site-packages/pymongo/write_concern.py new file mode 100644 index 0000000000000000000000000000000000000000..6487197b0e3b3829d887487990707cbcc24a1c20 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/pymongo/write_concern.py @@ -0,0 +1,137 @@ +# Copyright 2014-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tools for working with write concerns.""" + +from typing import Any, Dict, Optional, Union + +from pymongo.errors import ConfigurationError + + +# Moved here to avoid a circular import. +def validate_boolean(option: str, value: Any) -> bool: + """Validates that 'value' is True or False.""" + if isinstance(value, bool): + return value + raise TypeError(f"{option} must be True or False, was: {option}={value}") + + +class WriteConcern: + """WriteConcern + + :Parameters: + - `w`: (integer or string) Used with replication, write operations + will block until they have been replicated to the specified number + or tagged set of servers. `w=<integer>` always includes the replica + set primary (e.g. w=3 means write to the primary and wait until + replicated to **two** secondaries). **w=0 disables acknowledgement + of write operations and can not be used with other write concern + options.** + - `wtimeout`: (integer) Used in conjunction with `w`. Specify a value + in milliseconds to control how long to wait for write propagation + to complete. If replication does not complete in the given + timeframe, a timeout exception is raised. + - `j`: If ``True`` block until write operations have been committed + to the journal. Cannot be used in combination with `fsync`. Write + operations will fail with an exception if this option is used when + the server is running without journaling. + - `fsync`: If ``True`` and the server is running without journaling, + blocks until the server has synced all data files to disk. If the + server is running with journaling, this acts the same as the `j` + option, blocking until write operations have been committed to the + journal. Cannot be used in combination with `j`. 
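+
+    For example::
+
+        >>> WriteConcern(w='majority', wtimeout=1000).document
+        {'wtimeout': 1000, 'w': 'majority'}
+        >>> WriteConcern(w=0).acknowledged
+        False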
+ """ + + __slots__ = ("__document", "__acknowledged", "__server_default") + + def __init__( + self, + w: Optional[Union[int, str]] = None, + wtimeout: Optional[int] = None, + j: Optional[bool] = None, + fsync: Optional[bool] = None, + ) -> None: + self.__document: Dict[str, Any] = {} + self.__acknowledged = True + + if wtimeout is not None: + if not isinstance(wtimeout, int): + raise TypeError("wtimeout must be an integer") + if wtimeout < 0: + raise ValueError("wtimeout cannot be less than 0") + self.__document["wtimeout"] = wtimeout + + if j is not None: + validate_boolean("j", j) + self.__document["j"] = j + + if fsync is not None: + validate_boolean("fsync", fsync) + if j and fsync: + raise ConfigurationError("Can't set both j and fsync at the same time") + self.__document["fsync"] = fsync + + if w == 0 and j is True: + raise ConfigurationError("Cannot set w to 0 and j to True") + + if w is not None: + if isinstance(w, int): + if w < 0: + raise ValueError("w cannot be less than 0") + self.__acknowledged = w > 0 + elif not isinstance(w, str): + raise TypeError("w must be an integer or string") + self.__document["w"] = w + + self.__server_default = not self.__document + + @property + def is_server_default(self) -> bool: + """Does this WriteConcern match the server default.""" + return self.__server_default + + @property + def document(self) -> Dict[str, Any]: + """The document representation of this write concern. + + .. note:: + :class:`WriteConcern` is immutable. Mutating the value of + :attr:`document` does not mutate this :class:`WriteConcern`. + """ + return self.__document.copy() + + @property + def acknowledged(self) -> bool: + """If ``True`` write operations will wait for acknowledgement before + returning. + """ + return self.__acknowledged + + def __repr__(self) -> str: + return "WriteConcern({})".format( + ", ".join("{}={}".format(*kvt) for kvt in self.__document.items()) + ) + + def __eq__(self, other: Any) -> bool: + if isinstance(other, WriteConcern): + return self.__document == other.document + return NotImplemented + + def __ne__(self, other: Any) -> bool: + if isinstance(other, WriteConcern): + return self.__document != other.document + return NotImplemented + + +DEFAULT_WRITE_CONCERN = WriteConcern() diff --git a/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/AUTHORS.txt b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/AUTHORS.txt new file mode 100644 index 0000000000000000000000000000000000000000..72c87d7d38ae7bf859717c333a5ee8230f6ce624 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/AUTHORS.txt @@ -0,0 +1,562 @@ +A_Rog <adam.thomas.rogerson@gmail.com> +Aakanksha Agrawal <11389424+rasponic@users.noreply.github.com> +Abhinav Sagar <40603139+abhinavsagar@users.noreply.github.com> +ABHYUDAY PRATAP SINGH <abhyudaypratap@outlook.com> +abs51295 <aagams68@gmail.com> +AceGentile <ventogrigio83@gmail.com> +Adam Chainz <adam@adamj.eu> +Adam Tse <adam.tse@me.com> +Adam Tse <atse@users.noreply.github.com> +Adam Wentz <awentz@theonion.com> +admin <admin@admins-MacBook-Pro.local> +Adrien Morison <adrien.morison@gmail.com> +ahayrapetyan <ahayrapetya2@bloomberg.net> +Ahilya <ahilya16009@iiitd.ac.in> +AinsworthK <yat626@yahoo.com.hk> +Akash Srivastava <akashsrivastava4927@gmail.com> +Alan Yee <alyee@ucsd.edu> +Albert Tugushev <albert@tugushev.ru> +Albert-Guan <albert.guan94@gmail.com> +albertg <albert.guan94@gmail.com> +Aleks Bunin <github@compuix.com> +Alethea Flowers <magicalgirl@google.com> +Alex 
Gaynor <alex.gaynor@gmail.com> +Alex Grönholm <alex.gronholm@nextday.fi> +Alex Loosley <a.loosley@reply.de> +Alex Morega <alex@grep.ro> +Alex Stachowiak <alexander@computer.org> +Alexander Shtyrov <rawzausho@gmail.com> +Alexandre Conrad <alexandre.conrad@gmail.com> +Alexey Popravka <a.popravka@smartweb.com.ua> +Alexey Popravka <alexey.popravka@horsedevel.com> +Alli <alzeih@users.noreply.github.com> +Ami Fischman <ami@fischman.org> +Ananya Maiti <ananyoevo@gmail.com> +Anatoly Techtonik <techtonik@gmail.com> +Anders Kaseorg <andersk@mit.edu> +Andreas Lutro <anlutro@gmail.com> +Andrei Geacar <andrei.geacar@gmail.com> +Andrew Gaul <andrew@gaul.org> +Andrey Bulgakov <mail@andreiko.ru> +Andrés Delfino <34587441+andresdelfino@users.noreply.github.com> +Andrés Delfino <adelfino@gmail.com> +Andy Freeland <andy.freeland@redjack.com> +Andy Freeland <andy@andyfreeland.net> +Andy Kluger <AndydeCleyre@users.noreply.github.com> +Ani Hayrapetyan <ahayrapetya2@bloomberg.net> +Aniruddha Basak <codewithaniruddha@gmail.com> +Anish Tambe <anish.tambe@yahoo.in> +Anrs Hu <anrs@douban.com> +Anthony Sottile <asottile@umich.edu> +Antoine Musso <hashar@free.fr> +Anton Ovchinnikov <revolver112@gmail.com> +Anton Patrushev <apatrushev@gmail.com> +Antonio Alvarado Hernandez <tnotstar@gmail.com> +Antony Lee <anntzer.lee@gmail.com> +Antti Kaihola <akaihol+github@ambitone.com> +Anubhav Patel <anubhavp28@gmail.com> +Anuj Godase <godaseanuj@gmail.com> +AQNOUCH Mohammed <aqnouch.mohammed@gmail.com> +AraHaan <seandhunt_7@yahoo.com> +Arindam Choudhury <arindam@live.com> +Armin Ronacher <armin.ronacher@active-4.com> +Artem <duketemon@users.noreply.github.com> +Ashley Manton <ajd.manton@googlemail.com> +Ashwin Ramaswami <aramaswamis@gmail.com> +atse <atse@users.noreply.github.com> +Atsushi Odagiri <aodagx@gmail.com> +Avner Cohen <israbirding@gmail.com> +Baptiste Mispelon <bmispelon@gmail.com> +Barney Gale <barney.gale@gmail.com> +barneygale <barney.gale@gmail.com> +Bartek Ogryczak <b.ogryczak@gmail.com> +Bastian Venthur <mail@venthur.de> +Ben Darnell <ben@bendarnell.com> +Ben Hoyt <benhoyt@gmail.com> +Ben Rosser <rosser.bjr@gmail.com> +Bence Nagy <bence@underyx.me> +Benjamin Peterson <benjamin@python.org> +Benjamin VanEvery <ben@simondata.com> +Benoit Pierre <benoit.pierre@gmail.com> +Berker Peksag <berker.peksag@gmail.com> +Bernardo B. Marques <bernardo.fire@gmail.com> +Bernhard M. Wiedemann <bwiedemann@suse.de> +Bertil Hatt <bertil.hatt@farfetch.com> +Bogdan Opanchuk <bogdan@opanchuk.net> +BorisZZZ <BorisZZZ@users.noreply.github.com> +Brad Erickson <eosrei@gmail.com> +Bradley Ayers <bradley.ayers@gmail.com> +Brandon L. 
Reiss <brandon@damyata.co> +Brandt Bucher <brandtbucher@gmail.com> +Brett Randall <javabrett@gmail.com> +Brian Cristante <33549821+brcrista@users.noreply.github.com> +Brian Cristante <brcrista@microsoft.com> +Brian Rosner <brosner@gmail.com> +BrownTruck <BrownTruck@users.noreply.github.com> +Bruno Oliveira <nicoddemus@gmail.com> +Bruno Renié <brutasse@gmail.com> +Bstrdsmkr <bstrdsmkr@gmail.com> +Buck Golemon <buck@yelp.com> +burrows <burrows@preveil.com> +Bussonnier Matthias <bussonniermatthias@gmail.com> +c22 <c22@users.noreply.github.com> +Caleb Martinez <accounts@calebmartinez.com> +Calvin Smith <eukaryote@users.noreply.github.com> +Carl Meyer <carl@oddbird.net> +Carlos Liam <carlos@aarzee.me> +Carol Willing <carolcode@willingconsulting.com> +Carter Thayer <carterwthayer@gmail.com> +Cass <cass.petrus@gmail.com> +Chandrasekhar Atina <chandu.atina@gmail.com> +Chih-Hsuan Yen <yan12125@gmail.com> +Chih-Hsuan Yen <yen@chyen.cc> +Chris Brinker <chris.brinker@gmail.com> +Chris Hunt <chrahunt@gmail.com> +Chris Jerdonek <chris.jerdonek@gmail.com> +Chris McDonough <chrism@plope.com> +Chris Wolfe <chriswwolfe@gmail.com> +Christian Heimes <christian@python.org> +Christian Oudard <christian.oudard@gmail.com> +Christopher Hunt <chrahunt@gmail.com> +Christopher Snyder <cnsnyder@users.noreply.github.com> +Clark Boylan <clark.boylan@gmail.com> +Clay McClure <clay@daemons.net> +Cody <Purring@users.noreply.github.com> +Cody Soyland <codysoyland@gmail.com> +Colin Watson <cjwatson@debian.org> +Connor Osborn <cdosborn@email.arizona.edu> +Cooper Lees <me@cooperlees.com> +Cooper Ry Lees <me@cooperlees.com> +Cory Benfield <lukasaoz@gmail.com> +Cory Wright <corywright@gmail.com> +Craig Kerstiens <craig.kerstiens@gmail.com> +Cristian Sorinel <cristian.sorinel@gmail.com> +Curtis Doty <Curtis@GreenKey.net> +cytolentino <ctolentino8@bloomberg.net> +Damian Quiroga <qdamian@gmail.com> +Dan Black <dyspop@gmail.com> +Dan Savilonis <djs@n-cube.org> +Dan Sully <daniel-github@electricrain.com> +daniel <mcdonaldd@unimelb.edu.au> +Daniel Collins <accounts@dac.io> +Daniel Hahler <git@thequod.de> +Daniel Holth <dholth@fastmail.fm> +Daniel Jost <torpedojost@gmail.com> +Daniel Shaulov <daniel.shaulov@gmail.com> +Daniele Esposti <expobrain@users.noreply.github.com> +Daniele Procida <daniele@vurt.org> +Danny Hermes <daniel.j.hermes@gmail.com> +Dav Clark <davclark@gmail.com> +Dave Abrahams <dave@boostpro.com> +Dave Jones <dave@waveform.org.uk> +David Aguilar <davvid@gmail.com> +David Black <db@d1b.org> +David Bordeynik <david.bordeynik@gmail.com> +David Bordeynik <david@zebra-med.com> +David Caro <david@dcaro.es> +David Evans <d@drhevans.com> +David Linke <dr.david.linke@gmail.com> +David Pursehouse <david.pursehouse@gmail.com> +David Tucker <david@tucker.name> +David Wales <daviewales@gmail.com> +Davidovich <david.genest@gmail.com> +derwolfe <chriswwolfe@gmail.com> +Desetude <harry@desetude.com> +Diego Caraballo <diegocaraballo84@gmail.com> +DiegoCaraballo <diegocaraballo84@gmail.com> +Dmitry Gladkov <dmitry.gladkov@gmail.com> +Domen Kožar <domen@dev.si> +Donald Stufft <donald@stufft.io> +Dongweiming <dongweiming@admaster.com.cn> +Douglas Thor <dougthor42@users.noreply.github.com> +DrFeathers <WilliamGeorgeBurgess@gmail.com> +Dustin Ingram <di@di.codes> +Dwayne Bailey <dwayne@translate.org.za> +Ed Morley <501702+edmorley@users.noreply.github.com> +Ed Morley <emorley@mozilla.com> +Eitan Adler <lists@eitanadler.com> +ekristina <panacejja@gmail.com> +elainechan <elaine.chan@outlook.com> +Eli Schwartz <eschwartz93@gmail.com> +Eli 
Schwartz <eschwartz@archlinux.org> +Emil Burzo <contact@emilburzo.com> +Emil Styrke <emil.styrke@gmail.com> +Endoh Takanao <djmchl@gmail.com> +enoch <lanxenet@gmail.com> +Erdinc Mutlu <erdinc_mutlu@yahoo.com> +Eric Gillingham <Gillingham@bikezen.net> +Eric Hanchrow <eric.hanchrow@gmail.com> +Eric Hopper <hopper@omnifarious.org> +Erik M. Bray <embray@stsci.edu> +Erik Rose <erik@mozilla.com> +Ernest W Durbin III <ewdurbin@gmail.com> +Ernest W. Durbin III <ewdurbin@gmail.com> +Erwin Janssen <erwinjanssen@outlook.com> +Eugene Vereshchagin <evvers@gmail.com> +everdimension <everdimension@gmail.com> +Felix Yan <felixonmars@archlinux.org> +fiber-space <fiber-space@users.noreply.github.com> +Filip Kokosiński <filip.kokosinski@gmail.com> +Florian Briand <ownerfrance+github@hotmail.com> +Florian Rathgeber <florian.rathgeber@gmail.com> +Francesco <f.guerrieri@gmail.com> +Francesco Montesano <franz.bergesund@gmail.com> +Frost Ming <mianghong@gmail.com> +Gabriel Curio <g.curio@gmail.com> +Gabriel de Perthuis <g2p.code@gmail.com> +Garry Polley <garrympolley@gmail.com> +gdanielson <graeme.danielson@gmail.com> +Geoffrey Lehée <geoffrey@lehee.name> +Geoffrey Sneddon <me@gsnedders.com> +George Song <george@55minutes.com> +Georgi Valkov <georgi.t.valkov@gmail.com> +Giftlin Rajaiah <giftlin.rgn@gmail.com> +gizmoguy1 <gizmoguy1@gmail.com> +gkdoc <40815324+gkdoc@users.noreply.github.com> +Gopinath M <31352222+mgopi1990@users.noreply.github.com> +GOTO Hayato <3532528+gh640@users.noreply.github.com> +gpiks <gaurav.pikale@gmail.com> +Guilherme Espada <porcariadagata@gmail.com> +Guy Rozendorn <guy@rzn.co.il> +gzpan123 <gzpan123@gmail.com> +Hanjun Kim <hallazzang@gmail.com> +Hari Charan <hcharan997@gmail.com> +Harsh Vardhan <harsh59v@gmail.com> +Herbert Pfennig <herbert@albinen.com> +Hsiaoming Yang <lepture@me.com> +Hugo <hugovk@users.noreply.github.com> +Hugo Lopes Tavares <hltbra@gmail.com> +Hugo van Kemenade <hugovk@users.noreply.github.com> +hugovk <hugovk@users.noreply.github.com> +Hynek Schlawack <hs@ox.cx> +Ian Bicking <ianb@colorstudy.com> +Ian Cordasco <graffatcolmingov@gmail.com> +Ian Lee <IanLee1521@gmail.com> +Ian Stapleton Cordasco <graffatcolmingov@gmail.com> +Ian Wienand <ian@wienand.org> +Ian Wienand <iwienand@redhat.com> +Igor Kuzmitshov <kuzmiigo@gmail.com> +Igor Sobreira <igor@igorsobreira.com> +Ilya Baryshev <baryshev@gmail.com> +INADA Naoki <songofacandy@gmail.com> +Ionel Cristian Mărieș <contact@ionelmc.ro> +Ionel Maries Cristian <ionel.mc@gmail.com> +Ivan Pozdeev <vano@mail.mipt.ru> +Jacob Kim <me@thejacobkim.com> +jakirkham <jakirkham@gmail.com> +Jakub Stasiak <kuba.stasiak@gmail.com> +Jakub Vysoky <jakub@borka.cz> +Jakub Wilk <jwilk@jwilk.net> +James Cleveland <jamescleveland@gmail.com> +James Cleveland <radiosilence@users.noreply.github.com> +James Firth <hello@james-firth.com> +James Polley <jp@jamezpolley.com> +Jan Pokorný <jpokorny@redhat.com> +Jannis Leidel <jannis@leidel.info> +jarondl <me@jarondl.net> +Jason R. 
Coombs <jaraco@jaraco.com> +Jay Graves <jay@skabber.com> +Jean-Christophe Fillion-Robin <jchris.fillionr@kitware.com> +Jeff Barber <jbarber@computer.org> +Jeff Dairiki <dairiki@dairiki.org> +Jelmer Vernooij <jelmer@jelmer.uk> +jenix21 <devfrog@gmail.com> +Jeremy Stanley <fungi@yuggoth.org> +Jeremy Zafran <jzafran@users.noreply.github.com> +Jiashuo Li <jiasli@microsoft.com> +Jim Garrison <jim@garrison.cc> +Jivan Amara <Development@JivanAmara.net> +John Paton <j.paton@catawiki.nl> +John-Scott Atlakson <john.scott.atlakson@gmail.com> +johnthagen <johnthagen@gmail.com> +johnthagen <johnthagen@users.noreply.github.com> +Jon Banafato <jon@jonafato.com> +Jon Dufresne <jon.dufresne@gmail.com> +Jon Parise <jon@indelible.org> +Jonas Nockert <jonasnockert@gmail.com> +Jonathan Herbert <foohyfooh@gmail.com> +Joost Molenaar <j.j.molenaar@gmail.com> +Jorge Niedbalski <niedbalski@gmail.com> +Joseph Long <jdl@fastmail.fm> +Josh Bronson <jabronson@gmail.com> +Josh Hansen <josh@skwash.net> +Josh Schneier <josh.schneier@gmail.com> +Juanjo Bazán <jjbazan@gmail.com> +Julian Berman <Julian@GrayVines.com> +Julian Gethmann <julian.gethmann@kit.edu> +Julien Demoor <julien@jdemoor.com> +jwg4 <jack.grahl@yahoo.co.uk> +Jyrki Pulliainen <jyrki@spotify.com> +Kai Chen <kaichen120@gmail.com> +Kamal Bin Mustafa <kamal@smach.net> +kaustav haldar <hi@kaustav.me> +keanemind <keanemind@gmail.com> +Keith Maxwell <keith.maxwell@gmail.com> +Kelsey Hightower <kelsey.hightower@gmail.com> +Kenneth Belitzky <kenny@belitzky.com> +Kenneth Reitz <me@kennethreitz.com> +Kenneth Reitz <me@kennethreitz.org> +Kevin Burke <kev@inburke.com> +Kevin Carter <kevin.carter@rackspace.com> +Kevin Frommelt <kevin.frommelt@webfilings.com> +Kevin R Patterson <kevin.r.patterson@intel.com> +Kexuan Sun <me@kianasun.com> +Kit Randel <kit@nocturne.net.nz> +kpinc <kop@meme.com> +Krishna Oza <krishoza15sep@gmail.com> +Kumar McMillan <kumar.mcmillan@gmail.com> +Kyle Persohn <kyle.persohn@gmail.com> +lakshmanaram <lakshmanaram.n@gmail.com> +Laszlo Kiss-Kollar <kiss.kollar.laszlo@gmail.com> +Laurent Bristiel <laurent@bristiel.com> +Laurie Opperman <laurie@sitesee.com.au> +Leon Sasson <leonsassonha@gmail.com> +Lev Givon <lev@columbia.edu> +Lincoln de Sousa <lincoln@comum.org> +Lipis <lipiridis@gmail.com> +Loren Carvalho <lcarvalho@linkedin.com> +Lucas Cimon <lucas.cimon@gmail.com> +Ludovic Gasc <gmludo@gmail.com> +Luke Macken <lmacken@redhat.com> +Luo Jiebin <luo.jiebin@qq.com> +luojiebin <luojiebin@users.noreply.github.com> +luz.paz <luzpaz@users.noreply.github.com> +László Kiss Kollár <lkisskollar@bloomberg.net> +László Kiss Kollár <lkollar@users.noreply.github.com> +Marc Abramowitz <marc@marc-abramowitz.com> +Marc Tamlyn <marc.tamlyn@gmail.com> +Marcus Smith <qwcode@gmail.com> +Mariatta <Mariatta@users.noreply.github.com> +Mark Kohler <mark.kohler@proteinsimple.com> +Mark Williams <markrwilliams@gmail.com> +Mark Williams <mrw@enotuniq.org> +Markus Hametner <fin+github@xbhd.org> +Masaki <mk5986@nyu.edu> +Masklinn <bitbucket.org@masklinn.net> +Matej Stuchlik <mstuchli@redhat.com> +Mathew Jennings <mjennings@foursquare.com> +Mathieu Bridon <bochecha@daitauha.fr> +Matt Good <matt@matt-good.net> +Matt Maker <trip@monstro.us> +Matt Robenolt <matt@ydekproductions.com> +matthew <matthew@trumbell.net> +Matthew Einhorn <moiein2000@gmail.com> +Matthew Gilliard <matthew.gilliard@gmail.com> +Matthew Iversen <teh.ivo@gmail.com> +Matthew Trumbell <matthew@thirdstonepartners.com> +Matthew Willson <matthew@swiftkey.com> +Matthias Bussonnier <bussonniermatthias@gmail.com> 
+mattip <matti.picus@gmail.com> +Maxim Kurnikov <maxim.kurnikov@gmail.com> +Maxime Rouyrre <rouyrre+git@gmail.com> +mayeut <mayeut@users.noreply.github.com> +mbaluna <44498973+mbaluna@users.noreply.github.com> +mdebi <17590103+mdebi@users.noreply.github.com> +memoselyk <memoselyk@gmail.com> +Michael <michael-k@users.noreply.github.com> +Michael Aquilina <michaelaquilina@gmail.com> +Michael E. Karpeles <michael.karpeles@gmail.com> +Michael Klich <michal@michalklich.com> +Michael Williamson <mike@zwobble.org> +michaelpacer <michaelpacer@gmail.com> +Mickaël Schoentgen <mschoentgen@nuxeo.com> +Miguel Araujo Perez <miguel.araujo.perez@gmail.com> +Mihir Singh <git.service@mihirsingh.com> +Mike <mikeh@blur.com> +Mike Hendricks <mikeh@blur.com> +Min RK <benjaminrk@gmail.com> +MinRK <benjaminrk@gmail.com> +Miro Hrončok <miro@hroncok.cz> +Monica Baluna <mbaluna@bloomberg.net> +montefra <franz.bergesund@gmail.com> +Monty Taylor <mordred@inaugust.com> +Nate Coraor <nate@bx.psu.edu> +Nathaniel J. Smith <njs@pobox.com> +Nehal J Wani <nehaljw.kkd1@gmail.com> +Neil Botelho <neil.botelho321@gmail.com> +Nick Coghlan <ncoghlan@gmail.com> +Nick Stenning <nick@whiteink.com> +Nick Timkovich <prometheus235@gmail.com> +Nicolas Bock <nicolasbock@gmail.com> +Nikhil Benesch <nikhil.benesch@gmail.com> +Nitesh Sharma <nbsharma@outlook.com> +Nowell Strite <nowell@strite.org> +NtaleGrey <Shadikntale@gmail.com> +nvdv <modestdev@gmail.com> +Ofekmeister <ofekmeister@gmail.com> +ofrinevo <ofrine@gmail.com> +Oliver Jeeves <oliver.jeeves@ocado.com> +Oliver Tonnhofer <olt@bogosoft.com> +Olivier Girardot <ssaboum@gmail.com> +Olivier Grisel <olivier.grisel@ensta.org> +Ollie Rutherfurd <orutherfurd@gmail.com> +OMOTO Kenji <k-omoto@m3.com> +Omry Yadan <omry@fb.com> +Oren Held <orenhe@il.ibm.com> +Oscar Benjamin <oscar.j.benjamin@gmail.com> +Oz N Tiram <oz.tiram@gmail.com> +Pachwenko <32424503+Pachwenko@users.noreply.github.com> +Patrick Dubroy <pdubroy@gmail.com> +Patrick Jenkins <patrick@socialgrowthtechnologies.com> +Patrick Lawson <pl@foursquare.com> +patricktokeeffe <patricktokeeffe@users.noreply.github.com> +Patrik Kopkan <pkopkan@redhat.com> +Paul Kehrer <paul.l.kehrer@gmail.com> +Paul Moore <p.f.moore@gmail.com> +Paul Nasrat <pnasrat@gmail.com> +Paul Oswald <pauloswald@gmail.com> +Paul van der Linden <mail@paultjuh.org> +Paulus Schoutsen <paulus@paulusschoutsen.nl> +Pavithra Eswaramoorthy <33131404+QueenCoffee@users.noreply.github.com> +Pawel Jasinski <pawel.jasinski@gmail.com> +Pekka Klärck <peke@iki.fi> +Peter Lisák <peter.lisak@showmax.com> +Peter Waller <peter.waller@gmail.com> +petr-tik <petr-tik@users.noreply.github.com> +Phaneendra Chiruvella <hi@pcx.io> +Phil Freo <phil@philfreo.com> +Phil Pennock <phil@pennock-tech.com> +Phil Whelan <phil123@gmail.com> +Philip Jägenstedt <philip@foolip.org> +Philip Molloy <pamolloy@users.noreply.github.com> +Philippe Ombredanne <pombredanne@gmail.com> +Pi Delport <pjdelport@gmail.com> +Pierre-Yves Rofes <github@rofes.fr> +pip <pypa-dev@googlegroups.com> +Prabakaran Kumaresshan <k_prabakaran+github@hotmail.com> +Prabhjyotsing Surjit Singh Sodhi <psinghsodhi@bloomberg.net> +Prabhu Marappan <prabhum.794@gmail.com> +Pradyun Gedam <pradyunsg@gmail.com> +Pratik Mallya <mallya@us.ibm.com> +Preet Thakkar <preet.thakkar@students.iiit.ac.in> +Preston Holmes <preston@ptone.com> +Przemek Wrzos <hetmankp@none> +Pulkit Goyal <7895pulkit@gmail.com> +Qiangning Hong <hongqn@gmail.com> +Quentin Pradet <quentin.pradet@gmail.com> +R. 
David Murray <rdmurray@bitdance.com> +Rafael Caricio <rafael.jacinto@gmail.com> +Ralf Schmitt <ralf@systemexit.de> +Razzi Abuissa <razzi53@gmail.com> +rdb <rdb@users.noreply.github.com> +Remi Rampin <r@remirampin.com> +Remi Rampin <remirampin@gmail.com> +Rene Dudfield <renesd@gmail.com> +Riccardo Magliocchetti <riccardo.magliocchetti@gmail.com> +Richard Jones <r1chardj0n3s@gmail.com> +RobberPhex <robberphex@gmail.com> +Robert Collins <rbtcollins@hp.com> +Robert McGibbon <rmcgibbo@gmail.com> +Robert T. McGibbon <rmcgibbo@gmail.com> +robin elisha robinson <elisha.rob@gmail.com> +Roey Berman <roey.berman@gmail.com> +Rohan Jain <crodjer@gmail.com> +Rohan Jain <crodjer@users.noreply.github.com> +Rohan Jain <mail@rohanjain.in> +Roman Bogorodskiy <roman.bogorodskiy@ericsson.com> +Romuald Brunet <romuald@chivil.com> +Ronny Pfannschmidt <Ronny.Pfannschmidt@gmx.de> +Rory McCann <rory@technomancy.org> +Ross Brattain <ross.b.brattain@intel.com> +Roy Wellington Ⅳ <cactus_hugged@yahoo.com> +Roy Wellington Ⅳ <roy@mybasis.com> +Ryan Wooden <rygwdn@gmail.com> +ryneeverett <ryneeverett@gmail.com> +Sachi King <nakato@nakato.io> +Salvatore Rinchiera <salvatore@rinchiera.com> +Savio Jomton <sajo240519@gmail.com> +schlamar <marc.schlaich@gmail.com> +Scott Kitterman <sklist@kitterman.com> +Sean <me@sean.taipei> +seanj <seanj@xyke.com> +Sebastian Jordan <sebastian.jordan.mail@googlemail.com> +Sebastian Schaetz <sschaetz@butterflynetinc.com> +Segev Finer <segev208@gmail.com> +SeongSoo Cho <ppiyakk2@printf.kr> +Sergey Vasilyev <nolar@nolar.info> +Seth Woodworth <seth@sethish.com> +Shlomi Fish <shlomif@shlomifish.org> +Shovan Maity <shovan.maity@mayadata.io> +Simeon Visser <svisser@users.noreply.github.com> +Simon Cross <hodgestar@gmail.com> +Simon Pichugin <simon.pichugin@gmail.com> +sinoroc <sinoroc.code+git@gmail.com> +Sorin Sbarnea <sorin.sbarnea@gmail.com> +Stavros Korokithakis <stavros@korokithakis.net> +Stefan Scherfke <stefan@sofa-rockers.org> +Stephan Erb <github@stephanerb.eu> +stepshal <nessento@openmailbox.org> +Steve (Gadget) Barnes <gadgetsteve@hotmail.com> +Steve Barnes <gadgetsteve@hotmail.com> +Steve Dower <steve.dower@microsoft.com> +Steve Kowalik <steven@wedontsleep.org> +Steven Myint <git@stevenmyint.com> +stonebig <stonebig34@gmail.com> +Stéphane Bidoul (ACSONE) <stephane.bidoul@acsone.eu> +Stéphane Bidoul <stephane.bidoul@acsone.eu> +Stéphane Klein <contact@stephane-klein.info> +Sumana Harihareswara <sh@changeset.nyc> +Sviatoslav Sydorenko <wk.cvs.github@sydorenko.org.ua> +Sviatoslav Sydorenko <wk@sydorenko.org.ua> +Swat009 <swatantra.kumar8@gmail.com> +Takayuki SHIMIZUKAWA <shimizukawa@gmail.com> +tbeswick <tbeswick@enphaseenergy.com> +Thijs Triemstra <info@collab.nl> +Thomas Fenzl <thomas.fenzl@gmail.com> +Thomas Grainger <tagrain@gmail.com> +Thomas Guettler <tguettler@tbz-pariv.de> +Thomas Johansson <devnull@localhost> +Thomas Kluyver <thomas@kluyver.me.uk> +Thomas Smith <smithtg@ncbi.nlm.nih.gov> +Tim D. 
Smith <github@tim-smith.us> +Tim Gates <tim.gates@iress.com> +Tim Harder <radhermit@gmail.com> +Tim Heap <tim@timheap.me> +tim smith <github@tim-smith.us> +tinruufu <tinruufu@gmail.com> +Tom Forbes <tom@tomforb.es> +Tom Freudenheim <tom.freudenheim@onepeloton.com> +Tom V <tom@viner.tv> +Tomas Orsava <torsava@redhat.com> +Tomer Chachamu <tomer.chachamu@gmail.com> +Tony Beswick <tonybeswick@orcon.net.nz> +Tony Zhaocheng Tan <tony@tonytan.io> +TonyBeswick <TonyBeswick@users.noreply.github.com> +toonarmycaptain <toonarmycaptain@hotmail.com> +Toshio Kuratomi <toshio@fedoraproject.org> +Travis Swicegood <development@domain51.com> +Tzu-ping Chung <uranusjr@gmail.com> +Valentin Haenel <valentin.haenel@gmx.de> +Victor Stinner <victor.stinner@gmail.com> +victorvpaulo <victorvpaulo@gmail.com> +Viktor Szépe <viktor@szepe.net> +Ville Skyttä <ville.skytta@iki.fi> +Vinay Sajip <vinay_sajip@yahoo.co.uk> +Vincent Philippon <sindaewoh@gmail.com> +Vinicyus Macedo <7549205+vinicyusmacedo@users.noreply.github.com> +Vitaly Babiy <vbabiy86@gmail.com> +Vladimir Rutsky <rutsky@users.noreply.github.com> +W. Trevor King <wking@drexel.edu> +Wil Tan <wil@dready.org> +Wilfred Hughes <me@wilfred.me.uk> +William ML Leslie <william.leslie.ttg@gmail.com> +William T Olson <trevor@heytrevor.com> +Wilson Mo <wilsonfv@126.com> +wim glenn <wim.glenn@gmail.com> +Wolfgang Maier <wolfgang.maier@biologie.uni-freiburg.de> +Xavier Fernandez <xav.fernandez@gmail.com> +Xavier Fernandez <xavier.fernandez@polyconseil.fr> +xoviat <xoviat@users.noreply.github.com> +xtreak <tir.karthi@gmail.com> +YAMAMOTO Takashi <yamamoto@midokura.com> +Yen Chi Hsuan <yan12125@gmail.com> +Yeray Diaz Diaz <yeraydiazdiaz@gmail.com> +Yoval P <yoval@gmx.com> +Yu Jian <askingyj@gmail.com> +Yuan Jing Vincent Yan <yyan82@bloomberg.net> +Zearin <zearin@gonk.net> +Zearin <Zearin@users.noreply.github.com> +Zhiping Deng <kofreestyler@gmail.com> +Zvezdan Petkovic <zpetkovic@acm.org> +Łukasz Langa <lukasz@langa.pl> +Семён Марьясин <simeon@maryasin.name> diff --git a/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/INSTALLER b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/LICENSE.txt b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..737fec5c5352af3d9a6a47a0670da4bdb52c5725 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/LICENSE.txt @@ -0,0 +1,20 @@ +Copyright (c) 2008-2019 The pip developers (see AUTHORS.txt file) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/METADATA b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..4adf953086ea4e28c5236788234f38f88602296f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/METADATA @@ -0,0 +1,82 @@ +Metadata-Version: 2.1 +Name: setuptools +Version: 44.0.0 +Summary: Easily download, build, install, upgrade, and uninstall Python packages +Home-page: https://github.com/pypa/setuptools +Author: Python Packaging Authority +Author-email: distutils-sig@python.org +License: UNKNOWN +Project-URL: Documentation, https://setuptools.readthedocs.io/ +Keywords: CPAN PyPI distutils eggs package management +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Archiving :: Packaging +Classifier: Topic :: System :: Systems Administration +Classifier: Topic :: Utilities +Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7 +Description-Content-Type: text/x-rst; charset=UTF-8 + +.. image:: https://img.shields.io/pypi/v/setuptools.svg + :target: https://pypi.org/project/setuptools + +.. image:: https://img.shields.io/readthedocs/setuptools/latest.svg + :target: https://setuptools.readthedocs.io + +.. image:: https://img.shields.io/travis/pypa/setuptools/master.svg?label=Linux%20CI&logo=travis&logoColor=white + :target: https://travis-ci.org/pypa/setuptools + +.. image:: https://img.shields.io/appveyor/ci/pypa/setuptools/master.svg?label=Windows%20CI&logo=appveyor&logoColor=white + :target: https://ci.appveyor.com/project/pypa/setuptools/branch/master + +.. image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg?logo=codecov&logoColor=white + :target: https://codecov.io/gh/pypa/setuptools + +.. image:: https://tidelift.com/badges/github/pypa/setuptools?style=flat + :target: https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme + +.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg + +See the `Installation Instructions +<https://packaging.python.org/installing/>`_ in the Python Packaging +User's Guide for instructions on installing, upgrading, and uninstalling +Setuptools. + +Questions and comments should be directed to the `distutils-sig +mailing list <http://mail.python.org/pipermail/distutils-sig/>`_. 
+Bug reports and especially tested patches may be +submitted directly to the `bug tracker +<https://github.com/pypa/setuptools/issues>`_. + +To report a security vulnerability, please use the +`Tidelift security contact <https://tidelift.com/security>`_. +Tidelift will coordinate the fix and disclosure. + + +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +Setuptools and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. + +`Learn more <https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=referral&utm_campaign=github>`_. + +Code of Conduct +=============== + +Everyone interacting in the setuptools project's codebases, issue trackers, +chat rooms, and mailing lists is expected to follow the +`PyPA Code of Conduct <https://www.pypa.io/en/latest/code-of-conduct/>`_. + + diff --git a/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/RECORD b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..ac0d578ec796dc7d1326c995daaee62994f98aac --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/RECORD @@ -0,0 +1,163 @@ +../../../bin/easy_install,sha256=P5-mcdOrDktPNyJdlPKEsXkzgH98NpksjbxorIhwffM,267 +../../../bin/easy_install-3.8,sha256=P5-mcdOrDktPNyJdlPKEsXkzgH98NpksjbxorIhwffM,267 +__pycache__/easy_install.cpython-38.pyc,, +easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126 +setuptools-44.0.0.dist-info/AUTHORS.txt,sha256=RtqU9KfonVGhI48DAA4-yTOBUhBtQTjFhaDzHoyh7uU,21518 +setuptools-44.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +setuptools-44.0.0.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090 +setuptools-44.0.0.dist-info/METADATA,sha256=L93fcafgVw4xoJUNG0lehyy0prVj-jU_JFxRh0ZUtos,3523 +setuptools-44.0.0.dist-info/RECORD,, +setuptools-44.0.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +setuptools-44.0.0.dist-info/dependency_links.txt,sha256=HlkCFkoK5TbZ5EMLbLKYhLcY_E31kBWD8TqW2EgmatQ,239 +setuptools-44.0.0.dist-info/entry_points.txt,sha256=ZmIqlp-SBdsBS2cuetmU2NdSOs4DG0kxctUR9UJ8Xk0,3150 +setuptools-44.0.0.dist-info/top_level.txt,sha256=2HUXVVwA4Pff1xgTFr3GsTXXKaPaO6vlG6oNJ_4u4Tg,38 +setuptools-44.0.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +setuptools/__init__.py,sha256=WBpCcn2lvdckotabeae1TTYonPOcgCIF3raD2zRWzBc,7283 +setuptools/__pycache__/__init__.cpython-38.pyc,, +setuptools/__pycache__/_deprecation_warning.cpython-38.pyc,, +setuptools/__pycache__/_imp.cpython-38.pyc,, +setuptools/__pycache__/archive_util.cpython-38.pyc,, +setuptools/__pycache__/build_meta.cpython-38.pyc,, +setuptools/__pycache__/config.cpython-38.pyc,, +setuptools/__pycache__/dep_util.cpython-38.pyc,, +setuptools/__pycache__/depends.cpython-38.pyc,, +setuptools/__pycache__/dist.cpython-38.pyc,, +setuptools/__pycache__/errors.cpython-38.pyc,, +setuptools/__pycache__/extension.cpython-38.pyc,, +setuptools/__pycache__/glob.cpython-38.pyc,, +setuptools/__pycache__/installer.cpython-38.pyc,, +setuptools/__pycache__/launch.cpython-38.pyc,, +setuptools/__pycache__/lib2to3_ex.cpython-38.pyc,, +setuptools/__pycache__/monkey.cpython-38.pyc,, +setuptools/__pycache__/msvc.cpython-38.pyc,, +setuptools/__pycache__/namespaces.cpython-38.pyc,, 
+setuptools/__pycache__/package_index.cpython-38.pyc,, +setuptools/__pycache__/py27compat.cpython-38.pyc,, +setuptools/__pycache__/py31compat.cpython-38.pyc,, +setuptools/__pycache__/py33compat.cpython-38.pyc,, +setuptools/__pycache__/py34compat.cpython-38.pyc,, +setuptools/__pycache__/sandbox.cpython-38.pyc,, +setuptools/__pycache__/site-patch.cpython-38.pyc,, +setuptools/__pycache__/ssl_support.cpython-38.pyc,, +setuptools/__pycache__/unicode_utils.cpython-38.pyc,, +setuptools/__pycache__/version.cpython-38.pyc,, +setuptools/__pycache__/wheel.cpython-38.pyc,, +setuptools/__pycache__/windows_support.cpython-38.pyc,, +setuptools/_deprecation_warning.py,sha256=jU9-dtfv6cKmtQJOXN8nP1mm7gONw5kKEtiPtbwnZyI,218 +setuptools/_imp.py,sha256=jloslOkxrTKbobgemfP94YII0nhqiJzE1bRmCTZ1a5I,2223 +setuptools/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +setuptools/_vendor/__pycache__/__init__.cpython-38.pyc,, +setuptools/_vendor/__pycache__/ordered_set.cpython-38.pyc,, +setuptools/_vendor/__pycache__/pyparsing.cpython-38.pyc,, +setuptools/_vendor/__pycache__/six.cpython-38.pyc,, +setuptools/_vendor/ordered_set.py,sha256=dbaCcs27dyN9gnMWGF5nA_BrVn6Q-NrjKYJpV9_fgBs,15130 +setuptools/_vendor/packaging/__about__.py,sha256=CpuMSyh1V7adw8QMjWKkY3LtdqRUkRX4MgJ6nF4stM0,744 +setuptools/_vendor/packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562 +setuptools/_vendor/packaging/__pycache__/__about__.cpython-38.pyc,, +setuptools/_vendor/packaging/__pycache__/__init__.cpython-38.pyc,, +setuptools/_vendor/packaging/__pycache__/_compat.cpython-38.pyc,, +setuptools/_vendor/packaging/__pycache__/_structures.cpython-38.pyc,, +setuptools/_vendor/packaging/__pycache__/markers.cpython-38.pyc,, +setuptools/_vendor/packaging/__pycache__/requirements.cpython-38.pyc,, +setuptools/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc,, +setuptools/_vendor/packaging/__pycache__/tags.cpython-38.pyc,, +setuptools/_vendor/packaging/__pycache__/utils.cpython-38.pyc,, +setuptools/_vendor/packaging/__pycache__/version.cpython-38.pyc,, +setuptools/_vendor/packaging/_compat.py,sha256=Ugdm-qcneSchW25JrtMIKgUxfEEBcCAz6WrEeXeqz9o,865 +setuptools/_vendor/packaging/_structures.py,sha256=pVd90XcXRGwpZRB_qdFuVEibhCHpX_bL5zYr9-N0mc8,1416 +setuptools/_vendor/packaging/markers.py,sha256=-meFl9Fr9V8rF5Rduzgett5EHK9wBYRUqssAV2pj0lw,8268 +setuptools/_vendor/packaging/requirements.py,sha256=3dwIJekt8RRGCUbgxX8reeAbgmZYjb0wcCRtmH63kxI,4742 +setuptools/_vendor/packaging/specifiers.py,sha256=0ZzQpcUnvrQ6LjR-mQRLzMr8G6hdRv-mY0VSf_amFtI,27778 +setuptools/_vendor/packaging/tags.py,sha256=EPLXhO6GTD7_oiWEO1U0l0PkfR8R_xivpMDHXnsTlts,12933 +setuptools/_vendor/packaging/utils.py,sha256=VaTC0Ei7zO2xl9ARiWmz2YFLFt89PuuhLbAlXMyAGms,1520 +setuptools/_vendor/packaging/version.py,sha256=Npdwnb8OHedj_2L86yiUqscujb7w_i5gmSK1PhOAFzg,11978 +setuptools/_vendor/pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055 +setuptools/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 +setuptools/archive_util.py,sha256=kw8Ib_lKjCcnPKNbS7h8HztRVK0d5RacU3r_KRdVnmM,6592 +setuptools/build_meta.py,sha256=-9Nmj9YdbW4zX3TssPJZhsENrTa4fw3k86Jm1cdKMik,9597 +setuptools/cli-32.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 +setuptools/cli-64.exe,sha256=KLABu5pyrnokJCv6skjXZ6GsXeyYHGcqOUT3oHI3Xpo,74752 +setuptools/cli.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 +setuptools/command/__init__.py,sha256=QCAuA9whnq8Bnoc0bBaS6Lw_KAUO0DiHYZQXEMNn5hg,568 
+setuptools/command/__pycache__/__init__.cpython-38.pyc,, +setuptools/command/__pycache__/alias.cpython-38.pyc,, +setuptools/command/__pycache__/bdist_egg.cpython-38.pyc,, +setuptools/command/__pycache__/bdist_rpm.cpython-38.pyc,, +setuptools/command/__pycache__/bdist_wininst.cpython-38.pyc,, +setuptools/command/__pycache__/build_clib.cpython-38.pyc,, +setuptools/command/__pycache__/build_ext.cpython-38.pyc,, +setuptools/command/__pycache__/build_py.cpython-38.pyc,, +setuptools/command/__pycache__/develop.cpython-38.pyc,, +setuptools/command/__pycache__/dist_info.cpython-38.pyc,, +setuptools/command/__pycache__/easy_install.cpython-38.pyc,, +setuptools/command/__pycache__/egg_info.cpython-38.pyc,, +setuptools/command/__pycache__/install.cpython-38.pyc,, +setuptools/command/__pycache__/install_egg_info.cpython-38.pyc,, +setuptools/command/__pycache__/install_lib.cpython-38.pyc,, +setuptools/command/__pycache__/install_scripts.cpython-38.pyc,, +setuptools/command/__pycache__/py36compat.cpython-38.pyc,, +setuptools/command/__pycache__/register.cpython-38.pyc,, +setuptools/command/__pycache__/rotate.cpython-38.pyc,, +setuptools/command/__pycache__/saveopts.cpython-38.pyc,, +setuptools/command/__pycache__/sdist.cpython-38.pyc,, +setuptools/command/__pycache__/setopt.cpython-38.pyc,, +setuptools/command/__pycache__/test.cpython-38.pyc,, +setuptools/command/__pycache__/upload.cpython-38.pyc,, +setuptools/command/__pycache__/upload_docs.cpython-38.pyc,, +setuptools/command/alias.py,sha256=KjpE0sz_SDIHv3fpZcIQK-sCkJz-SrC6Gmug6b9Nkc8,2426 +setuptools/command/bdist_egg.py,sha256=nnfV8Ah8IRC_Ifv5Loa9FdxL66MVbyDXwy-foP810zM,18185 +setuptools/command/bdist_rpm.py,sha256=B7l0TnzCGb-0nLlm6rS00jWLkojASwVmdhW2w5Qz_Ak,1508 +setuptools/command/bdist_wininst.py,sha256=_6dz3lpB1tY200LxKPLM7qgwTCceOMgaWFF-jW2-pm0,637 +setuptools/command/build_clib.py,sha256=bQ9aBr-5ZSO-9fGsGsDLz0mnnFteHUZnftVLkhvHDq0,4484 +setuptools/command/build_ext.py,sha256=Ib42YUGksBswm2mL5xmQPF6NeTA6HcqrvAtEgFCv32A,13019 +setuptools/command/build_py.py,sha256=yWyYaaS9F3o9JbIczn064A5g1C5_UiKRDxGaTqYbtLE,9596 +setuptools/command/develop.py,sha256=MQlnGS6uP19erK2JCNOyQYoYyquk3PADrqrrinqqLtA,8184 +setuptools/command/dist_info.py,sha256=5t6kOfrdgALT-P3ogss6PF9k-Leyesueycuk3dUyZnI,960 +setuptools/command/easy_install.py,sha256=0lY8Agxe-7IgMtxgxFuOY1NrDlBzOUlpCKsvayXlTYY,89903 +setuptools/command/egg_info.py,sha256=0e_TXrMfpa8nGTO7GmJcmpPCMWzliZi6zt9aMchlumc,25578 +setuptools/command/install.py,sha256=8doMxeQEDoK4Eco0mO2WlXXzzp9QnsGJQ7Z7yWkZPG8,4705 +setuptools/command/install_egg_info.py,sha256=4zq_Ad3jE-EffParuyDEnvxU6efB-Xhrzdr8aB6Ln_8,3195 +setuptools/command/install_lib.py,sha256=9zdc-H5h6RPxjySRhOwi30E_WfcVva7gpfhZ5ata60w,5023 +setuptools/command/install_scripts.py,sha256=UD0rEZ6861mTYhIdzcsqKnUl8PozocXWl9VBQ1VTWnc,2439 +setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628 +setuptools/command/py36compat.py,sha256=SzjZcOxF7zdFUT47Zv2n7AM3H8koDys_0OpS-n9gIfc,4986 +setuptools/command/register.py,sha256=kk3DxXCb5lXTvqnhfwx2g6q7iwbUmgTyXUCaBooBOUk,468 +setuptools/command/rotate.py,sha256=co5C1EkI7P0GGT6Tqz-T2SIj2LBJTZXYELpmao6d4KQ,2164 +setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658 +setuptools/command/sdist.py,sha256=IL1LepD2h8qGKOFJ3rrQVbjNH_Q6ViD40l0QADr4MEU,8088 +setuptools/command/setopt.py,sha256=NTWDyx-gjDF-txf4dO577s7LOzHVoKR0Mq33rFxaRr8,5085 +setuptools/command/test.py,sha256=u2kXngIIdSYqtvwFlHiN6Iye1IB4TU6uadB2uiV1szw,9602 
+setuptools/command/upload.py,sha256=XT3YFVfYPAmA5qhGg0euluU98ftxRUW-PzKcODMLxUs,462 +setuptools/command/upload_docs.py,sha256=oXiGplM_cUKLwE4CWWw98RzCufAu8tBhMC97GegFcms,7311 +setuptools/config.py,sha256=6SB2OY3qcooOJmG_rsK_s0pKBsorBlDpfMJUyzjQIGk,20575 +setuptools/dep_util.py,sha256=fgixvC1R7sH3r13ktyf7N0FALoqEXL1cBarmNpSEoWg,935 +setuptools/depends.py,sha256=qt2RWllArRvhnm8lxsyRpcthEZYp4GHQgREl1q0LkFw,5517 +setuptools/dist.py,sha256=xtXaNsOsE32MwwQqErzgXJF7jsTQz9GYFRrwnPFQ0J0,49865 +setuptools/errors.py,sha256=MVOcv381HNSajDgEUWzOQ4J6B5BHCBMSjHfaWcEwA1o,524 +setuptools/extension.py,sha256=uc6nHI-MxwmNCNPbUiBnybSyqhpJqjbhvOQ-emdvt_E,1729 +setuptools/extern/__init__.py,sha256=4q9gtShB1XFP6CisltsyPqtcfTO6ZM9Lu1QBl3l-qmo,2514 +setuptools/extern/__pycache__/__init__.cpython-38.pyc,, +setuptools/glob.py,sha256=o75cHrOxYsvn854thSxE0x9k8JrKDuhP_rRXlVB00Q4,5084 +setuptools/gui-32.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 +setuptools/gui-64.exe,sha256=aYKMhX1IJLn4ULHgWX0sE0yREUt6B3TEHf_jOw6yNyE,75264 +setuptools/gui.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 +setuptools/installer.py,sha256=TCFRonRo01I79zo-ucf3Ymhj8TenPlmhMijN916aaJs,5337 +setuptools/launch.py,sha256=sd7ejwhBocCDx_wG9rIs0OaZ8HtmmFU8ZC6IR_S0Lvg,787 +setuptools/lib2to3_ex.py,sha256=t5e12hbR2pi9V4ezWDTB4JM-AISUnGOkmcnYHek3xjg,2013 +setuptools/monkey.py,sha256=FGc9fffh7gAxMLFmJs2DW_OYWpBjkdbNS2n14UAK4NA,5264 +setuptools/msvc.py,sha256=8baJ6aYgCA4TRdWQQi185qB9dnU8FaP4wgpbmd7VODs,46751 +setuptools/namespaces.py,sha256=F0Nrbv8KCT2OrO7rwa03om4N4GZKAlnce-rr-cgDQa8,3199 +setuptools/package_index.py,sha256=rqhmbFUEf4WxndnKbtWmj_x8WCuZSuoCgA0K1syyCY8,40616 +setuptools/py27compat.py,sha256=tvmer0Tn-wk_JummCkoM22UIjpjL-AQ8uUiOaqTs8sI,1496 +setuptools/py31compat.py,sha256=h2rtZghOfwoGYd8sQ0-auaKiF3TcL3qX0bX3VessqcE,838 +setuptools/py33compat.py,sha256=SMF9Z8wnGicTOkU1uRNwZ_kz5Z_bj29PUBbqdqeeNsc,1330 +setuptools/py34compat.py,sha256=KYOd6ybRxjBW8NJmYD8t_UyyVmysppFXqHpFLdslGXU,245 +setuptools/sandbox.py,sha256=9UbwfEL5QY436oMI1LtFWohhoZ-UzwHvGyZjUH_qhkw,14276 +setuptools/script (dev).tmpl,sha256=RUzQzCQUaXtwdLtYHWYbIQmOaES5Brqq1FvUA_tu-5I,218 +setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138 +setuptools/site-patch.py,sha256=OumkIHMuoSenRSW1382kKWI1VAwxNE86E5W8iDd34FY,2302 +setuptools/ssl_support.py,sha256=nLjPUBBw7RTTx6O4RJZ5eAMGgjJG8beiDbkFXDZpLuM,8493 +setuptools/unicode_utils.py,sha256=NOiZ_5hD72A6w-4wVj8awHFM3n51Kmw1Ic_vx15XFqw,996 +setuptools/version.py,sha256=og_cuZQb0QI6ukKZFfZWPlr1HgJBPPn2vO2m_bI9ZTE,144 +setuptools/wheel.py,sha256=zct-SEj5_LoHg6XELt2cVRdulsUENenCdS1ekM7TlZA,8455 +setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714 diff --git a/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/WHEEL b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..ef99c6cf3283b50a273ac4c6d009a0aa85597070 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/dependency_links.txt b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/dependency_links.txt new file mode 100644 index 0000000000000000000000000000000000000000..e87d02103ede91545d70783dd59653d183424b68 --- /dev/null +++ 
b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/dependency_links.txt @@ -0,0 +1,2 @@ +https://files.pythonhosted.org/packages/source/c/certifi/certifi-2016.9.26.tar.gz#md5=baa81e951a29958563689d868ef1064d +https://files.pythonhosted.org/packages/source/w/wincertstore/wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2 diff --git a/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/entry_points.txt b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..0fed3f1d83f3eb690dddad3f050da3d3f021eb6a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/entry_points.txt @@ -0,0 +1,68 @@ +[console_scripts] +easy_install = setuptools.command.easy_install:main + +[distutils.commands] +alias = setuptools.command.alias:alias +bdist_egg = setuptools.command.bdist_egg:bdist_egg +bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm +bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst +build_clib = setuptools.command.build_clib:build_clib +build_ext = setuptools.command.build_ext:build_ext +build_py = setuptools.command.build_py:build_py +develop = setuptools.command.develop:develop +dist_info = setuptools.command.dist_info:dist_info +easy_install = setuptools.command.easy_install:easy_install +egg_info = setuptools.command.egg_info:egg_info +install = setuptools.command.install:install +install_egg_info = setuptools.command.install_egg_info:install_egg_info +install_lib = setuptools.command.install_lib:install_lib +install_scripts = setuptools.command.install_scripts:install_scripts +rotate = setuptools.command.rotate:rotate +saveopts = setuptools.command.saveopts:saveopts +sdist = setuptools.command.sdist:sdist +setopt = setuptools.command.setopt:setopt +test = setuptools.command.test:test +upload_docs = setuptools.command.upload_docs:upload_docs + +[distutils.setup_keywords] +convert_2to3_doctests = setuptools.dist:assert_string_list +dependency_links = setuptools.dist:assert_string_list +eager_resources = setuptools.dist:assert_string_list +entry_points = setuptools.dist:check_entry_points +exclude_package_data = setuptools.dist:check_package_data +extras_require = setuptools.dist:check_extras +include_package_data = setuptools.dist:assert_bool +install_requires = setuptools.dist:check_requirements +namespace_packages = setuptools.dist:check_nsp +package_data = setuptools.dist:check_package_data +packages = setuptools.dist:check_packages +python_requires = setuptools.dist:check_specifier +setup_requires = setuptools.dist:check_requirements +test_loader = setuptools.dist:check_importable +test_runner = setuptools.dist:check_importable +test_suite = setuptools.dist:check_test_suite +tests_require = setuptools.dist:check_requirements +use_2to3 = setuptools.dist:assert_bool +use_2to3_exclude_fixers = setuptools.dist:assert_string_list +use_2to3_fixers = setuptools.dist:assert_string_list +zip_safe = setuptools.dist:assert_bool + +[egg_info.writers] +PKG-INFO = setuptools.command.egg_info:write_pkg_info +dependency_links.txt = setuptools.command.egg_info:overwrite_arg +depends.txt = setuptools.command.egg_info:warn_depends_obsolete +eager_resources.txt = setuptools.command.egg_info:overwrite_arg +entry_points.txt = setuptools.command.egg_info:write_entries +namespace_packages.txt = setuptools.command.egg_info:overwrite_arg +requires.txt = setuptools.command.egg_info:write_requirements +top_level.txt = 
setuptools.command.egg_info:write_toplevel_names + +[setuptools.finalize_distribution_options] +2to3_doctests = setuptools.dist:Distribution._finalize_2to3_doctests +features = setuptools.dist:Distribution._finalize_feature_opts +keywords = setuptools.dist:Distribution._finalize_setup_keywords +parent_finalize = setuptools.dist:_Distribution.finalize_options + +[setuptools.installation] +eggsecutable = setuptools.command.easy_install:bootstrap + diff --git a/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/top_level.txt b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..4577c6a795e510bf7578236665f582c3770fb42e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/top_level.txt @@ -0,0 +1,3 @@ +easy_install +pkg_resources +setuptools diff --git a/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/zip-safe b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/zip-safe new file mode 100644 index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools-44.0.0.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__init__.py b/backend/test/lib/python3.8/site-packages/setuptools/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a71b2bbdc6170963a66959c48080c1dedc7bb703 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/__init__.py @@ -0,0 +1,228 @@ +"""Extensions to the 'distutils' for large or complex distributions""" + +import os +import sys +import functools +import distutils.core +import distutils.filelist +import re +from distutils.errors import DistutilsOptionError +from distutils.util import convert_path +from fnmatch import fnmatchcase + +from ._deprecation_warning import SetuptoolsDeprecationWarning + +from setuptools.extern.six import PY3, string_types +from setuptools.extern.six.moves import filter, map + +import setuptools.version +from setuptools.extension import Extension +from setuptools.dist import Distribution, Feature +from setuptools.depends import Require +from . import monkey + +__metaclass__ = type + + +__all__ = [ + 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', + 'SetuptoolsDeprecationWarning', + 'find_packages' +] + +if PY3: + __all__.append('find_namespace_packages') + +__version__ = setuptools.version.__version__ + +bootstrap_install_from = None + +# If we run 2to3 on .py files, should we also convert docstrings? +# Default: yes; assume that we can detect doctests reliably +run_2to3_on_doctests = True +# Standard package names for fixer packages +lib2to3_fixer_packages = ['lib2to3.fixes'] + + +class PackageFinder: + """ + Generate a list of all Python packages found within a directory + """ + + @classmethod + def find(cls, where='.', exclude=(), include=('*',)): + """Return a list all Python packages found within directory 'where' + + 'where' is the root directory which will be searched for packages. It + should be supplied as a "cross-platform" (i.e. URL-style) path; it will + be converted to the appropriate local path syntax. + + 'exclude' is a sequence of package names to exclude; '*' can be used + as a wildcard in the names, such that 'foo.*' will exclude all + subpackages of 'foo' (but not 'foo' itself). + + 'include' is a sequence of package names to include. 
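# --- A usage sketch for the find() API documented above (as shown further
# --- down, find_packages is an alias for PackageFinder.find). The path and
# --- exclude patterns here are hypothetical, not taken from this repo.
from setuptools import find_packages

pkgs = find_packages(where='.', exclude=('tests', 'tests.*'))
# Yields dotted package names found under '.', skipping anything matching the
# exclude patterns; 'ez_setup' and '*__pycache__' are always filtered out.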
If it's + specified, only the named packages will be included. If it's not + specified, all found packages will be included. 'include' can contain + shell style wildcard patterns just like 'exclude'. + """ + + return list(cls._find_packages_iter( + convert_path(where), + cls._build_filter('ez_setup', '*__pycache__', *exclude), + cls._build_filter(*include))) + + @classmethod + def _find_packages_iter(cls, where, exclude, include): + """ + All the packages found in 'where' that pass the 'include' filter, but + not the 'exclude' filter. + """ + for root, dirs, files in os.walk(where, followlinks=True): + # Copy dirs to iterate over it, then empty dirs. + all_dirs = dirs[:] + dirs[:] = [] + + for dir in all_dirs: + full_path = os.path.join(root, dir) + rel_path = os.path.relpath(full_path, where) + package = rel_path.replace(os.path.sep, '.') + + # Skip directory trees that are not valid packages + if ('.' in dir or not cls._looks_like_package(full_path)): + continue + + # Should this package be included? + if include(package) and not exclude(package): + yield package + + # Keep searching subdirectories, as there may be more packages + # down there, even if the parent was excluded. + dirs.append(dir) + + @staticmethod + def _looks_like_package(path): + """Does a directory look like a package?""" + return os.path.isfile(os.path.join(path, '__init__.py')) + + @staticmethod + def _build_filter(*patterns): + """ + Given a list of patterns, return a callable that will be true only if + the input matches at least one of the patterns. + """ + return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns) + + +class PEP420PackageFinder(PackageFinder): + @staticmethod + def _looks_like_package(path): + return True + + +find_packages = PackageFinder.find + +if PY3: + find_namespace_packages = PEP420PackageFinder.find + + +def _install_setup_requires(attrs): + # Note: do not use `setuptools.Distribution` directly, as + # our PEP 517 backend patch `distutils.core.Distribution`. + dist = distutils.core.Distribution(dict( + (k, v) for k, v in attrs.items() + if k in ('dependency_links', 'setup_requires') + )) + # Honor setup.cfg's options. + dist.parse_config_files(ignore_option_errors=True) + if dist.setup_requires: + dist.fetch_build_eggs(dist.setup_requires) + + +def setup(**attrs): + # Make sure we have any requirements needed to interpret 'attrs'. + _install_setup_requires(attrs) + return distutils.core.setup(**attrs) + +setup.__doc__ = distutils.core.setup.__doc__ + + +_Command = monkey.get_unpatched(distutils.core.Command) + + +class Command(_Command): + __doc__ = _Command.__doc__ + + command_consumes_arguments = False + + def __init__(self, dist, **kw): + """ + Construct the command for dist, updating + vars(self) with any keyword parameters. + """ + _Command.__init__(self, dist) + vars(self).update(kw) + + def _ensure_stringlike(self, option, what, default=None): + val = getattr(self, option) + if val is None: + setattr(self, option, default) + return default + elif not isinstance(val, string_types): + raise DistutilsOptionError("'%s' must be a %s (got `%s`)" + % (option, what, val)) + return val + + def ensure_string_list(self, option): + r"""Ensure that 'option' is a list of strings. If 'option' is + currently a string, we split it either on /,\s*/ or /\s+/, so + "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become + ["foo", "bar", "baz"]. 
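# --- The split rule described above, shown in isolation: a comma with
# --- optional trailing whitespace, or any bare run of whitespace, separates
# --- items.
import re

assert re.split(r',\s*|\s+', "foo bar baz") == ["foo", "bar", "baz"]
assert re.split(r',\s*|\s+', "foo,bar,baz") == ["foo", "bar", "baz"]
assert re.split(r',\s*|\s+', "foo, bar baz") == ["foo", "bar", "baz"]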
+ """ + val = getattr(self, option) + if val is None: + return + elif isinstance(val, string_types): + setattr(self, option, re.split(r',\s*|\s+', val)) + else: + if isinstance(val, list): + ok = all(isinstance(v, string_types) for v in val) + else: + ok = False + if not ok: + raise DistutilsOptionError( + "'%s' must be a list of strings (got %r)" + % (option, val)) + + def reinitialize_command(self, command, reinit_subcommands=0, **kw): + cmd = _Command.reinitialize_command(self, command, reinit_subcommands) + vars(cmd).update(kw) + return cmd + + +def _find_all_simple(path): + """ + Find all files under 'path' + """ + results = ( + os.path.join(base, file) + for base, dirs, files in os.walk(path, followlinks=True) + for file in files + ) + return filter(os.path.isfile, results) + + +def findall(dir=os.curdir): + """ + Find all files under 'dir' and return the list of full filenames. + Unless dir is '.', return full filenames with dir prepended. + """ + files = _find_all_simple(dir) + if dir == os.curdir: + make_rel = functools.partial(os.path.relpath, start=dir) + files = map(make_rel, files) + return list(files) + + +# Apply monkey patches +monkey.patch_all() diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..65dee0c3df3548b0576e9395d75eeb45a7c91f4f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/_deprecation_warning.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/_deprecation_warning.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9fc2241f17fae0ec20b91cc5d344dc50dc3125af Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/_deprecation_warning.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/_imp.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/_imp.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..90d7ae80333e5e99c16873ebecad61aec443d782 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/_imp.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/archive_util.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/archive_util.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9dea852db76d888ead1190aa084b794447d0ce66 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/archive_util.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/build_meta.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/build_meta.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d3762a8793e6d19538119889aaffef688521bf27 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/build_meta.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/config.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/config.cpython-38.pyc new file mode 
100644 index 0000000000000000000000000000000000000000..570089232004aa4fd51a07454c351d6a92fc6824 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/config.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/dep_util.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/dep_util.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2023dba0809db96a3b8ed3d5267203132689b9e8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/dep_util.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/depends.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/depends.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..68cb42fe65915cea33c4965442255df94ebbbd5a Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/depends.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/dist.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/dist.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bdcde7880a5676a468afbe7dde9177ab56f6a9ec Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/dist.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/errors.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/errors.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..075b3d2a0bd756bbdcc7b7a296263e9f83753b16 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/errors.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/extension.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/extension.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a7da3acced118ad9d98fa207fd2d2e17ea1a3061 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/extension.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/glob.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/glob.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fc96307ff5e71e531cf92d3bfce7ec1e503f44f5 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/glob.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/installer.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/installer.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c66207c369a8ba1fb4246ac99d430503f0043012 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/installer.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/launch.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/launch.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..53ea19d3b81d891c4a23bfbe6fd2382050633204 Binary files /dev/null and 
b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/launch.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/lib2to3_ex.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/lib2to3_ex.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..96e37107a70a9d2697d534754d3ece684c7ba3f5 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/lib2to3_ex.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/monkey.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/monkey.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e15cc497583105e048312c1434744bf7045c21a1 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/monkey.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/msvc.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/msvc.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..29c1e610e26e7d63db504620d7d70eb5166f66cd Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/msvc.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/namespaces.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/namespaces.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f52a22079cb11af711f0f2056a42ef289229f080 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/namespaces.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/package_index.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/package_index.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..327cffaedbc0177df2fb8529d3d9aa94aee0f897 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/package_index.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/py27compat.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/py27compat.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d16e2bd76a1b1f1dce3e851ff90afbf6656423e8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/py27compat.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/py31compat.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/py31compat.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a8cd7b06cb97b21136be6ec5814f31754f89e9ed Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/py31compat.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/py33compat.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/py33compat.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fc08a800e310a38b37b15c48f9ca6237eb670e92 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/py33compat.cpython-38.pyc differ diff --git 
a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/py34compat.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/py34compat.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dbf83dc8820e8d1abea38ca39152a7bb998ec812 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/py34compat.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/sandbox.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/sandbox.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..755d99475bff4cc7c42b21456846da3e8695200a Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/sandbox.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/site-patch.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/site-patch.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3c1dd41bf74e8364068d0f9dd86b970ae513f3dc Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/site-patch.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/ssl_support.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/ssl_support.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ab5ad5d49d1b0d4d8c4cf748619bf7b3e6491eeb Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/ssl_support.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/unicode_utils.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/unicode_utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e059d6ab6c5f511d54deab22b1504195816054a0 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/unicode_utils.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/version.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/version.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ff06ed0f40f2f32c8bc48171612c6fb71b84ca54 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/version.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/wheel.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/wheel.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fee12033c36ab91e9e2ecb9c57b8996731b65734 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/wheel.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/windows_support.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/windows_support.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..95fc02c497142341f3cc706af9dfc313098254c8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/__pycache__/windows_support.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_deprecation_warning.py 
b/backend/test/lib/python3.8/site-packages/setuptools/_deprecation_warning.py new file mode 100644 index 0000000000000000000000000000000000000000..086b64dd3817c0c1a194ffc1959eeffdd2695bef --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/_deprecation_warning.py @@ -0,0 +1,7 @@ +class SetuptoolsDeprecationWarning(Warning): + """ + Base class for warning deprecations in ``setuptools`` + + This class is not derived from ``DeprecationWarning``, and as such is + visible by default. + """ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_imp.py b/backend/test/lib/python3.8/site-packages/setuptools/_imp.py new file mode 100644 index 0000000000000000000000000000000000000000..a3cce9b284b1e580c1715c5e300a18077d63e8ce --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/_imp.py @@ -0,0 +1,73 @@ +""" +Re-implementation of find_module and get_frozen_object +from the deprecated imp module. +""" + +import os +import importlib.util +import importlib.machinery + +from .py34compat import module_from_spec + + +PY_SOURCE = 1 +PY_COMPILED = 2 +C_EXTENSION = 3 +C_BUILTIN = 6 +PY_FROZEN = 7 + + +def find_module(module, paths=None): + """Just like 'imp.find_module()', but with package support""" + spec = importlib.util.find_spec(module, paths) + if spec is None: + raise ImportError("Can't find %s" % module) + if not spec.has_location and hasattr(spec, 'submodule_search_locations'): + spec = importlib.util.spec_from_loader('__init__.py', spec.loader) + + kind = -1 + file = None + static = isinstance(spec.loader, type) + if spec.origin == 'frozen' or static and issubclass( + spec.loader, importlib.machinery.FrozenImporter): + kind = PY_FROZEN + path = None # imp compatibility + suffix = mode = '' # imp compatibility + elif spec.origin == 'built-in' or static and issubclass( + spec.loader, importlib.machinery.BuiltinImporter): + kind = C_BUILTIN + path = None # imp compatibility + suffix = mode = '' # imp compatibility + elif spec.has_location: + path = spec.origin + suffix = os.path.splitext(path)[1] + mode = 'r' if suffix in importlib.machinery.SOURCE_SUFFIXES else 'rb' + + if suffix in importlib.machinery.SOURCE_SUFFIXES: + kind = PY_SOURCE + elif suffix in importlib.machinery.BYTECODE_SUFFIXES: + kind = PY_COMPILED + elif suffix in importlib.machinery.EXTENSION_SUFFIXES: + kind = C_EXTENSION + + if kind in {PY_SOURCE, PY_COMPILED}: + file = open(path, mode) + else: + path = None + suffix = mode = '' + + return file, path, (suffix, mode, kind) + + +def get_frozen_object(module, paths=None): + spec = importlib.util.find_spec(module, paths) + if not spec: + raise ImportError("Can't find %s" % module) + return spec.loader.get_code(module) + + +def get_module(module, paths, info): + spec = importlib.util.find_spec(module, paths) + if not spec: + raise ImportError("Can't find %s" % module) + return module_from_spec(spec) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/__init__.py b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0aae55fc453d310a354443d224e1ce683c596596 Binary files /dev/null and
b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2d6ff7696425e1e335e0b7018f01007439b0703c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/ordered_set.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/pyparsing.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/pyparsing.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9782ed64da20425ee2f31c792a840267d6f3e14e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/pyparsing.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/six.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/six.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b59763f34cb2ea6715aa8d212981349931ca5906 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/__pycache__/six.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/ordered_set.py b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/ordered_set.py new file mode 100644 index 0000000000000000000000000000000000000000..14876000de895a609d5b9f3de39c3c8fc44ef1fc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/ordered_set.py @@ -0,0 +1,488 @@ +""" +An OrderedSet is a custom MutableSet that remembers its order, so that every +entry has an index that can be looked up. + +Based on a recipe originally posted to ActiveState Recipes by Raymond Hettiger, +and released under the MIT license. +""" +import itertools as it +from collections import deque + +try: + # Python 3 + from collections.abc import MutableSet, Sequence +except ImportError: + # Python 2.7 + from collections import MutableSet, Sequence + +SLICE_ALL = slice(None) +__version__ = "3.1" + + +def is_iterable(obj): + """ + Are we being asked to look up a list of things, instead of a single thing? + We check for the `__iter__` attribute so that this can cover types that + don't have to be known by this module, such as NumPy arrays. + + Strings, however, should be considered as atomic values to look up, not + iterables. The same goes for tuples, since they are immutable and therefore + valid entries. + + We don't need to check for the Python 2 `unicode` type, because it doesn't + have an `__iter__` attribute anyway. + """ + return ( + hasattr(obj, "__iter__") + and not isinstance(obj, str) + and not isinstance(obj, tuple) + ) + + +class OrderedSet(MutableSet, Sequence): + """ + An OrderedSet is a custom MutableSet that remembers its order, so that + every entry has an index that can be looked up. 
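# --- is_iterable(), defined above, in action: containers trigger the
# --- "fancy indexing" path, while strings and tuples are treated as single,
# --- atomic keys. A quick sketch:
assert is_iterable([1, 2, 3])    # lists are looked up element-wise
assert not is_iterable("abc")    # strings are single keys
assert not is_iterable((1, 2))   # tuples are hashable, valid entries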
+ + Example: + >>> OrderedSet([1, 1, 2, 3, 2]) + OrderedSet([1, 2, 3]) + """ + + def __init__(self, iterable=None): + self.items = [] + self.map = {} + if iterable is not None: + self |= iterable + + def __len__(self): + """ + Returns the number of unique elements in the ordered set + + Example: + >>> len(OrderedSet([])) + 0 + >>> len(OrderedSet([1, 2])) + 2 + """ + return len(self.items) + + def __getitem__(self, index): + """ + Get the item at a given index. + + If `index` is a slice, you will get back that slice of items, as a + new OrderedSet. + + If `index` is a list or a similar iterable, you'll get a list of + items corresponding to those indices. This is similar to NumPy's + "fancy indexing". The result is not an OrderedSet because you may ask + for duplicate indices, and the number of elements returned should be + the number of elements asked for. + + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset[1] + 2 + """ + if isinstance(index, slice) and index == SLICE_ALL: + return self.copy() + elif is_iterable(index): + return [self.items[i] for i in index] + elif hasattr(index, "__index__") or isinstance(index, slice): + result = self.items[index] + if isinstance(result, list): + return self.__class__(result) + else: + return result + else: + raise TypeError("Don't know how to index an OrderedSet by %r" % index) + + def copy(self): + """ + Return a shallow copy of this object. + + Example: + >>> this = OrderedSet([1, 2, 3]) + >>> other = this.copy() + >>> this == other + True + >>> this is other + False + """ + return self.__class__(self) + + def __getstate__(self): + if len(self) == 0: + # The state can't be an empty list. + # We need to return a truthy value, or else __setstate__ won't be run. + # + # This could have been done more gracefully by always putting the state + # in a tuple, but this way is backwards- and forwards- compatible with + # previous versions of OrderedSet. + return (None,) + else: + return list(self) + + def __setstate__(self, state): + if state == (None,): + self.__init__([]) + else: + self.__init__(state) + + def __contains__(self, key): + """ + Test if the item is in this ordered set + + Example: + >>> 1 in OrderedSet([1, 3, 2]) + True + >>> 5 in OrderedSet([1, 3, 2]) + False + """ + return key in self.map + + def add(self, key): + """ + Add `key` as an item to this OrderedSet, then return its index. + + If `key` is already in the OrderedSet, return the index it already + had. + + Example: + >>> oset = OrderedSet() + >>> oset.append(3) + 0 + >>> print(oset) + OrderedSet([3]) + """ + if key not in self.map: + self.map[key] = len(self.items) + self.items.append(key) + return self.map[key] + + append = add + + def update(self, sequence): + """ + Update the set with the given iterable sequence, then return the index + of the last element inserted. + + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset.update([3, 1, 5, 1, 4]) + 4 + >>> print(oset) + OrderedSet([1, 2, 3, 5, 4]) + """ + item_index = None + try: + for item in sequence: + item_index = self.add(item) + except TypeError: + raise ValueError( + "Argument needs to be an iterable, got %s" % type(sequence) + ) + return item_index + + def index(self, key): + """ + Get the index of a given entry, raising an IndexError if it's not + present. + + `key` can be an iterable of entries that is not a string, in which case + this returns a list of indices. 
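# --- The iterable-key form mentioned above, which the doctest that follows
# --- only shows for a scalar key:
oset = OrderedSet(["a", "b", "c"])
assert oset.index(["c", "a"]) == [2, 0]  # list of entries -> list of indices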
+ + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset.index(2) + 1 + """ + if is_iterable(key): + return [self.index(subkey) for subkey in key] + return self.map[key] + + # Provide some compatibility with pd.Index + get_loc = index + get_indexer = index + + def pop(self): + """ + Remove and return the last element from the set. + + Raises KeyError if the set is empty. + + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset.pop() + 3 + """ + if not self.items: + raise KeyError("Set is empty") + + elem = self.items[-1] + del self.items[-1] + del self.map[elem] + return elem + + def discard(self, key): + """ + Remove an element. Do not raise an exception if absent. + + The MutableSet mixin uses this to implement the .remove() method, which + *does* raise an error when asked to remove a non-existent item. + + Example: + >>> oset = OrderedSet([1, 2, 3]) + >>> oset.discard(2) + >>> print(oset) + OrderedSet([1, 3]) + >>> oset.discard(2) + >>> print(oset) + OrderedSet([1, 3]) + """ + if key in self: + i = self.map[key] + del self.items[i] + del self.map[key] + for k, v in self.map.items(): + if v >= i: + self.map[k] = v - 1 + + def clear(self): + """ + Remove all items from this OrderedSet. + """ + del self.items[:] + self.map.clear() + + def __iter__(self): + """ + Example: + >>> list(iter(OrderedSet([1, 2, 3]))) + [1, 2, 3] + """ + return iter(self.items) + + def __reversed__(self): + """ + Example: + >>> list(reversed(OrderedSet([1, 2, 3]))) + [3, 2, 1] + """ + return reversed(self.items) + + def __repr__(self): + if not self: + return "%s()" % (self.__class__.__name__,) + return "%s(%r)" % (self.__class__.__name__, list(self)) + + def __eq__(self, other): + """ + Returns true if the containers have the same items. If `other` is a + Sequence, then order is checked, otherwise it is ignored. + + Example: + >>> oset = OrderedSet([1, 3, 2]) + >>> oset == [1, 3, 2] + True + >>> oset == [1, 2, 3] + False + >>> oset == [2, 3] + False + >>> oset == OrderedSet([3, 2, 1]) + False + """ + # In Python 2 deque is not a Sequence, so treat it as one for + # consistent behavior with Python 3. + if isinstance(other, (Sequence, deque)): + # Check that this OrderedSet contains the same elements, in the + # same order, as the other object. + return list(self) == list(other) + try: + other_as_set = set(other) + except TypeError: + # If `other` can't be converted into a set, it's not equal. + return False + else: + return set(self) == other_as_set + + def union(self, *sets): + """ + Combines all unique items. + Each item's order is defined by its first appearance. + + Example: + >>> oset = OrderedSet.union(OrderedSet([3, 1, 4, 1, 5]), [1, 3], [2, 0]) + >>> print(oset) + OrderedSet([3, 1, 4, 5, 2, 0]) + >>> oset.union([8, 9]) + OrderedSet([3, 1, 4, 5, 2, 0, 8, 9]) + >>> oset | {10} + OrderedSet([3, 1, 4, 5, 2, 0, 10]) + """ + cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet + containers = map(list, it.chain([self], sets)) + items = it.chain.from_iterable(containers) + return cls(items) + + def __and__(self, other): + # the parent implementation of this is backwards + return self.intersection(other) + + def intersection(self, *sets): + """ + Returns elements in common between all sets. Order is defined only + by the first set.
+ + Example: + >>> oset = OrderedSet.intersection(OrderedSet([0, 1, 2, 3]), [1, 2, 3]) + >>> print(oset) + OrderedSet([1, 2, 3]) + >>> oset.intersection([2, 4, 5], [1, 2, 3, 4]) + OrderedSet([2]) + >>> oset.intersection() + OrderedSet([1, 2, 3]) + """ + cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet + if sets: + common = set.intersection(*map(set, sets)) + items = (item for item in self if item in common) + else: + items = self + return cls(items) + + def difference(self, *sets): + """ + Returns all elements that are in this set but not the others. + + Example: + >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2])) + OrderedSet([1, 3]) + >>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]), OrderedSet([3])) + OrderedSet([1]) + >>> OrderedSet([1, 2, 3]) - OrderedSet([2]) + OrderedSet([1, 3]) + >>> OrderedSet([1, 2, 3]).difference() + OrderedSet([1, 2, 3]) + """ + cls = self.__class__ + if sets: + other = set.union(*map(set, sets)) + items = (item for item in self if item not in other) + else: + items = self + return cls(items) + + def issubset(self, other): + """ + Report whether another set contains this set. + + Example: + >>> OrderedSet([1, 2, 3]).issubset({1, 2}) + False + >>> OrderedSet([1, 2, 3]).issubset({1, 2, 3, 4}) + True + >>> OrderedSet([1, 2, 3]).issubset({1, 4, 3, 5}) + False + """ + if len(self) > len(other): # Fast check for obvious cases + return False + return all(item in other for item in self) + + def issuperset(self, other): + """ + Report whether this set contains another set. + + Example: + >>> OrderedSet([1, 2]).issuperset([1, 2, 3]) + False + >>> OrderedSet([1, 2, 3, 4]).issuperset({1, 2, 3}) + True + >>> OrderedSet([1, 4, 3, 5]).issuperset({1, 2, 3}) + False + """ + if len(self) < len(other): # Fast check for obvious cases + return False + return all(item in self for item in other) + + def symmetric_difference(self, other): + """ + Return the symmetric difference of two OrderedSets as a new set. + That is, the new set will contain all elements that are in exactly + one of the sets. + + Their order will be preserved, with elements from `self` preceding + elements from `other`. + + Example: + >>> this = OrderedSet([1, 4, 3, 5, 7]) + >>> other = OrderedSet([9, 7, 1, 3, 2]) + >>> this.symmetric_difference(other) + OrderedSet([4, 5, 9, 2]) + """ + cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet + diff1 = cls(self).difference(other) + diff2 = cls(other).difference(self) + return diff1.union(diff2) + + def _update_items(self, items): + """ + Replace the 'items' list of this OrderedSet with a new one, updating + self.map accordingly. + """ + self.items = items + self.map = {item: idx for (idx, item) in enumerate(items)} + + def difference_update(self, *sets): + """ + Update this OrderedSet to remove items from one or more other sets. + + Example: + >>> this = OrderedSet([1, 2, 3]) + >>> this.difference_update(OrderedSet([2, 4])) + >>> print(this) + OrderedSet([1, 3]) + + >>> this = OrderedSet([1, 2, 3, 4, 5]) + >>> this.difference_update(OrderedSet([2, 4]), OrderedSet([1, 4, 6])) + >>> print(this) + OrderedSet([3, 5]) + """ + items_to_remove = set() + for other in sets: + items_to_remove |= set(other) + self._update_items([item for item in self.items if item not in items_to_remove]) + + def intersection_update(self, other): + """ + Update this OrderedSet to keep only items in another set, preserving + their order in this set. 
+ + Example: + >>> this = OrderedSet([1, 4, 3, 5, 7]) + >>> other = OrderedSet([9, 7, 1, 3, 2]) + >>> this.intersection_update(other) + >>> print(this) + OrderedSet([1, 3, 7]) + """ + other = set(other) + self._update_items([item for item in self.items if item in other]) + + def symmetric_difference_update(self, other): + """ + Update this OrderedSet to remove items from another set, then + add items from the other set that were not present in this set. + + Example: + >>> this = OrderedSet([1, 4, 3, 5, 7]) + >>> other = OrderedSet([9, 7, 1, 3, 2]) + >>> this.symmetric_difference_update(other) + >>> print(this) + OrderedSet([4, 5, 9, 2]) + """ + items_to_add = [item for item in other if item not in self] + items_to_remove = set(other) + self._update_items( + [item for item in self.items if item not in items_to_remove] + items_to_add + ) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__about__.py b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__about__.py new file mode 100644 index 0000000000000000000000000000000000000000..dc95138d049ba3194964d528b552a6d1514fa382 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__about__.py @@ -0,0 +1,27 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", +] + +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "19.2" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD or Apache License, Version 2.0" +__copyright__ = "Copyright 2014-2019 %s" % __author__ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__init__.py b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a0cf67df5245be16a020ca048832e180f7ce8661 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__init__.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
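# --- A sketch of how this vendored copy of packaging is normally reached
# --- from setuptools itself: not by importing setuptools._vendor directly,
# --- but through the setuptools.extern indirection listed in the RECORD
# --- above. (Assumes the setuptools 44 layout shipped in this venv.)
from setuptools.extern.packaging import version as packaging_version

assert packaging_version.parse("19.2") > packaging_version.parse("19.1")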
+from __future__ import absolute_import, division, print_function + +from .__about__ import ( + __author__, + __copyright__, + __email__, + __license__, + __summary__, + __title__, + __uri__, + __version__, +) + +__all__ = [ + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", +] diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eeaf3666a5ee7096013699fe0881e61632a5ffbe Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d5f69bdec5baa9d655a6e1c126d0b540f9e8ea52 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/_compat.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/_compat.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0b233443a1c4feaf8f6ef3126301583d5277f257 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/_compat.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f02438b3334f0875cbad2077a8d3debc55aa42eb Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2f608f93ec59b58af018776799c4dd8da02cc5a3 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..76e4c001b1323a3327ad22d72b68363c3229423c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..29e0fcfd7c54c6c9603d52a2e1d5c31598448aae Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9bedb9827d38e3995c52963384533b52113bbbba Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d7fefb660320048b2778a610ba9d6ec0b3455915 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..038bfdcf4abebe8040e6e61df1591c39d02147bb Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/_compat.py b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/_compat.py new file mode 100644 index 0000000000000000000000000000000000000000..25da473c196855ad59a6d2d785ef1ddef49795be --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/_compat.py @@ -0,0 +1,31 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import sys + + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +# flake8: noqa + +if PY3: + string_types = (str,) +else: + string_types = (basestring,) + + +def with_metaclass(meta, *bases): + """ + Create a base class with a metaclass. + """ + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + + return type.__new__(metaclass, "temporary_class", (), {}) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/_structures.py b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/_structures.py new file mode 100644 index 0000000000000000000000000000000000000000..68dcca634d8e3f0081bad2f9ae5e653a2942db68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/_structures.py @@ -0,0 +1,68 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
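The with_metaclass helper in _compat.py above is the classic six-style trick for declaring a metaclass in syntax that runs on both Python 2 and 3. A minimal sketch of the pattern as specifiers.py (later in this diff) consumes it; the direct import path is an assumption:

    import abc
    from setuptools._vendor.packaging._compat import with_metaclass  # assumed path

    # Equivalent to `class Base(object, metaclass=abc.ABCMeta)` on Python 3,
    # but also valid Python 2 syntax.
    class Base(with_metaclass(abc.ABCMeta, object)):
        @abc.abstractmethod
        def run(self):
            """Subclasses must implement."""

    # Base() raises TypeError until run() is overridden, as with any ABC.

The temporary metaclass replaces itself with the real one during the single class instantiation, so no dummy base class lingers in the resulting MRO.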
+from __future__ import absolute_import, division, print_function + + +class Infinity(object): + def __repr__(self): + return "Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return False + + def __le__(self, other): + return False + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return True + + def __ge__(self, other): + return True + + def __neg__(self): + return NegativeInfinity + + +Infinity = Infinity() + + +class NegativeInfinity(object): + def __repr__(self): + return "-Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return True + + def __le__(self, other): + return True + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return False + + def __ge__(self, other): + return False + + def __neg__(self): + return Infinity + + +NegativeInfinity = NegativeInfinity() diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/markers.py b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/markers.py new file mode 100644 index 0000000000000000000000000000000000000000..4bdfdb24f2096eac046bb9a576065bb96cfd476e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/markers.py @@ -0,0 +1,296 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import operator +import os +import platform +import sys + +from setuptools.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd +from setuptools.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString +from setuptools.extern.pyparsing import Literal as L # noqa + +from ._compat import string_types +from .specifiers import Specifier, InvalidSpecifier + + +__all__ = [ + "InvalidMarker", + "UndefinedComparison", + "UndefinedEnvironmentName", + "Marker", + "default_environment", +] + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. 
+ """ + + +class Node(object): + def __init__(self, value): + self.value = value + + def __str__(self): + return str(self.value) + + def __repr__(self): + return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) + + def serialize(self): + raise NotImplementedError + + +class Variable(Node): + def serialize(self): + return str(self) + + +class Value(Node): + def serialize(self): + return '"{0}"'.format(self) + + +class Op(Node): + def serialize(self): + return str(self) + + +VARIABLE = ( + L("implementation_version") + | L("platform_python_implementation") + | L("implementation_name") + | L("python_full_version") + | L("platform_release") + | L("platform_version") + | L("platform_machine") + | L("platform_system") + | L("python_version") + | L("sys_platform") + | L("os_name") + | L("os.name") + | L("sys.platform") # PEP-345 + | L("platform.version") # PEP-345 + | L("platform.machine") # PEP-345 + | L("platform.python_implementation") # PEP-345 + | L("python_implementation") # PEP-345 + | L("extra") # undocumented setuptools legacy +) +ALIASES = { + "os.name": "os_name", + "sys.platform": "sys_platform", + "platform.version": "platform_version", + "platform.machine": "platform_machine", + "platform.python_implementation": "platform_python_implementation", + "python_implementation": "platform_python_implementation", +} +VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) + +VERSION_CMP = ( + L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") +) + +MARKER_OP = VERSION_CMP | L("not in") | L("in") +MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) + +MARKER_VALUE = QuotedString("'") | QuotedString('"') +MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) + +BOOLOP = L("and") | L("or") + +MARKER_VAR = VARIABLE | MARKER_VALUE + +MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) +MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) + +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() + +MARKER_EXPR = Forward() +MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) +MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) + +MARKER = stringStart + MARKER_EXPR + stringEnd + + +def _coerce_parse_result(results): + if isinstance(results, ParseResults): + return [_coerce_parse_result(i) for i in results] + else: + return results + + +def _format_marker(marker, first=True): + assert isinstance(marker, (list, tuple, string_types)) + + # Sometimes we have a structure like [[...]] which is a single item list + # where the single item is itself it's own list. In that case we want skip + # the rest of this function so that we don't get extraneous () on the + # outside. 
+ if ( + isinstance(marker, list) + and len(marker) == 1 + and isinstance(marker[0], (list, tuple)) + ): + return _format_marker(marker[0]) + + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + else: + return "(" + " ".join(inner) + ")" + elif isinstance(marker, tuple): + return " ".join([m.serialize() for m in marker]) + else: + return marker + + +_operators = { + "in": lambda lhs, rhs: lhs in rhs, + "not in": lambda lhs, rhs: lhs not in rhs, + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, +} + + +def _eval_op(lhs, op, rhs): + try: + spec = Specifier("".join([op.serialize(), rhs])) + except InvalidSpecifier: + pass + else: + return spec.contains(lhs) + + oper = _operators.get(op.serialize()) + if oper is None: + raise UndefinedComparison( + "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) + ) + + return oper(lhs, rhs) + + +_undefined = object() + + +def _get_env(environment, name): + value = environment.get(name, _undefined) + + if value is _undefined: + raise UndefinedEnvironmentName( + "{0!r} does not exist in evaluation environment.".format(name) + ) + + return value + + +def _evaluate_markers(markers, environment): + groups = [[]] + + for marker in markers: + assert isinstance(marker, (list, tuple, string_types)) + + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + + if isinstance(lhs, Variable): + lhs_value = _get_env(environment, lhs.value) + rhs_value = rhs.value + else: + lhs_value = lhs.value + rhs_value = _get_env(environment, rhs.value) + + groups[-1].append(_eval_op(lhs_value, op, rhs_value)) + else: + assert marker in ["and", "or"] + if marker == "or": + groups.append([]) + + return any(all(item) for item in groups) + + +def format_full_version(info): + version = "{0.major}.{0.minor}.{0.micro}".format(info) + kind = info.releaselevel + if kind != "final": + version += kind[0] + str(info.serial) + return version + + +def default_environment(): + if hasattr(sys, "implementation"): + iver = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + else: + iver = "0" + implementation_name = "" + + return { + "implementation_name": implementation_name, + "implementation_version": iver, + "os_name": os.name, + "platform_machine": platform.machine(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "platform_python_implementation": platform.python_implementation(), + "python_version": ".".join(platform.python_version_tuple()[:2]), + "sys_platform": sys.platform, + } + + +class Marker(object): + def __init__(self, marker): + try: + self._markers = _coerce_parse_result(MARKER.parseString(marker)) + except ParseException as e: + err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( + marker, marker[e.loc : e.loc + 8] + ) + raise InvalidMarker(err_str) + + def __str__(self): + return _format_marker(self._markers) + + def __repr__(self): + return "<Marker({0!r})>".format(str(self)) + + def evaluate(self, environment=None): + """Evaluate a marker. + + Return the boolean from evaluating the given marker against the + environment. environment is an optional argument to override all or + part of the determined environment. 
+ + The environment is determined from the current Python process. + """ + current_environment = default_environment() + if environment is not None: + current_environment.update(environment) + + return _evaluate_markers(self._markers, current_environment) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/requirements.py b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/requirements.py new file mode 100644 index 0000000000000000000000000000000000000000..8a0c2cb9be06e633b26c7205d6efe42827835910 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/requirements.py @@ -0,0 +1,138 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import string +import re + +from setuptools.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException +from setuptools.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine +from setuptools.extern.pyparsing import Literal as L # noqa +from setuptools.extern.six.moves.urllib import parse as urlparse + +from .markers import MARKER_EXPR, Marker +from .specifiers import LegacySpecifier, Specifier, SpecifierSet + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. + """ + + +ALPHANUM = Word(string.ascii_letters + string.digits) + +LBRACKET = L("[").suppress() +RBRACKET = L("]").suppress() +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() +COMMA = L(",").suppress() +SEMICOLON = L(";").suppress() +AT = L("@").suppress() + +PUNCTUATION = Word("-_.") +IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) +IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) + +NAME = IDENTIFIER("name") +EXTRA = IDENTIFIER + +URI = Regex(r"[^ ]+")("url") +URL = AT + URI + +EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) +EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") + +VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) +VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) + +VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY +VERSION_MANY = Combine( + VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False +)("_raw_spec") +_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) +_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") + +VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") +VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) + +MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") +MARKER_EXPR.setParseAction( + lambda s, l, t: Marker(s[t._original_start : t._original_end]) +) +MARKER_SEPARATOR = SEMICOLON +MARKER = MARKER_SEPARATOR + MARKER_EXPR + +VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) +URL_AND_MARKER = URL + Optional(MARKER) + +NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) + +REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd +# setuptools.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see +# issue #104 +REQUIREMENT.parseString("x[]") + + +class Requirement(object): + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. 
+ """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? + + def __init__(self, requirement_string): + try: + req = REQUIREMENT.parseString(requirement_string) + except ParseException as e: + raise InvalidRequirement( + 'Parse error at "{0!r}": {1}'.format( + requirement_string[e.loc : e.loc + 8], e.msg + ) + ) + + self.name = req.name + if req.url: + parsed_url = urlparse.urlparse(req.url) + if parsed_url.scheme == "file": + if urlparse.urlunparse(parsed_url) != req.url: + raise InvalidRequirement("Invalid URL given") + elif not (parsed_url.scheme and parsed_url.netloc) or ( + not parsed_url.scheme and not parsed_url.netloc + ): + raise InvalidRequirement("Invalid URL: {0}".format(req.url)) + self.url = req.url + else: + self.url = None + self.extras = set(req.extras.asList() if req.extras else []) + self.specifier = SpecifierSet(req.specifier) + self.marker = req.marker if req.marker else None + + def __str__(self): + parts = [self.name] + + if self.extras: + parts.append("[{0}]".format(",".join(sorted(self.extras)))) + + if self.specifier: + parts.append(str(self.specifier)) + + if self.url: + parts.append("@ {0}".format(self.url)) + if self.marker: + parts.append(" ") + + if self.marker: + parts.append("; {0}".format(self.marker)) + + return "".join(parts) + + def __repr__(self): + return "<Requirement({0!r})>".format(str(self)) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/specifiers.py b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/specifiers.py new file mode 100644 index 0000000000000000000000000000000000000000..743576a080a0af8d0995f307ea6afc645b13ca61 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/specifiers.py @@ -0,0 +1,749 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import abc +import functools +import itertools +import re + +from ._compat import string_types, with_metaclass +from .version import Version, LegacyVersion, parse + + +class InvalidSpecifier(ValueError): + """ + An invalid specifier was found, users should refer to PEP 440. + """ + + +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): + @abc.abstractmethod + def __str__(self): + """ + Returns the str representation of this Specifier like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Returns a hash value for this Specifier like object. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are equal. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are not equal. + """ + + @abc.abstractproperty + def prereleases(self): + """ + Returns whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @prereleases.setter + def prereleases(self, value): + """ + Sets whether or not pre-releases as a whole are allowed by this + specifier. 
+ """ + + @abc.abstractmethod + def contains(self, item, prereleases=None): + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter(self, iterable, prereleases=None): + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. + """ + + +class _IndividualSpecifier(BaseSpecifier): + + _operators = {} + + def __init__(self, spec="", prereleases=None): + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) + + self._spec = (match.group("operator").strip(), match.group("version").strip()) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre) + + def __str__(self): + return "{0}{1}".format(*self._spec) + + def __hash__(self): + return hash(self._spec) + + def __eq__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec == other._spec + + def __ne__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec != other._spec + + def _get_operator(self, op): + return getattr(self, "_compare_{0}".format(self._operators[op])) + + def _coerce_version(self, version): + if not isinstance(version, (LegacyVersion, Version)): + version = parse(version) + return version + + @property + def operator(self): + return self._spec[0] + + @property + def version(self): + return self._spec[1] + + @property + def prereleases(self): + return self._prereleases + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def __contains__(self, item): + return self.contains(item) + + def contains(self, item, prereleases=None): + # Determine if prereleases are to be allowed or not. + if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version or LegacyVersion, this allows us to have + # a shortcut for ``"2.0" in Specifier(">=2") + item = self._coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not, if we do not support prereleases than we can short circuit + # logic if this version is a prereleases. + if item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. + return self._get_operator(self.operator)(item, self.version) + + def filter(self, iterable, prereleases=None): + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. + for version in iterable: + parsed_version = self._coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later incase nothing + # else matches this specifier. 
+ if parsed_version.is_prerelease and not ( + prereleases or self.prereleases + ): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the beginning. + else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +class LegacySpecifier(_IndividualSpecifier): + + _regex_str = r""" + (?P<operator>(==|!=|<=|>=|<|>)) + \s* + (?P<version> + [^,;\s)]* # Since this is a "legacy" specifier, and the version + # string can be just about anything, we match everything + # except for whitespace, a semi-colon for marker support, + # a closing paren since versions can be enclosed in + # them, and a comma since it's a version separator. + ) + """ + + _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + + _operators = { + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + } + + def _coerce_version(self, version): + if not isinstance(version, LegacyVersion): + version = LegacyVersion(str(version)) + return version + + def _compare_equal(self, prospective, spec): + return prospective == self._coerce_version(spec) + + def _compare_not_equal(self, prospective, spec): + return prospective != self._coerce_version(spec) + + def _compare_less_than_equal(self, prospective, spec): + return prospective <= self._coerce_version(spec) + + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= self._coerce_version(spec) + + def _compare_less_than(self, prospective, spec): + return prospective < self._coerce_version(spec) + + def _compare_greater_than(self, prospective, spec): + return prospective > self._coerce_version(spec) + + +def _require_version_compare(fn): + @functools.wraps(fn) + def wrapped(self, prospective, spec): + if not isinstance(prospective, Version): + return False + return fn(self, prospective, spec) + + return wrapped + + +class Specifier(_IndividualSpecifier): + + _regex_str = r""" + (?P<operator>(~=|==|!=|<=|>=|<|>|===)) + (?P<version> + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s]* # We just match everything, except for whitespace + # since we are only testing for strict identity. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + + # You cannot use a wild card and a dev or local version + # together so group them with a | and make them optional. + (?: + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + | + \.\* # Wild card syntax of .* + )? 
+ ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. + (?<!==|!=|~=) # We have special cases for these + # operators so we want to make sure they + # don't match here. + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + ) + """ + + _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + + _operators = { + "~=": "compatible", + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + @_require_version_compare + def _compare_compatible(self, prospective, spec): + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore post and dev releases and we want to treat the pre-release as + # it's own separate segment. + prefix = ".".join( + list( + itertools.takewhile( + lambda x: (not x.startswith("post") and not x.startswith("dev")), + _version_split(spec), + ) + )[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( + prospective, prefix + ) + + @_require_version_compare + def _compare_equal(self, prospective, spec): + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + prospective = Version(prospective.public) + # Split the spec out by dots, and pretend that there is an implicit + # dot in between a release segment and a pre-release segment. + spec = _version_split(spec[:-2]) # Remove the trailing .* + + # Split the prospective version out by dots, and pretend that there + # is an implicit dot in between a release segment and a pre-release + # segment. + prospective = _version_split(str(prospective)) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + prospective = prospective[: len(spec)] + + # Pad out our two sides with zeros so that they both equal the same + # length. + spec, prospective = _pad_version(spec, prospective) + else: + # Convert our spec string into a Version + spec = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. 
+ if not spec.local: + prospective = Version(prospective.public) + + return prospective == spec + + @_require_version_compare + def _compare_not_equal(self, prospective, spec): + return not self._compare_equal(prospective, spec) + + @_require_version_compare + def _compare_less_than_equal(self, prospective, spec): + return prospective <= Version(spec) + + @_require_version_compare + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= Version(spec) + + @_require_version_compare + def _compare_less_than(self, prospective, spec): + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if not spec.is_prerelease and prospective.is_prerelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + @_require_version_compare + def _compare_greater_than(self, prospective, spec): + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if not spec.is_postrelease and prospective.is_postrelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is technically greater than, to match. + if prospective.local is not None: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective, spec): + return str(prospective).lower() == str(spec).lower() + + @property + def prereleases(self): + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "==="]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. 
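Pulling the comparison implementations above together, a sketch of how the compatible-release, prefix-match, and exclusive-bound operators behave (again via the stand-alone packaging API, which this vendored copy mirrors):

    from packaging.specifiers import Specifier

    Specifier("~=3.1").contains("3.6")      # True:  ~=3.1 acts like >=3.1,==3.*
    Specifier("~=3.1").contains("4.0")      # False: outside the ==3.* prefix
    Specifier("==3.1.*").contains("3.1.7")  # True:  prefix match ignores the tail
    Specifier("<3.1").contains("3.1.dev0", prereleases=True)
    # False: <3.1 deliberately refuses pre-releases of 3.1 itself
    # (it would still accept 3.0.dev0), per _compare_less_than above.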
+ if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if parse(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version): + result = [] + for item in version.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _pad_version(left, right): + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]) :]) + right_split.append(right[len(right_split[0]) :]) + + # Insert our padding + left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) + right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) + + return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split))) + + +class SpecifierSet(BaseSpecifier): + def __init__(self, specifiers="", prereleases=None): + # Split on , to break each indidivual specifier into it's own item, and + # strip each item to remove leading/trailing whitespace. + specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Parsed each individual specifier, attempting first to make it a + # Specifier and falling back to a LegacySpecifier. + parsed = set() + for specifier in specifiers: + try: + parsed.add(Specifier(specifier)) + except InvalidSpecifier: + parsed.add(LegacySpecifier(specifier)) + + # Turn our parsed specifiers into a frozen set and save them for later. + self._specs = frozenset(parsed) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<SpecifierSet({0!r}{1})>".format(str(self), pre) + + def __str__(self): + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self): + return hash(self._specs) + + def __and__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease " + "overrides." 
+ ) + + return specifier + + def __eq__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __ne__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs != other._specs + + def __len__(self): + return len(self._specs) + + def __iter__(self): + return iter(self._specs) + + @property + def prereleases(self): + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def __contains__(self, item): + return self.contains(item) + + def contains(self, item, prereleases=None): + # Ensure that our item is a Version or LegacyVersion instance. + if not isinstance(item, (LegacyVersion, Version)): + item = parse(item) + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. + # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 + if not prereleases and item.is_prerelease: + return False + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True, this is an explicit design decision. + return all(s.contains(item, prereleases=prereleases) for s in self._specs) + + def filter(self, iterable, prereleases=None): + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iterable + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases, and which will filter out LegacyVersion in general. 
+ else: + filtered = [] + found_prereleases = [] + + for item in iterable: + # Ensure that we some kind of Version class for this item. + if not isinstance(item, (LegacyVersion, Version)): + parsed_version = parse(item) + else: + parsed_version = item + + # Filter out any item which is parsed as a LegacyVersion + if isinstance(parsed_version, LegacyVersion): + continue + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return found_prereleases + + return filtered diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/tags.py b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/tags.py new file mode 100644 index 0000000000000000000000000000000000000000..ec9942f0f6627f34554082a8c0909bc70bd2a260 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/tags.py @@ -0,0 +1,404 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import + +import distutils.util + +try: + from importlib.machinery import EXTENSION_SUFFIXES +except ImportError: # pragma: no cover + import imp + + EXTENSION_SUFFIXES = [x[0] for x in imp.get_suffixes()] + del imp +import platform +import re +import sys +import sysconfig +import warnings + + +INTERPRETER_SHORT_NAMES = { + "python": "py", # Generic. + "cpython": "cp", + "pypy": "pp", + "ironpython": "ip", + "jython": "jy", +} + + +_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 + + +class Tag(object): + + __slots__ = ["_interpreter", "_abi", "_platform"] + + def __init__(self, interpreter, abi, platform): + self._interpreter = interpreter.lower() + self._abi = abi.lower() + self._platform = platform.lower() + + @property + def interpreter(self): + return self._interpreter + + @property + def abi(self): + return self._abi + + @property + def platform(self): + return self._platform + + def __eq__(self, other): + return ( + (self.platform == other.platform) + and (self.abi == other.abi) + and (self.interpreter == other.interpreter) + ) + + def __hash__(self): + return hash((self._interpreter, self._abi, self._platform)) + + def __str__(self): + return "{}-{}-{}".format(self._interpreter, self._abi, self._platform) + + def __repr__(self): + return "<{self} @ {self_id}>".format(self=self, self_id=id(self)) + + +def parse_tag(tag): + tags = set() + interpreters, abis, platforms = tag.split("-") + for interpreter in interpreters.split("."): + for abi in abis.split("."): + for platform_ in platforms.split("."): + tags.add(Tag(interpreter, abi, platform_)) + return frozenset(tags) + + +def _normalize_string(string): + return string.replace(".", "_").replace("-", "_") + + +def _cpython_interpreter(py_version): + # TODO: Is using py_version_nodot for interpreter version critical? 
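Stepping back from tags.py for a moment: the SpecifierSet completed in the previous hunk is what Requirement (earlier in this diff) stores in .specifier, so the two compose directly. A usage sketch via the stand-alone packaging distribution:

    from packaging.requirements import Requirement

    req = Requirement('requests[security]>=2.8.1,!=2.9.0; python_version >= "2.7"')
    req.name                   # 'requests'
    req.extras                 # {'security'}
    "2.18.4" in req.specifier  # True: SpecifierSet ANDs its individual specifiers
    "2.9.0" in req.specifier   # False
    req.marker.evaluate()      # True/False for the running interpreter

Note the design choice called out in contains(): SpecifierSet uses all() over its specifiers, so an empty specifier set accepts every version by design.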
+ return "cp{major}{minor}".format(major=py_version[0], minor=py_version[1]) + + +def _cpython_abis(py_version): + abis = [] + version = "{}{}".format(*py_version[:2]) + debug = pymalloc = ucs4 = "" + with_debug = sysconfig.get_config_var("Py_DEBUG") + has_refcount = hasattr(sys, "gettotalrefcount") + # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled + # extension modules is the best option. + # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 + has_ext = "_d.pyd" in EXTENSION_SUFFIXES + if with_debug or (with_debug is None and (has_refcount or has_ext)): + debug = "d" + if py_version < (3, 8): + with_pymalloc = sysconfig.get_config_var("WITH_PYMALLOC") + if with_pymalloc or with_pymalloc is None: + pymalloc = "m" + if py_version < (3, 3): + unicode_size = sysconfig.get_config_var("Py_UNICODE_SIZE") + if unicode_size == 4 or ( + unicode_size is None and sys.maxunicode == 0x10FFFF + ): + ucs4 = "u" + elif debug: + # Debug builds can also load "normal" extension modules. + # We can also assume no UCS-4 or pymalloc requirement. + abis.append("cp{version}".format(version=version)) + abis.insert( + 0, + "cp{version}{debug}{pymalloc}{ucs4}".format( + version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4 + ), + ) + return abis + + +def _cpython_tags(py_version, interpreter, abis, platforms): + for abi in abis: + for platform_ in platforms: + yield Tag(interpreter, abi, platform_) + for tag in (Tag(interpreter, "abi3", platform_) for platform_ in platforms): + yield tag + for tag in (Tag(interpreter, "none", platform_) for platform_ in platforms): + yield tag + # PEP 384 was first implemented in Python 3.2. + for minor_version in range(py_version[1] - 1, 1, -1): + for platform_ in platforms: + interpreter = "cp{major}{minor}".format( + major=py_version[0], minor=minor_version + ) + yield Tag(interpreter, "abi3", platform_) + + +def _pypy_interpreter(): + return "pp{py_major}{pypy_major}{pypy_minor}".format( + py_major=sys.version_info[0], + pypy_major=sys.pypy_version_info.major, + pypy_minor=sys.pypy_version_info.minor, + ) + + +def _generic_abi(): + abi = sysconfig.get_config_var("SOABI") + if abi: + return _normalize_string(abi) + else: + return "none" + + +def _pypy_tags(py_version, interpreter, abi, platforms): + for tag in (Tag(interpreter, abi, platform) for platform in platforms): + yield tag + for tag in (Tag(interpreter, "none", platform) for platform in platforms): + yield tag + + +def _generic_tags(interpreter, py_version, abi, platforms): + for tag in (Tag(interpreter, abi, platform) for platform in platforms): + yield tag + if abi != "none": + tags = (Tag(interpreter, "none", platform_) for platform_ in platforms) + for tag in tags: + yield tag + + +def _py_interpreter_range(py_version): + """ + Yield Python versions in descending order. + + After the latest version, the major-only version will be yielded, and then + all following versions up to 'end'. + """ + yield "py{major}{minor}".format(major=py_version[0], minor=py_version[1]) + yield "py{major}".format(major=py_version[0]) + for minor in range(py_version[1] - 1, -1, -1): + yield "py{major}{minor}".format(major=py_version[0], minor=minor) + + +def _independent_tags(interpreter, py_version, platforms): + """ + Return the sequence of tags that are consistent across implementations. 
+ + The tags consist of: + - py*-none-<platform> + - <interpreter>-none-any + - py*-none-any + """ + for version in _py_interpreter_range(py_version): + for platform_ in platforms: + yield Tag(version, "none", platform_) + yield Tag(interpreter, "none", "any") + for version in _py_interpreter_range(py_version): + yield Tag(version, "none", "any") + + +def _mac_arch(arch, is_32bit=_32_BIT_INTERPRETER): + if not is_32bit: + return arch + + if arch.startswith("ppc"): + return "ppc" + + return "i386" + + +def _mac_binary_formats(version, cpu_arch): + formats = [cpu_arch] + if cpu_arch == "x86_64": + if version < (10, 4): + return [] + formats.extend(["intel", "fat64", "fat32"]) + + elif cpu_arch == "i386": + if version < (10, 4): + return [] + formats.extend(["intel", "fat32", "fat"]) + + elif cpu_arch == "ppc64": + # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? + if version > (10, 5) or version < (10, 4): + return [] + formats.append("fat64") + + elif cpu_arch == "ppc": + if version > (10, 6): + return [] + formats.extend(["fat32", "fat"]) + + formats.append("universal") + return formats + + +def _mac_platforms(version=None, arch=None): + version_str, _, cpu_arch = platform.mac_ver() + if version is None: + version = tuple(map(int, version_str.split(".")[:2])) + if arch is None: + arch = _mac_arch(cpu_arch) + platforms = [] + for minor_version in range(version[1], -1, -1): + compat_version = version[0], minor_version + binary_formats = _mac_binary_formats(compat_version, arch) + for binary_format in binary_formats: + platforms.append( + "macosx_{major}_{minor}_{binary_format}".format( + major=compat_version[0], + minor=compat_version[1], + binary_format=binary_format, + ) + ) + return platforms + + +# From PEP 513. +def _is_manylinux_compatible(name, glibc_version): + # Check for presence of _manylinux module. + try: + import _manylinux + + return bool(getattr(_manylinux, name + "_compatible")) + except (ImportError, AttributeError): + # Fall through to heuristic check below. + pass + + return _have_compatible_glibc(*glibc_version) + + +def _glibc_version_string(): + # Returns glibc version string, or None if not using glibc. + import ctypes + + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + process_namespace = ctypes.CDLL(None) + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return None + + # Call gnu_get_libc_version, which returns a string like "2.5" + gnu_get_libc_version.restype = ctypes.c_char_p + version_str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + return version_str + + +# Separated out from have_compatible_glibc for easier unit testing. +def _check_glibc_version(version_str, required_major, minimum_minor): + # Parse string and check against requested version. + # + # We use a regexp instead of str.split because we want to discard any + # random junk that might come after the minor version -- this might happen + # in patched/forked versions of glibc (e.g. Linaro's version of glibc + # uses version strings like "2.20-2014.11"). See gh-3588. 
+ m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str) + if not m: + warnings.warn( + "Expected glibc version with 2 components major.minor," + " got: %s" % version_str, + RuntimeWarning, + ) + return False + return ( + int(m.group("major")) == required_major + and int(m.group("minor")) >= minimum_minor + ) + + +def _have_compatible_glibc(required_major, minimum_minor): + version_str = _glibc_version_string() + if version_str is None: + return False + return _check_glibc_version(version_str, required_major, minimum_minor) + + +def _linux_platforms(is_32bit=_32_BIT_INTERPRETER): + linux = _normalize_string(distutils.util.get_platform()) + if linux == "linux_x86_64" and is_32bit: + linux = "linux_i686" + manylinux_support = ( + ("manylinux2014", (2, 17)), # CentOS 7 w/ glibc 2.17 (PEP 599) + ("manylinux2010", (2, 12)), # CentOS 6 w/ glibc 2.12 (PEP 571) + ("manylinux1", (2, 5)), # CentOS 5 w/ glibc 2.5 (PEP 513) + ) + manylinux_support_iter = iter(manylinux_support) + for name, glibc_version in manylinux_support_iter: + if _is_manylinux_compatible(name, glibc_version): + platforms = [linux.replace("linux", name)] + break + else: + platforms = [] + # Support for a later manylinux implies support for an earlier version. + platforms += [linux.replace("linux", name) for name, _ in manylinux_support_iter] + platforms.append(linux) + return platforms + + +def _generic_platforms(): + platform = _normalize_string(distutils.util.get_platform()) + return [platform] + + +def _interpreter_name(): + name = platform.python_implementation().lower() + return INTERPRETER_SHORT_NAMES.get(name) or name + + +def _generic_interpreter(name, py_version): + version = sysconfig.get_config_var("py_version_nodot") + if not version: + version = "".join(map(str, py_version[:2])) + return "{name}{version}".format(name=name, version=version) + + +def sys_tags(): + """ + Returns the sequence of tag triples for the running interpreter. + + The order of the sequence corresponds to priority order for the + interpreter, from most to least important. + """ + py_version = sys.version_info[:2] + interpreter_name = _interpreter_name() + if platform.system() == "Darwin": + platforms = _mac_platforms() + elif platform.system() == "Linux": + platforms = _linux_platforms() + else: + platforms = _generic_platforms() + + if interpreter_name == "cp": + interpreter = _cpython_interpreter(py_version) + abis = _cpython_abis(py_version) + for tag in _cpython_tags(py_version, interpreter, abis, platforms): + yield tag + elif interpreter_name == "pp": + interpreter = _pypy_interpreter() + abi = _generic_abi() + for tag in _pypy_tags(py_version, interpreter, abi, platforms): + yield tag + else: + interpreter = _generic_interpreter(interpreter_name, py_version) + abi = _generic_abi() + for tag in _generic_tags(interpreter, py_version, abi, platforms): + yield tag + for tag in _independent_tags(interpreter, py_version, platforms): + yield tag diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/utils.py b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..88418786933b8bc5f6179b8e191f60f79efd7074 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/utils.py @@ -0,0 +1,57 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
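The sys_tags() generator above is the entry point most callers care about: it yields wheel tags for the running interpreter in priority order, most specific first. A hedged sketch; exact output is platform-dependent, and the stand-alone packaging release exposes the same functions as this vendored 19.2 copy:

    from packaging import tags

    for tag in list(tags.sys_tags())[:3]:
        print(tag)  # e.g. cp38-cp38-manylinux2014_x86_64 on a glibc 2.17+ x86-64 Linux

    tags.parse_tag("py2.py3-none-any")
    # a frozenset of two Tags (py2-none-any, py3-none-any):
    # dot-separated fields expand combinatorially.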
+from __future__ import absolute_import, division, print_function
+
+import re
+
+from .version import InvalidVersion, Version
+
+
+_canonicalize_regex = re.compile(r"[-_.]+")
+
+
+def canonicalize_name(name):
+    # This is taken from PEP 503.
+    return _canonicalize_regex.sub("-", name).lower()
+
+
+def canonicalize_version(version):
+    """
+    This is very similar to Version.__str__, but has one subtle difference
+    in the way it handles the release segment.
+    """
+
+    try:
+        version = Version(version)
+    except InvalidVersion:
+        # Legacy versions cannot be normalized
+        return version
+
+    parts = []
+
+    # Epoch
+    if version.epoch != 0:
+        parts.append("{0}!".format(version.epoch))
+
+    # Release segment
+    # NB: This strips trailing '.0's to normalize
+    parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release)))
+
+    # Pre-release
+    if version.pre is not None:
+        parts.append("".join(str(x) for x in version.pre))
+
+    # Post-release
+    if version.post is not None:
+        parts.append(".post{0}".format(version.post))
+
+    # Development release
+    if version.dev is not None:
+        parts.append(".dev{0}".format(version.dev))
+
+    # Local version segment
+    if version.local is not None:
+        parts.append("+{0}".format(version.local))
+
+    return "".join(parts)
diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/version.py b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..95157a1f78c26829ffbe1bd2463f7735b636d16f
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/packaging/version.py
@@ -0,0 +1,420 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+from __future__ import absolute_import, division, print_function
+
+import collections
+import itertools
+import re
+
+from ._structures import Infinity
+
+
+__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
+
+
+_Version = collections.namedtuple(
+    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
+)
+
+
+def parse(version):
+    """
+    Parse the given version string and return either a :class:`Version` object
+    or a :class:`LegacyVersion` object depending on whether the given version
+    is a valid PEP 440 version or a legacy version.
+    """
+    try:
+        return Version(version)
+    except InvalidVersion:
+        return LegacyVersion(version)
+
+
+class InvalidVersion(ValueError):
+    """
+    An invalid version was found, users should refer to PEP 440.
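+
+    For example (illustrative), "1.0.post1" parses as a :class:`Version`,
+    while "not a version" raises this exception, which is why :func:`parse`
+    falls back to :class:`LegacyVersion`.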
+ """ + + +class _BaseVersion(object): + def __hash__(self): + return hash(self._key) + + def __lt__(self, other): + return self._compare(other, lambda s, o: s < o) + + def __le__(self, other): + return self._compare(other, lambda s, o: s <= o) + + def __eq__(self, other): + return self._compare(other, lambda s, o: s == o) + + def __ge__(self, other): + return self._compare(other, lambda s, o: s >= o) + + def __gt__(self, other): + return self._compare(other, lambda s, o: s > o) + + def __ne__(self, other): + return self._compare(other, lambda s, o: s != o) + + def _compare(self, other, method): + if not isinstance(other, _BaseVersion): + return NotImplemented + + return method(self._key, other._key) + + +class LegacyVersion(_BaseVersion): + def __init__(self, version): + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + + def __str__(self): + return self._version + + def __repr__(self): + return "<LegacyVersion({0})>".format(repr(str(self))) + + @property + def public(self): + return self._version + + @property + def base_version(self): + return self._version + + @property + def epoch(self): + return -1 + + @property + def release(self): + return None + + @property + def pre(self): + return None + + @property + def post(self): + return None + + @property + def dev(self): + return None + + @property + def local(self): + return None + + @property + def is_prerelease(self): + return False + + @property + def is_postrelease(self): + return False + + @property + def is_devrelease(self): + return False + + +_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) + +_legacy_version_replacement_map = { + "pre": "c", + "preview": "c", + "-": "final-", + "rc": "c", + "dev": "@", +} + + +def _parse_version_parts(s): + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + + if not part or part == ".": + continue + + if part[:1] in "0123456789": + # pad for numeric comparison + yield part.zfill(8) + else: + yield "*" + part + + # ensure that alpha/beta/candidate are before final + yield "*final" + + +def _legacy_cmpkey(version): + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch + # greater than or equal to 0. This will effectively put the LegacyVersion, + # which uses the defacto standard originally implemented by setuptools, + # as before all PEP 440 versions. + epoch = -1 + + # This scheme is taken from pkg_resources.parse_version setuptools prior to + # it's adoption of the packaging library. + parts = [] + for part in _parse_version_parts(version.lower()): + if part.startswith("*"): + # remove "-" before a prerelease tag + if part < "*final": + while parts and parts[-1] == "*final-": + parts.pop() + + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == "00000000": + parts.pop() + + parts.append(part) + parts = tuple(parts) + + return epoch, parts + + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse +VERSION_PATTERN = r""" + v? + (?: + (?:(?P<epoch>[0-9]+)!)? # epoch + (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment + (?P<pre> # pre-release + [-_\.]? + (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview)) + [-_\.]? + (?P<pre_n>[0-9]+)? + )? + (?P<post> # post release + (?:-(?P<post_n1>[0-9]+)) + | + (?: + [-_\.]? + (?P<post_l>post|rev|r) + [-_\.]? + (?P<post_n2>[0-9]+)? + ) + )? + (?P<dev> # dev release + [-_\.]? + (?P<dev_l>dev) + [-_\.]? + (?P<dev_n>[0-9]+)? + )? 
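+        # (editorial example) "1!2.0.post3.dev4" walks the epoch, release,
+        # post and dev groups above; comments are legal here under re.VERBOSE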
+ ) + (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version +""" + + +class Version(_BaseVersion): + + _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE) + + def __init__(self, version): + # Validate the version and parse it into pieces + match = self._regex.search(version) + if not match: + raise InvalidVersion("Invalid version: '{0}'".format(version)) + + # Store the parsed out pieces of the version + self._version = _Version( + epoch=int(match.group("epoch")) if match.group("epoch") else 0, + release=tuple(int(i) for i in match.group("release").split(".")), + pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")), + post=_parse_letter_version( + match.group("post_l"), match.group("post_n1") or match.group("post_n2") + ), + dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")), + local=_parse_local_version(match.group("local")), + ) + + # Generate a key which will be used for sorting + self._key = _cmpkey( + self._version.epoch, + self._version.release, + self._version.pre, + self._version.post, + self._version.dev, + self._version.local, + ) + + def __repr__(self): + return "<Version({0})>".format(repr(str(self))) + + def __str__(self): + parts = [] + + # Epoch + if self.epoch != 0: + parts.append("{0}!".format(self.epoch)) + + # Release segment + parts.append(".".join(str(x) for x in self.release)) + + # Pre-release + if self.pre is not None: + parts.append("".join(str(x) for x in self.pre)) + + # Post-release + if self.post is not None: + parts.append(".post{0}".format(self.post)) + + # Development release + if self.dev is not None: + parts.append(".dev{0}".format(self.dev)) + + # Local version segment + if self.local is not None: + parts.append("+{0}".format(self.local)) + + return "".join(parts) + + @property + def epoch(self): + return self._version.epoch + + @property + def release(self): + return self._version.release + + @property + def pre(self): + return self._version.pre + + @property + def post(self): + return self._version.post[1] if self._version.post else None + + @property + def dev(self): + return self._version.dev[1] if self._version.dev else None + + @property + def local(self): + if self._version.local: + return ".".join(str(x) for x in self._version.local) + else: + return None + + @property + def public(self): + return str(self).split("+", 1)[0] + + @property + def base_version(self): + parts = [] + + # Epoch + if self.epoch != 0: + parts.append("{0}!".format(self.epoch)) + + # Release segment + parts.append(".".join(str(x) for x in self.release)) + + return "".join(parts) + + @property + def is_prerelease(self): + return self.dev is not None or self.pre is not None + + @property + def is_postrelease(self): + return self.post is not None + + @property + def is_devrelease(self): + return self.dev is not None + + +def _parse_letter_version(letter, number): + if letter: + # We consider there to be an implicit 0 in a pre-release if there is + # not a numeral associated with it. + if number is None: + number = 0 + + # We normalize any letters to their lower case form + letter = letter.lower() + + # We consider some words to be alternate spellings of other words and + # in those cases we want to normalize the spellings to our preferred + # spelling. 
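+        #
+        # (editorial summary) alpha -> a, beta -> b, c/pre/preview -> rc,
+        # rev/r -> post: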
+ if letter == "alpha": + letter = "a" + elif letter == "beta": + letter = "b" + elif letter in ["c", "pre", "preview"]: + letter = "rc" + elif letter in ["rev", "r"]: + letter = "post" + + return letter, int(number) + if not letter and number: + # We assume if we are given a number, but we are not given a letter + # then this is using the implicit post release syntax (e.g. 1.0-1) + letter = "post" + + return letter, int(number) + + +_local_version_separators = re.compile(r"[\._-]") + + +def _parse_local_version(local): + """ + Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). + """ + if local is not None: + return tuple( + part.lower() if not part.isdigit() else int(part) + for part in _local_version_separators.split(local) + ) + + +def _cmpkey(epoch, release, pre, post, dev, local): + # When we compare a release version, we want to compare it with all of the + # trailing zeros removed. So we'll use a reverse the list, drop all the now + # leading zeros until we come to something non zero, then take the rest + # re-reverse it back into the correct order and make it a tuple and use + # that for our sorting key. + release = tuple( + reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release)))) + ) + + # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0. + # We'll do this by abusing the pre segment, but we _only_ want to do this + # if there is not a pre or a post segment. If we have one of those then + # the normal sorting rules will handle this case correctly. + if pre is None and post is None and dev is not None: + pre = -Infinity + # Versions without a pre-release (except as noted above) should sort after + # those with one. + elif pre is None: + pre = Infinity + + # Versions without a post segment should sort before those with one. + if post is None: + post = -Infinity + + # Versions without a development segment should sort after those with one. + if dev is None: + dev = Infinity + + if local is None: + # Versions without a local segment should sort before those with one. + local = -Infinity + else: + # Versions with a local segment need that segment parsed to implement + # the sorting rules in PEP440. + # - Alpha numeric segments sort before numeric segments + # - Alpha numeric segments sort lexicographically + # - Numeric segments sort numerically + # - Shorter versions sort before longer versions when the prefixes + # match exactly + local = tuple((i, "") if isinstance(i, int) else (-Infinity, i) for i in local) + + return epoch, release, pre, post, dev, local diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/pyparsing.py b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/pyparsing.py new file mode 100644 index 0000000000000000000000000000000000000000..cf75e1e5fcbfe7eac41d2a9e446c5c980741087b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/pyparsing.py @@ -0,0 +1,5742 @@ +# module pyparsing.py +# +# Copyright (c) 2003-2018 Paul T. 
McGuire +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__doc__ = \ +""" +pyparsing module - Classes and methods to define and execute parsing grammars +============================================================================= + +The pyparsing module is an alternative approach to creating and executing simple grammars, +vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you +don't need to learn a new syntax for defining grammars or matching expressions - the parsing module +provides a library of classes that you use to construct the grammar directly in Python. + +Here is a program to parse "Hello, World!" (or any greeting of the form +C{"<salutation>, <addressee>!"}), built up using L{Word}, L{Literal}, and L{And} elements +(L{'+'<ParserElement.__add__>} operator gives L{And} expressions, strings are auto-converted to +L{Literal} expressions):: + + from pyparsing import Word, alphas + + # define grammar of a greeting + greet = Word(alphas) + "," + Word(alphas) + "!" + + hello = "Hello, World!" + print (hello, "->", greet.parseString(hello)) + +The program outputs the following:: + + Hello, World! -> ['Hello', ',', 'World', '!'] + +The Python representation of the grammar is quite readable, owing to the self-explanatory +class names, and the use of '+', '|' and '^' operators. + +The L{ParseResults} object returned from L{ParserElement.parseString<ParserElement.parseString>} can be accessed as a nested list, a dictionary, or an +object with named attributes. + +The pyparsing module handles some of the problems that are typically vexing when writing text parsers: + - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) + - quoted strings + - embedded comments + + +Getting Started - +----------------- +Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing +classes inherit from. 
Use the docstrings for examples of how to: + - construct literal match expressions from L{Literal} and L{CaselessLiteral} classes + - construct character word-group expressions using the L{Word} class + - see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes + - use L{'+'<And>}, L{'|'<MatchFirst>}, L{'^'<Or>}, and L{'&'<Each>} operators to combine simple expressions into more complex ones + - associate names with your parsed results using L{ParserElement.setResultsName} + - find some helpful expression short-cuts like L{delimitedList} and L{oneOf} + - find more useful common expressions in the L{pyparsing_common} namespace class +""" + +__version__ = "2.2.1" +__versionTime__ = "18 Sep 2018 00:49 UTC" +__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>" + +import string +from weakref import ref as wkref +import copy +import sys +import warnings +import re +import sre_constants +import collections +import pprint +import traceback +import types +from datetime import datetime + +try: + from _thread import RLock +except ImportError: + from threading import RLock + +try: + # Python 3 + from collections.abc import Iterable + from collections.abc import MutableMapping +except ImportError: + # Python 2.7 + from collections import Iterable + from collections import MutableMapping + +try: + from collections import OrderedDict as _OrderedDict +except ImportError: + try: + from ordereddict import OrderedDict as _OrderedDict + except ImportError: + _OrderedDict = None + +#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) + +__all__ = [ +'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', +'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', +'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', +'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', +'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', +'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', +'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', +'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', +'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', +'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', +'htmlComment', 'javaStyleComment', 'line', 'lineEnd', 'lineStart', 'lineno', +'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', +'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', +'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', +'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', +'stringStart', 'traceParseAction', 'unicodeString', 'upcaseTokens', 'withAttribute', +'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', +'CloseMatch', 'tokenMap', 'pyparsing_common', +] + +system_version = tuple(sys.version_info)[:3] +PY_3 = system_version[0] == 3 +if PY_3: + _MAX_INT = sys.maxsize + basestring = str + unichr = chr + _ustr = str + + # build list of single arg builtins, that can be used as parse actions + singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max] + +else: + _MAX_INT = sys.maxint + 
range = xrange + + def _ustr(obj): + """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries + str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It + then < returns the unicode object | encodes it with the default encoding | ... >. + """ + if isinstance(obj,unicode): + return obj + + try: + # If this works, then _ustr(obj) has the same behaviour as str(obj), so + # it won't break any existing code. + return str(obj) + + except UnicodeEncodeError: + # Else encode it + ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') + xmlcharref = Regex(r'&#\d+;') + xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) + return xmlcharref.transformString(ret) + + # build list of single arg builtins, tolerant of Python version, that can be used as parse actions + singleArgBuiltins = [] + import __builtin__ + for fname in "sum len sorted reversed list tuple set any all min max".split(): + try: + singleArgBuiltins.append(getattr(__builtin__,fname)) + except AttributeError: + continue + +_generatorType = type((y for y in range(1))) + +def _xml_escape(data): + """Escape &, <, >, ", ', etc. in a string of data.""" + + # ampersand must be replaced first + from_symbols = '&><"\'' + to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) + for from_,to_ in zip(from_symbols, to_symbols): + data = data.replace(from_, to_) + return data + +class _Constants(object): + pass + +alphas = string.ascii_uppercase + string.ascii_lowercase +nums = "0123456789" +hexnums = nums + "ABCDEFabcdef" +alphanums = alphas + nums +_bslash = chr(92) +printables = "".join(c for c in string.printable if c not in string.whitespace) + +class ParseBaseException(Exception): + """base exception class for all parsing runtime exceptions""" + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( self, pstr, loc=0, msg=None, elem=None ): + self.loc = loc + if msg is None: + self.msg = pstr + self.pstr = "" + else: + self.msg = msg + self.pstr = pstr + self.parserElement = elem + self.args = (pstr, loc, msg) + + @classmethod + def _from_exception(cls, pe): + """ + internal factory method to simplify creating one type of ParseException + from another - avoids having __init__ signature conflicts among subclasses + """ + return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement) + + def __getattr__( self, aname ): + """supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + """ + if( aname == "lineno" ): + return lineno( self.loc, self.pstr ) + elif( aname in ("col", "column") ): + return col( self.loc, self.pstr ) + elif( aname == "line" ): + return line( self.loc, self.pstr ) + else: + raise AttributeError(aname) + + def __str__( self ): + return "%s (at char %d), (line:%d, col:%d)" % \ + ( self.msg, self.loc, self.lineno, self.column ) + def __repr__( self ): + return _ustr(self) + def markInputline( self, markerString = ">!<" ): + """Extracts the exception line from the input string, and marks + the location of the exception with a special symbol. 
+ """ + line_str = self.line + line_column = self.column - 1 + if markerString: + line_str = "".join((line_str[:line_column], + markerString, line_str[line_column:])) + return line_str.strip() + def __dir__(self): + return "lineno col line".split() + dir(type(self)) + +class ParseException(ParseBaseException): + """ + Exception thrown when parse expressions don't match class; + supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + + Example:: + try: + Word(nums).setName("integer").parseString("ABC") + except ParseException as pe: + print(pe) + print("column: {}".format(pe.col)) + + prints:: + Expected integer (at char 0), (line:1, col:1) + column: 1 + """ + pass + +class ParseFatalException(ParseBaseException): + """user-throwable exception thrown when inconsistent parse content + is found; stops all parsing immediately""" + pass + +class ParseSyntaxException(ParseFatalException): + """just like L{ParseFatalException}, but thrown internally when an + L{ErrorStop<And._ErrorStop>} ('-' operator) indicates that parsing is to stop + immediately because an unbacktrackable syntax error has been found""" + pass + +#~ class ReparseException(ParseBaseException): + #~ """Experimental class - parse actions can raise this exception to cause + #~ pyparsing to reparse the input string: + #~ - with a modified input string, and/or + #~ - with a modified start location + #~ Set the values of the ReparseException in the constructor, and raise the + #~ exception in a parse action to cause pyparsing to use the new string/location. + #~ Setting the values as None causes no change to be made. + #~ """ + #~ def __init_( self, newstring, restartLoc ): + #~ self.newParseText = newstring + #~ self.reparseLoc = restartLoc + +class RecursiveGrammarException(Exception): + """exception thrown by L{ParserElement.validate} if the grammar could be improperly recursive""" + def __init__( self, parseElementList ): + self.parseElementTrace = parseElementList + + def __str__( self ): + return "RecursiveGrammarException: %s" % self.parseElementTrace + +class _ParseResultsWithOffset(object): + def __init__(self,p1,p2): + self.tup = (p1,p2) + def __getitem__(self,i): + return self.tup[i] + def __repr__(self): + return repr(self.tup[0]) + def setOffset(self,i): + self.tup = (self.tup[0],i) + +class ParseResults(object): + """ + Structured parse results, to provide multiple means of access to the parsed data: + - as a list (C{len(results)}) + - by list index (C{results[0], results[1]}, etc.) 
+ - by attribute (C{results.<resultsName>} - see L{ParserElement.setResultsName}) + + Example:: + integer = Word(nums) + date_str = (integer.setResultsName("year") + '/' + + integer.setResultsName("month") + '/' + + integer.setResultsName("day")) + # equivalent form: + # date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + # parseString returns a ParseResults object + result = date_str.parseString("1999/12/31") + + def test(s, fn=repr): + print("%s -> %s" % (s, fn(eval(s)))) + test("list(result)") + test("result[0]") + test("result['month']") + test("result.day") + test("'month' in result") + test("'minutes' in result") + test("result.dump()", str) + prints:: + list(result) -> ['1999', '/', '12', '/', '31'] + result[0] -> '1999' + result['month'] -> '12' + result.day -> '31' + 'month' in result -> True + 'minutes' in result -> False + result.dump() -> ['1999', '/', '12', '/', '31'] + - day: 31 + - month: 12 + - year: 1999 + """ + def __new__(cls, toklist=None, name=None, asList=True, modal=True ): + if isinstance(toklist, cls): + return toklist + retobj = object.__new__(cls) + retobj.__doinit = True + return retobj + + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( self, toklist=None, name=None, asList=True, modal=True, isinstance=isinstance ): + if self.__doinit: + self.__doinit = False + self.__name = None + self.__parent = None + self.__accumNames = {} + self.__asList = asList + self.__modal = modal + if toklist is None: + toklist = [] + if isinstance(toklist, list): + self.__toklist = toklist[:] + elif isinstance(toklist, _generatorType): + self.__toklist = list(toklist) + else: + self.__toklist = [toklist] + self.__tokdict = dict() + + if name is not None and name: + if not modal: + self.__accumNames[name] = 0 + if isinstance(name,int): + name = _ustr(name) # will always return a str, but use _ustr for consistency + self.__name = name + if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])): + if isinstance(toklist,basestring): + toklist = [ toklist ] + if asList: + if isinstance(toklist,ParseResults): + self[name] = _ParseResultsWithOffset(toklist.copy(),0) + else: + self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) + self[name].__name = name + else: + try: + self[name] = toklist[0] + except (KeyError,TypeError,IndexError): + self[name] = toklist + + def __getitem__( self, i ): + if isinstance( i, (int,slice) ): + return self.__toklist[i] + else: + if i not in self.__accumNames: + return self.__tokdict[i][-1][0] + else: + return ParseResults([ v[0] for v in self.__tokdict[i] ]) + + def __setitem__( self, k, v, isinstance=isinstance ): + if isinstance(v,_ParseResultsWithOffset): + self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] + sub = v[0] + elif isinstance(k,(int,slice)): + self.__toklist[k] = v + sub = v + else: + self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] + sub = v + if isinstance(sub,ParseResults): + sub.__parent = wkref(self) + + def __delitem__( self, i ): + if isinstance(i,(int,slice)): + mylen = len( self.__toklist ) + del self.__toklist[i] + + # convert int to slice + if isinstance(i, int): + if i < 0: + i += mylen + i = slice(i, i+1) + # get removed indices + removed = list(range(*i.indices(mylen))) + removed.reverse() + # fixup indices in token dictionary + for name,occurrences in self.__tokdict.items(): + for j in removed: + for k, (value, position) in 
enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) + else: + del self.__tokdict[i] + + def __contains__( self, k ): + return k in self.__tokdict + + def __len__( self ): return len( self.__toklist ) + def __bool__(self): return ( not not self.__toklist ) + __nonzero__ = __bool__ + def __iter__( self ): return iter( self.__toklist ) + def __reversed__( self ): return iter( self.__toklist[::-1] ) + def _iterkeys( self ): + if hasattr(self.__tokdict, "iterkeys"): + return self.__tokdict.iterkeys() + else: + return iter(self.__tokdict) + + def _itervalues( self ): + return (self[k] for k in self._iterkeys()) + + def _iteritems( self ): + return ((k, self[k]) for k in self._iterkeys()) + + if PY_3: + keys = _iterkeys + """Returns an iterator of all named result keys (Python 3.x only).""" + + values = _itervalues + """Returns an iterator of all named result values (Python 3.x only).""" + + items = _iteritems + """Returns an iterator of all named result key-value tuples (Python 3.x only).""" + + else: + iterkeys = _iterkeys + """Returns an iterator of all named result keys (Python 2.x only).""" + + itervalues = _itervalues + """Returns an iterator of all named result values (Python 2.x only).""" + + iteritems = _iteritems + """Returns an iterator of all named result key-value tuples (Python 2.x only).""" + + def keys( self ): + """Returns all named result keys (as a list in Python 2.x, as an iterator in Python 3.x).""" + return list(self.iterkeys()) + + def values( self ): + """Returns all named result values (as a list in Python 2.x, as an iterator in Python 3.x).""" + return list(self.itervalues()) + + def items( self ): + """Returns all named result key-values (as a list of tuples in Python 2.x, as an iterator in Python 3.x).""" + return list(self.iteritems()) + + def haskeys( self ): + """Since keys() returns an iterator, this method is helpful in bypassing + code that looks for the existence of any defined results names.""" + return bool(self.__tokdict) + + def pop( self, *args, **kwargs): + """ + Removes and returns item at specified index (default=C{last}). + Supports both C{list} and C{dict} semantics for C{pop()}. If passed no + argument or an integer argument, it will use C{list} semantics + and pop tokens from the list of parsed tokens. If passed a + non-integer argument (most likely a string), it will use C{dict} + semantics and pop the corresponding value from any defined + results names. A second default return value argument is + supported, just as in C{dict.pop()}. 
+ + Example:: + def remove_first(tokens): + tokens.pop(0) + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + print(OneOrMore(Word(nums)).addParseAction(remove_first).parseString("0 123 321")) # -> ['123', '321'] + + label = Word(alphas) + patt = label("LABEL") + OneOrMore(Word(nums)) + print(patt.parseString("AAB 123 321").dump()) + + # Use pop() in a parse action to remove named result (note that corresponding value is not + # removed from list form of results) + def remove_LABEL(tokens): + tokens.pop("LABEL") + return tokens + patt.addParseAction(remove_LABEL) + print(patt.parseString("AAB 123 321").dump()) + prints:: + ['AAB', '123', '321'] + - LABEL: AAB + + ['AAB', '123', '321'] + """ + if not args: + args = [-1] + for k,v in kwargs.items(): + if k == 'default': + args = (args[0], v) + else: + raise TypeError("pop() got an unexpected keyword argument '%s'" % k) + if (isinstance(args[0], int) or + len(args) == 1 or + args[0] in self): + index = args[0] + ret = self[index] + del self[index] + return ret + else: + defaultvalue = args[1] + return defaultvalue + + def get(self, key, defaultValue=None): + """ + Returns named result matching the given key, or if there is no + such name, then returns the given C{defaultValue} or C{None} if no + C{defaultValue} is specified. + + Similar to C{dict.get()}. + + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString("1999/12/31") + print(result.get("year")) # -> '1999' + print(result.get("hour", "not specified")) # -> 'not specified' + print(result.get("hour")) # -> None + """ + if key in self: + return self[key] + else: + return defaultValue + + def insert( self, index, insStr ): + """ + Inserts new element at location index in the list of parsed tokens. + + Similar to C{list.insert()}. + + Example:: + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to insert the parse location in the front of the parsed results + def insert_locn(locn, tokens): + tokens.insert(0, locn) + print(OneOrMore(Word(nums)).addParseAction(insert_locn).parseString("0 123 321")) # -> [0, '0', '123', '321'] + """ + self.__toklist.insert(index, insStr) + # fixup indices in token dictionary + for name,occurrences in self.__tokdict.items(): + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) + + def append( self, item ): + """ + Add single element to end of ParseResults list of elements. + + Example:: + print(OneOrMore(Word(nums)).parseString("0 123 321")) # -> ['0', '123', '321'] + + # use a parse action to compute the sum of the parsed integers, and add it to the end + def append_sum(tokens): + tokens.append(sum(map(int, tokens))) + print(OneOrMore(Word(nums)).addParseAction(append_sum).parseString("0 123 321")) # -> ['0', '123', '321', 444] + """ + self.__toklist.append(item) + + def extend( self, itemseq ): + """ + Add sequence of elements to end of ParseResults list of elements. 
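+        If C{itemseq} is itself a C{ParseResults}, it is merged in with C{+=}
+        so that its named results are preserved (editorial note).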
+ + Example:: + patt = OneOrMore(Word(alphas)) + + # use a parse action to append the reverse of the matched strings, to make a palindrome + def make_palindrome(tokens): + tokens.extend(reversed([t[::-1] for t in tokens])) + return ''.join(tokens) + print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd")) # -> 'lskdjsdlkjflksddsklfjkldsjdksl' + """ + if isinstance(itemseq, ParseResults): + self += itemseq + else: + self.__toklist.extend(itemseq) + + def clear( self ): + """ + Clear all elements and results names. + """ + del self.__toklist[:] + self.__tokdict.clear() + + def __getattr__( self, name ): + try: + return self[name] + except KeyError: + return "" + + if name in self.__tokdict: + if name not in self.__accumNames: + return self.__tokdict[name][-1][0] + else: + return ParseResults([ v[0] for v in self.__tokdict[name] ]) + else: + return "" + + def __add__( self, other ): + ret = self.copy() + ret += other + return ret + + def __iadd__( self, other ): + if other.__tokdict: + offset = len(self.__toklist) + addoffset = lambda a: offset if a<0 else a+offset + otheritems = other.__tokdict.items() + otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) + for (k,vlist) in otheritems for v in vlist] + for k,v in otherdictitems: + self[k] = v + if isinstance(v[0],ParseResults): + v[0].__parent = wkref(self) + + self.__toklist += other.__toklist + self.__accumNames.update( other.__accumNames ) + return self + + def __radd__(self, other): + if isinstance(other,int) and other == 0: + # useful for merging many ParseResults using sum() builtin + return self.copy() + else: + # this may raise a TypeError - so be it + return other + self + + def __repr__( self ): + return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) + + def __str__( self ): + return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' + + def _asStringList( self, sep='' ): + out = [] + for item in self.__toklist: + if out and sep: + out.append(sep) + if isinstance( item, ParseResults ): + out += item._asStringList() + else: + out.append( _ustr(item) ) + return out + + def asList( self ): + """ + Returns the parse results as a nested list of matching tokens, all converted to strings. + + Example:: + patt = OneOrMore(Word(alphas)) + result = patt.parseString("sldkj lsdkj sldkj") + # even though the result prints in string-like form, it is actually a pyparsing ParseResults + print(type(result), result) # -> <class 'pyparsing.ParseResults'> ['sldkj', 'lsdkj', 'sldkj'] + + # Use asList() to create an actual list + result_list = result.asList() + print(type(result_list), result_list) # -> <class 'list'> ['sldkj', 'lsdkj', 'sldkj'] + """ + return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist] + + def asDict( self ): + """ + Returns the named parse results as a nested dictionary. 
+ + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString('12/31/1999') + print(type(result), repr(result)) # -> <class 'pyparsing.ParseResults'> (['12', '/', '31', '/', '1999'], {'day': [('1999', 4)], 'year': [('12', 0)], 'month': [('31', 2)]}) + + result_dict = result.asDict() + print(type(result_dict), repr(result_dict)) # -> <class 'dict'> {'day': '1999', 'year': '12', 'month': '31'} + + # even though a ParseResults supports dict-like access, sometime you just need to have a dict + import json + print(json.dumps(result)) # -> Exception: TypeError: ... is not JSON serializable + print(json.dumps(result.asDict())) # -> {"month": "31", "day": "1999", "year": "12"} + """ + if PY_3: + item_fn = self.items + else: + item_fn = self.iteritems + + def toItem(obj): + if isinstance(obj, ParseResults): + if obj.haskeys(): + return obj.asDict() + else: + return [toItem(v) for v in obj] + else: + return obj + + return dict((k,toItem(v)) for k,v in item_fn()) + + def copy( self ): + """ + Returns a new copy of a C{ParseResults} object. + """ + ret = ParseResults( self.__toklist ) + ret.__tokdict = self.__tokdict.copy() + ret.__parent = self.__parent + ret.__accumNames.update( self.__accumNames ) + ret.__name = self.__name + return ret + + def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): + """ + (Deprecated) Returns the parse results as XML. Tags are created for tokens and lists that have defined results names. + """ + nl = "\n" + out = [] + namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items() + for v in vlist) + nextLevelIndent = indent + " " + + # collapse out indents if formatting is not desired + if not formatted: + indent = "" + nextLevelIndent = "" + nl = "" + + selfTag = None + if doctag is not None: + selfTag = doctag + else: + if self.__name: + selfTag = self.__name + + if not selfTag: + if namedItemsOnly: + return "" + else: + selfTag = "ITEM" + + out += [ nl, indent, "<", selfTag, ">" ] + + for i,res in enumerate(self.__toklist): + if isinstance(res,ParseResults): + if i in namedItems: + out += [ res.asXML(namedItems[i], + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + out += [ res.asXML(None, + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + # individual token, see if there is a name for it + resTag = None + if i in namedItems: + resTag = namedItems[i] + if not resTag: + if namedItemsOnly: + continue + else: + resTag = "ITEM" + xmlBodyText = _xml_escape(_ustr(res)) + out += [ nl, nextLevelIndent, "<", resTag, ">", + xmlBodyText, + "</", resTag, ">" ] + + out += [ nl, indent, "</", selfTag, ">" ] + return "".join(out) + + def __lookup(self,sub): + for k,vlist in self.__tokdict.items(): + for v,loc in vlist: + if sub is v: + return k + return None + + def getName(self): + r""" + Returns the results name for this token expression. Useful when several + different expressions might match at a particular location. 
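+        The name is recovered from this node's own results name, from a lookup
+        through its parent, or inferred when there is exactly one named result
+        (editorial summary of the fallbacks implemented below).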
+ + Example:: + integer = Word(nums) + ssn_expr = Regex(r"\d\d\d-\d\d-\d\d\d\d") + house_number_expr = Suppress('#') + Word(nums, alphanums) + user_data = (Group(house_number_expr)("house_number") + | Group(ssn_expr)("ssn") + | Group(integer)("age")) + user_info = OneOrMore(user_data) + + result = user_info.parseString("22 111-22-3333 #221B") + for item in result: + print(item.getName(), ':', item[0]) + prints:: + age : 22 + ssn : 111-22-3333 + house_number : 221B + """ + if self.__name: + return self.__name + elif self.__parent: + par = self.__parent() + if par: + return par.__lookup(self) + else: + return None + elif (len(self) == 1 and + len(self.__tokdict) == 1 and + next(iter(self.__tokdict.values()))[0][1] in (0,-1)): + return next(iter(self.__tokdict.keys())) + else: + return None + + def dump(self, indent='', depth=0, full=True): + """ + Diagnostic method for listing out the contents of a C{ParseResults}. + Accepts an optional C{indent} argument so that this string can be embedded + in a nested display of other data. + + Example:: + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + result = date_str.parseString('12/31/1999') + print(result.dump()) + prints:: + ['12', '/', '31', '/', '1999'] + - day: 1999 + - month: 31 + - year: 12 + """ + out = [] + NL = '\n' + out.append( indent+_ustr(self.asList()) ) + if full: + if self.haskeys(): + items = sorted((str(k), v) for k,v in self.items()) + for k,v in items: + if out: + out.append(NL) + out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) + if isinstance(v,ParseResults): + if v: + out.append( v.dump(indent,depth+1) ) + else: + out.append(_ustr(v)) + else: + out.append(repr(v)) + elif any(isinstance(vv,ParseResults) for vv in self): + v = self + for i,vv in enumerate(v): + if isinstance(vv,ParseResults): + out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) + else: + out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) + + return "".join(out) + + def pprint(self, *args, **kwargs): + """ + Pretty-printer for parsed results as a list, using the C{pprint} module. + Accepts additional positional or keyword args as defined for the + C{pprint.pprint} method. 
(U{http://docs.python.org/3/library/pprint.html#pprint.pprint}) + + Example:: + ident = Word(alphas, alphanums) + num = Word(nums) + func = Forward() + term = ident | num | Group('(' + func + ')') + func <<= ident + Group(Optional(delimitedList(term))) + result = func.parseString("fna a,b,(fnb c,d,200),100") + result.pprint(width=40) + prints:: + ['fna', + ['a', + 'b', + ['(', 'fnb', ['c', 'd', '200'], ')'], + '100']] + """ + pprint.pprint(self.asList(), *args, **kwargs) + + # add support for pickle protocol + def __getstate__(self): + return ( self.__toklist, + ( self.__tokdict.copy(), + self.__parent is not None and self.__parent() or None, + self.__accumNames, + self.__name ) ) + + def __setstate__(self,state): + self.__toklist = state[0] + (self.__tokdict, + par, + inAccumNames, + self.__name) = state[1] + self.__accumNames = {} + self.__accumNames.update(inAccumNames) + if par is not None: + self.__parent = wkref(par) + else: + self.__parent = None + + def __getnewargs__(self): + return self.__toklist, self.__name, self.__asList, self.__modal + + def __dir__(self): + return (dir(type(self)) + list(self.keys())) + +MutableMapping.register(ParseResults) + +def col (loc,strg): + """Returns current column within a string, counting newlines as line separators. + The first column is number 1. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information + on parsing strings containing C{<TAB>}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + s = strg + return 1 if 0<loc<len(s) and s[loc-1] == '\n' else loc - s.rfind("\n", 0, loc) + +def lineno(loc,strg): + """Returns current line number within a string, counting newlines as line separators. + The first line is number 1. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information + on parsing strings containing C{<TAB>}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + return strg.count("\n",0,loc) + 1 + +def line( loc, strg ): + """Returns the line of text containing loc within a string, counting newlines as line separators. 
+ """ + lastCR = strg.rfind("\n", 0, loc) + nextCR = strg.find("\n", loc) + if nextCR >= 0: + return strg[lastCR+1:nextCR] + else: + return strg[lastCR+1:] + +def _defaultStartDebugAction( instring, loc, expr ): + print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) + +def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): + print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) + +def _defaultExceptionDebugAction( instring, loc, expr, exc ): + print ("Exception raised:" + _ustr(exc)) + +def nullDebugAction(*args): + """'Do-nothing' debug action, to suppress debugging output during parsing.""" + pass + +# Only works on Python 3.x - nonlocal is toxic to Python 2 installs +#~ 'decorator to trim function calls to match the arity of the target' +#~ def _trim_arity(func, maxargs=3): + #~ if func in singleArgBuiltins: + #~ return lambda s,l,t: func(t) + #~ limit = 0 + #~ foundArity = False + #~ def wrapper(*args): + #~ nonlocal limit,foundArity + #~ while 1: + #~ try: + #~ ret = func(*args[limit:]) + #~ foundArity = True + #~ return ret + #~ except TypeError: + #~ if limit == maxargs or foundArity: + #~ raise + #~ limit += 1 + #~ continue + #~ return wrapper + +# this version is Python 2.x-3.x cross-compatible +'decorator to trim function calls to match the arity of the target' +def _trim_arity(func, maxargs=2): + if func in singleArgBuiltins: + return lambda s,l,t: func(t) + limit = [0] + foundArity = [False] + + # traceback return data structure changed in Py3.5 - normalize back to plain tuples + if system_version[:2] >= (3,5): + def extract_stack(limit=0): + # special handling for Python 3.5.0 - extra deep call stack by 1 + offset = -3 if system_version == (3,5,0) else -2 + frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] + return [frame_summary[:2]] + def extract_tb(tb, limit=0): + frames = traceback.extract_tb(tb, limit=limit) + frame_summary = frames[-1] + return [frame_summary[:2]] + else: + extract_stack = traceback.extract_stack + extract_tb = traceback.extract_tb + + # synthesize what would be returned by traceback.extract_stack at the call to + # user's parse action 'func', so that we don't incur call penalty at parse time + + LINE_DIFF = 6 + # IF ANY CODE CHANGES, EVEN JUST COMMENTS OR BLANK LINES, BETWEEN THE NEXT LINE AND + # THE CALL TO FUNC INSIDE WRAPPER, LINE_DIFF MUST BE MODIFIED!!!! 
+ this_line = extract_stack(limit=2)[-1] + pa_call_line_synth = (this_line[0], this_line[1]+LINE_DIFF) + + def wrapper(*args): + while 1: + try: + ret = func(*args[limit[0]:]) + foundArity[0] = True + return ret + except TypeError: + # re-raise TypeErrors if they did not come from our arity testing + if foundArity[0]: + raise + else: + try: + tb = sys.exc_info()[-1] + if not extract_tb(tb, limit=2)[-1][:2] == pa_call_line_synth: + raise + finally: + del tb + + if limit[0] <= maxargs: + limit[0] += 1 + continue + raise + + # copy func name to wrapper for sensible debug output + func_name = "<parse action>" + try: + func_name = getattr(func, '__name__', + getattr(func, '__class__').__name__) + except Exception: + func_name = str(func) + wrapper.__name__ = func_name + + return wrapper + +class ParserElement(object): + """Abstract base level parser element class.""" + DEFAULT_WHITE_CHARS = " \n\t\r" + verbose_stacktrace = False + + @staticmethod + def setDefaultWhitespaceChars( chars ): + r""" + Overrides the default whitespace chars + + Example:: + # default whitespace chars are space, <TAB> and newline + OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def', 'ghi', 'jkl'] + + # change to just treat newline as significant + ParserElement.setDefaultWhitespaceChars(" \t") + OneOrMore(Word(alphas)).parseString("abc def\nghi jkl") # -> ['abc', 'def'] + """ + ParserElement.DEFAULT_WHITE_CHARS = chars + + @staticmethod + def inlineLiteralsUsing(cls): + """ + Set class to be used for inclusion of string literals into a parser. + + Example:: + # default literal class used is Literal + integer = Word(nums) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] + + + # change to Suppress + ParserElement.inlineLiteralsUsing(Suppress) + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + + date_str.parseString("1999/12/31") # -> ['1999', '12', '31'] + """ + ParserElement._literalStringClass = cls + + def __init__( self, savelist=False ): + self.parseAction = list() + self.failAction = None + #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall + self.strRepr = None + self.resultsName = None + self.saveAsList = savelist + self.skipWhitespace = True + self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + self.copyDefaultWhiteChars = True + self.mayReturnEmpty = False # used when checking for left-recursion + self.keepTabs = False + self.ignoreExprs = list() + self.debug = False + self.streamlined = False + self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index + self.errmsg = "" + self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) + self.debugActions = ( None, None, None ) #custom debug actions + self.re = None + self.callPreparse = True # used to avoid redundant calls to preParse + self.callDuringTry = False + + def copy( self ): + """ + Make a copy of this C{ParserElement}. Useful for defining different parse actions + for the same parsing pattern, using copies of the original parse element. 
+ + Example:: + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + integerK = integer.copy().addParseAction(lambda toks: toks[0]*1024) + Suppress("K") + integerM = integer.copy().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") + + print(OneOrMore(integerK | integerM | integer).parseString("5K 100 640K 256M")) + prints:: + [5120, 100, 655360, 268435456] + Equivalent form of C{expr.copy()} is just C{expr()}:: + integerM = integer().addParseAction(lambda toks: toks[0]*1024*1024) + Suppress("M") + """ + cpy = copy.copy( self ) + cpy.parseAction = self.parseAction[:] + cpy.ignoreExprs = self.ignoreExprs[:] + if self.copyDefaultWhiteChars: + cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + return cpy + + def setName( self, name ): + """ + Define name for this expression, makes debugging and exception messages clearer. + + Example:: + Word(nums).parseString("ABC") # -> Exception: Expected W:(0123...) (at char 0), (line:1, col:1) + Word(nums).setName("integer").parseString("ABC") # -> Exception: Expected integer (at char 0), (line:1, col:1) + """ + self.name = name + self.errmsg = "Expected " + self.name + if hasattr(self,"exception"): + self.exception.msg = self.errmsg + return self + + def setResultsName( self, name, listAllMatches=False ): + """ + Define name for referencing matching tokens as a nested attribute + of the returned parse results. + NOTE: this returns a *copy* of the original C{ParserElement} object; + this is so that the client can define a basic element, such as an + integer, and reference it in multiple places with different names. + + You can also set results names using the abbreviated syntax, + C{expr("name")} in place of C{expr.setResultsName("name")} - + see L{I{__call__}<__call__>}. + + Example:: + date_str = (integer.setResultsName("year") + '/' + + integer.setResultsName("month") + '/' + + integer.setResultsName("day")) + + # equivalent form: + date_str = integer("year") + '/' + integer("month") + '/' + integer("day") + """ + newself = self.copy() + if name.endswith("*"): + name = name[:-1] + listAllMatches=True + newself.resultsName = name + newself.modalResults = not listAllMatches + return newself + + def setBreak(self,breakFlag = True): + """Method to invoke the Python pdb debugger when this element is + about to be parsed. Set C{breakFlag} to True to enable, False to + disable. + """ + if breakFlag: + _parseMethod = self._parse + def breaker(instring, loc, doActions=True, callPreParse=True): + import pdb + pdb.set_trace() + return _parseMethod( instring, loc, doActions, callPreParse ) + breaker._originalParseMethod = _parseMethod + self._parse = breaker + else: + if hasattr(self._parse,"_originalParseMethod"): + self._parse = self._parse._originalParseMethod + return self + + def setParseAction( self, *fns, **kwargs ): + """ + Define one or more actions to perform when successfully matching parse element definition. + Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, + C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: + - s = the original string being parsed (see note below) + - loc = the location of the matching substring + - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object + If the functions in fns modify the tokens, they can return them as the return + value from fn, and the modified list of tokens will replace the original. + Otherwise, fn does not need to return any value. 
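+        (Editorial note: each fn is wrapped by C{_trim_arity}, which is what
+        allows all of the 0-3 argument signatures above to be called uniformly.)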
+ + Optional keyword arguments: + - callDuringTry = (default=C{False}) indicate if parse action should be run during lookaheads and alternate testing + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{parseString}<parseString>} for more information + on parsing strings containing C{<TAB>}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + + Example:: + integer = Word(nums) + date_str = integer + '/' + integer + '/' + integer + + date_str.parseString("1999/12/31") # -> ['1999', '/', '12', '/', '31'] + + # use parse action to convert to ints at parse time + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + date_str = integer + '/' + integer + '/' + integer + + # note that integer fields are now ints, not strings + date_str.parseString("1999/12/31") # -> [1999, '/', 12, '/', 31] + """ + self.parseAction = list(map(_trim_arity, list(fns))) + self.callDuringTry = kwargs.get("callDuringTry", False) + return self + + def addParseAction( self, *fns, **kwargs ): + """ + Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}. + + See examples in L{I{copy}<copy>}. + """ + self.parseAction += list(map(_trim_arity, list(fns))) + self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) + return self + + def addCondition(self, *fns, **kwargs): + """Add a boolean predicate function to expression's list of parse actions. See + L{I{setParseAction}<setParseAction>} for function call signatures. Unlike C{setParseAction}, + functions passed to C{addCondition} need to return boolean success/fail of the condition. + + Optional keyword arguments: + - message = define a custom message to be used in the raised exception + - fatal = if True, will raise ParseFatalException to stop parsing immediately; otherwise will raise ParseException + + Example:: + integer = Word(nums).setParseAction(lambda toks: int(toks[0])) + year_int = integer.copy() + year_int.addCondition(lambda toks: toks[0] >= 2000, message="Only support years 2000 and later") + date_str = year_int + '/' + integer + '/' + integer + + result = date_str.parseString("1999/12/31") # -> Exception: Only support years 2000 and later (at char 0), (line:1, col:1) + """ + msg = kwargs.get("message", "failed user-defined condition") + exc_type = ParseFatalException if kwargs.get("fatal", False) else ParseException + for fn in fns: + def pa(s,l,t): + if not bool(_trim_arity(fn)(s,l,t)): + raise exc_type(s,l,msg) + self.parseAction.append(pa) + self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) + return self + + def setFailAction( self, fn ): + """Define action to perform if parsing fails at this expression. + Fail acton fn is a callable function that takes the arguments + C{fn(s,loc,expr,err)} where: + - s = string being parsed + - loc = location where expression match was attempted and failed + - expr = the parse expression that failed + - err = the exception thrown + The function returns no value. 
It may throw C{L{ParseFatalException}} + if it is desired to stop parsing immediately.""" + self.failAction = fn + return self + + def _skipIgnorables( self, instring, loc ): + exprsFound = True + while exprsFound: + exprsFound = False + for e in self.ignoreExprs: + try: + while 1: + loc,dummy = e._parse( instring, loc ) + exprsFound = True + except ParseException: + pass + return loc + + def preParse( self, instring, loc ): + if self.ignoreExprs: + loc = self._skipIgnorables( instring, loc ) + + if self.skipWhitespace: + wt = self.whiteChars + instrlen = len(instring) + while loc < instrlen and instring[loc] in wt: + loc += 1 + + return loc + + def parseImpl( self, instring, loc, doActions=True ): + return loc, [] + + def postParse( self, instring, loc, tokenlist ): + return tokenlist + + #~ @profile + def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): + debugging = ( self.debug ) #and doActions ) + + if debugging or self.failAction: + #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) + if (self.debugActions[0] ): + self.debugActions[0]( instring, loc, self ) + if callPreParse and self.callPreparse: + preloc = self.preParse( instring, loc ) + else: + preloc = loc + tokensStart = preloc + try: + try: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + except IndexError: + raise ParseException( instring, len(instring), self.errmsg, self ) + except ParseBaseException as err: + #~ print ("Exception raised:", err) + if self.debugActions[2]: + self.debugActions[2]( instring, tokensStart, self, err ) + if self.failAction: + self.failAction( instring, tokensStart, self, err ) + raise + else: + if callPreParse and self.callPreparse: + preloc = self.preParse( instring, loc ) + else: + preloc = loc + tokensStart = preloc + if self.mayIndexError or preloc >= len(instring): + try: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + except IndexError: + raise ParseException( instring, len(instring), self.errmsg, self ) + else: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + + tokens = self.postParse( instring, loc, tokens ) + + retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) + if self.parseAction and (doActions or self.callDuringTry): + if debugging: + try: + for fn in self.parseAction: + tokens = fn( instring, tokensStart, retTokens ) + if tokens is not None: + retTokens = ParseResults( tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), + modal=self.modalResults ) + except ParseBaseException as err: + #~ print "Exception raised in user parse action:", err + if (self.debugActions[2] ): + self.debugActions[2]( instring, tokensStart, self, err ) + raise + else: + for fn in self.parseAction: + tokens = fn( instring, tokensStart, retTokens ) + if tokens is not None: + retTokens = ParseResults( tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), + modal=self.modalResults ) + if debugging: + #~ print ("Matched",self,"->",retTokens.asList()) + if (self.debugActions[1] ): + self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) + + return loc, retTokens + + def tryParse( self, instring, loc ): + try: + return self._parse( instring, loc, doActions=False )[0] + except ParseFatalException: + raise ParseException( instring, loc, self.errmsg, self) + + def canParseNext(self, instring, loc): + try: + self.tryParse(instring, loc) + except (ParseException, 
IndexError):
+            return False
+        else:
+            return True
+
+    class _UnboundedCache(object):
+        def __init__(self):
+            cache = {}
+            self.not_in_cache = not_in_cache = object()
+
+            def get(self, key):
+                return cache.get(key, not_in_cache)
+
+            def set(self, key, value):
+                cache[key] = value
+
+            def clear(self):
+                cache.clear()
+
+            def cache_len(self):
+                return len(cache)
+
+            self.get = types.MethodType(get, self)
+            self.set = types.MethodType(set, self)
+            self.clear = types.MethodType(clear, self)
+            self.__len__ = types.MethodType(cache_len, self)
+
+    if _OrderedDict is not None:
+        class _FifoCache(object):
+            def __init__(self, size):
+                self.not_in_cache = not_in_cache = object()
+
+                cache = _OrderedDict()
+
+                def get(self, key):
+                    return cache.get(key, not_in_cache)
+
+                def set(self, key, value):
+                    cache[key] = value
+                    while len(cache) > size:
+                        try:
+                            cache.popitem(False)
+                        except KeyError:
+                            pass
+
+                def clear(self):
+                    cache.clear()
+
+                def cache_len(self):
+                    return len(cache)
+
+                self.get = types.MethodType(get, self)
+                self.set = types.MethodType(set, self)
+                self.clear = types.MethodType(clear, self)
+                self.__len__ = types.MethodType(cache_len, self)
+
+    else:
+        class _FifoCache(object):
+            def __init__(self, size):
+                self.not_in_cache = not_in_cache = object()
+
+                cache = {}
+                # use an unbounded deque for the key FIFO - a deque created with a
+                # maxlen would silently drop old keys before the eviction loop in
+                # set() could pop their entries from the cache
+                key_fifo = collections.deque()
+
+                def get(self, key):
+                    return cache.get(key, not_in_cache)
+
+                def set(self, key, value):
+                    cache[key] = value
+                    while len(key_fifo) > size:
+                        cache.pop(key_fifo.popleft(), None)
+                    key_fifo.append(key)
+
+                def clear(self):
+                    cache.clear()
+                    key_fifo.clear()
+
+                def cache_len(self):
+                    return len(cache)
+
+                self.get = types.MethodType(get, self)
+                self.set = types.MethodType(set, self)
+                self.clear = types.MethodType(clear, self)
+                self.__len__ = types.MethodType(cache_len, self)
+
+    # argument cache for optimizing repeated calls when backtracking through recursive expressions
+    packrat_cache = {} # this is set later by enablePackrat(); this is here so that resetCache() doesn't fail
+    packrat_cache_lock = RLock()
+    packrat_cache_stats = [0, 0]
+
+    # this method gets repeatedly called during backtracking with the same arguments -
+    # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression
+    def _parseCache( self, instring, loc, doActions=True, callPreParse=True ):
+        HIT, MISS = 0, 1
+        lookup = (self, instring, loc, callPreParse, doActions)
+        with ParserElement.packrat_cache_lock:
+            cache = ParserElement.packrat_cache
+            value = cache.get(lookup)
+            if value is cache.not_in_cache:
+                ParserElement.packrat_cache_stats[MISS] += 1
+                try:
+                    value = self._parseNoCache(instring, loc, doActions, callPreParse)
+                except ParseBaseException as pe:
+                    # cache a copy of the exception, without the traceback
+                    cache.set(lookup, pe.__class__(*pe.args))
+                    raise
+                else:
+                    cache.set(lookup, (value[0], value[1].copy()))
+                    return value
+            else:
+                ParserElement.packrat_cache_stats[HIT] += 1
+                if isinstance(value, Exception):
+                    raise value
+                return (value[0], value[1].copy())
+
+    _parse = _parseNoCache
+
+    @staticmethod
+    def resetCache():
+        ParserElement.packrat_cache.clear()
+        ParserElement.packrat_cache_stats[:] = [0] * len(ParserElement.packrat_cache_stats)
+
+    _packratEnabled = False
+    @staticmethod
+    def enablePackrat(cache_size_limit=128):
+        """Enables "packrat" parsing, which adds memoizing to the parsing logic.
+
+        Repeated parse attempts at the same string location (which happens
+        often in many complex grammars) can immediately return a cached value,
+        instead of re-executing parsing/validating code. Memoizing is done for
+        both valid results and parsing exceptions.
+
+        Parameters:
+         - cache_size_limit - (default=C{128}) - if an integer value is provided
+           will limit the size of the packrat cache; if None is passed, then
+           the cache size will be unbounded; if 0 is passed, the cache will
+           be effectively disabled.
+
+        This speedup may break existing programs that use parse actions that
+        have side-effects. For this reason, packrat parsing is disabled when
+        you first import pyparsing. To activate the packrat feature, your
+        program must call the class method C{ParserElement.enablePackrat()}. If
+        your program uses C{psyco} to "compile as you go", you must call
+        C{enablePackrat} before calling C{psyco.full()}. If you do not do this,
+        Python will crash. For best results, call C{enablePackrat()} immediately
+        after importing pyparsing.
+
+        Example::
+            import pyparsing
+            pyparsing.ParserElement.enablePackrat()
+        """
+        if not ParserElement._packratEnabled:
+            ParserElement._packratEnabled = True
+            if cache_size_limit is None:
+                ParserElement.packrat_cache = ParserElement._UnboundedCache()
+            else:
+                ParserElement.packrat_cache = ParserElement._FifoCache(cache_size_limit)
+            ParserElement._parse = ParserElement._parseCache
+
+    def parseString( self, instring, parseAll=False ):
+        """
+        Execute the parse expression with the given string.
+        This is the main interface to the client code, once the complete
+        expression has been built.
+
+        If you want the grammar to require that the entire input string be
+        successfully parsed, then set C{parseAll} to True (equivalent to ending
+        the grammar with C{L{StringEnd()}}).
+
+        Note: C{parseString} implicitly calls C{expandtabs()} on the input string,
+        in order to report proper column numbers in parse actions.
+        If the input string contains tabs and
+        the grammar uses parse actions that use the C{loc} argument to index into the
+        string being parsed, you can ensure you have a consistent view of the input
+        string by:
+         - calling C{parseWithTabs} on your grammar before calling C{parseString}
+           (see L{I{parseWithTabs}<parseWithTabs>})
+         - define your parse action using the full C{(s,loc,toks)} signature, and
+           reference the input string using the parse action's C{s} argument
+         - explicitly expand the tabs in your input string before calling
+           C{parseString}
+
+        Example::
+            Word('a').parseString('aaaaabaaa') # -> ['aaaaa']
+            Word('a').parseString('aaaaabaaa', parseAll=True) # -> Exception: Expected end of text
+        """
+        ParserElement.resetCache()
+        if not self.streamlined:
+            self.streamline()
+            #~ self.saveAsList = True
+        for e in self.ignoreExprs:
+            e.streamline()
+        if not self.keepTabs:
+            instring = instring.expandtabs()
+        try:
+            loc, tokens = self._parse( instring, 0 )
+            if parseAll:
+                loc = self.preParse( instring, loc )
+                se = Empty() + StringEnd()
+                se._parse( instring, loc )
+        except ParseBaseException as exc:
+            if ParserElement.verbose_stacktrace:
+                raise
+            else:
+                # catch and re-raise exception from here, clears out pyparsing internal stack trace
+                raise exc
+        else:
+            return tokens
+
+    def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ):
+        """
+        Scan the input string for expression matches. Each match will return the
+        matching tokens, start location, and end location.
May be called with optional + C{maxMatches} argument, to clip scanning after 'n' matches are found. If + C{overlap} is specified, then overlapping matches will be reported. + + Note that the start and end locations are reported relative to the string + being parsed. See L{I{parseString}<parseString>} for more information on parsing + strings with embedded tabs. + + Example:: + source = "sldjf123lsdjjkf345sldkjf879lkjsfd987" + print(source) + for tokens,start,end in Word(alphas).scanString(source): + print(' '*start + '^'*(end-start)) + print(' '*start + tokens[0]) + + prints:: + + sldjf123lsdjjkf345sldkjf879lkjsfd987 + ^^^^^ + sldjf + ^^^^^^^ + lsdjjkf + ^^^^^^ + sldkjf + ^^^^^^ + lkjsfd + """ + if not self.streamlined: + self.streamline() + for e in self.ignoreExprs: + e.streamline() + + if not self.keepTabs: + instring = _ustr(instring).expandtabs() + instrlen = len(instring) + loc = 0 + preparseFn = self.preParse + parseFn = self._parse + ParserElement.resetCache() + matches = 0 + try: + while loc <= instrlen and matches < maxMatches: + try: + preloc = preparseFn( instring, loc ) + nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) + except ParseException: + loc = preloc+1 + else: + if nextLoc > loc: + matches += 1 + yield tokens, preloc, nextLoc + if overlap: + nextloc = preparseFn( instring, loc ) + if nextloc > loc: + loc = nextLoc + else: + loc += 1 + else: + loc = nextLoc + else: + loc = preloc+1 + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def transformString( self, instring ): + """ + Extension to C{L{scanString}}, to modify matching text with modified tokens that may + be returned from a parse action. To use C{transformString}, define a grammar and + attach a parse action to it that modifies the returned token list. + Invoking C{transformString()} on a target string will then scan for matches, + and replace the matched text patterns according to the logic in the parse + action. C{transformString()} returns the resulting transformed string. + + Example:: + wd = Word(alphas) + wd.setParseAction(lambda toks: toks[0].title()) + + print(wd.transformString("now is the winter of our discontent made glorious summer by this sun of york.")) + Prints:: + Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York. + """ + out = [] + lastE = 0 + # force preservation of <TAB>s, to minimize unwanted transformation of string, and to + # keep string locs straight between transformString and scanString + self.keepTabs = True + try: + for t,s,e in self.scanString( instring ): + out.append( instring[lastE:s] ) + if t: + if isinstance(t,ParseResults): + out += t.asList() + elif isinstance(t,list): + out += t + else: + out.append(t) + lastE = e + out.append(instring[lastE:]) + out = [o for o in out if o] + return "".join(map(_ustr,_flatten(out))) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def searchString( self, instring, maxMatches=_MAX_INT ): + """ + Another extension to C{L{scanString}}, simplifying the access to the tokens found + to match the given parse expression. May be called with optional + C{maxMatches} argument, to clip searching after 'n' matches are found. 
+ + Example:: + # a capitalized word starts with an uppercase letter, followed by zero or more lowercase letters + cap_word = Word(alphas.upper(), alphas.lower()) + + print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) + + # the sum() builtin can be used to merge results into a single ParseResults object + print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))) + prints:: + [['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']] + ['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity'] + """ + try: + return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def split(self, instring, maxsplit=_MAX_INT, includeSeparators=False): + """ + Generator method to split a string using the given expression as a separator. + May be called with optional C{maxsplit} argument, to limit the number of splits; + and the optional C{includeSeparators} argument (default=C{False}), if the separating + matching text should be included in the split results. + + Example:: + punc = oneOf(list(".,;:/-!?")) + print(list(punc.split("This, this?, this sentence, is badly punctuated!"))) + prints:: + ['This', ' this', '', ' this sentence', ' is badly punctuated', ''] + """ + splits = 0 + last = 0 + for t,s,e in self.scanString(instring, maxMatches=maxsplit): + yield instring[last:s] + if includeSeparators: + yield t[0] + last = e + yield instring[last:] + + def __add__(self, other ): + """ + Implementation of + operator - returns C{L{And}}. Adding strings to a ParserElement + converts them to L{Literal}s by default. + + Example:: + greet = Word(alphas) + "," + Word(alphas) + "!" + hello = "Hello, World!" + print (hello, "->", greet.parseString(hello)) + Prints:: + Hello, World! 
-> ['Hello', ',', 'World', '!']
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return And( [ self, other ] )
+
+    def __radd__(self, other ):
+        """
+        Implementation of + operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other + self
+
+    def __sub__(self, other):
+        """
+        Implementation of - operator, returns C{L{And}} with error stop
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return self + And._ErrorStop() + other
+
+    def __rsub__(self, other ):
+        """
+        Implementation of - operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other - self
+
+    def __mul__(self,other):
+        """
+        Implementation of * operator, allows use of C{expr * 3} in place of
+        C{expr + expr + expr}. Expressions may also be multiplied by a 2-integer
+        tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples
+        may also include C{None} as in:
+         - C{expr*(n,None)} or C{expr*(n,)} is equivalent
+           to C{expr*n + L{ZeroOrMore}(expr)}
+           (read as "at least n instances of C{expr}")
+         - C{expr*(None,n)} is equivalent to C{expr*(0,n)}
+           (read as "0 to n instances of C{expr}")
+         - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)}
+         - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}
+
+        Note that C{expr*(None,n)} does not raise an exception if
+        more than n exprs exist in the input stream; that is,
+        C{expr*(None,n)} does not enforce a maximum number of expr
+        occurrences.
If this behavior is desired, then write
+        C{expr*(None,n) + ~expr}
+        """
+        if isinstance(other,int):
+            minElements, optElements = other,0
+        elif isinstance(other,tuple):
+            other = (other + (None, None))[:2]
+            if other[0] is None:
+                other = (0, other[1])
+            if isinstance(other[0],int) and other[1] is None:
+                if other[0] == 0:
+                    return ZeroOrMore(self)
+                if other[0] == 1:
+                    return OneOrMore(self)
+                else:
+                    return self*other[0] + ZeroOrMore(self)
+            elif isinstance(other[0],int) and isinstance(other[1],int):
+                minElements, optElements = other
+                optElements -= minElements
+            else:
+                raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects" % (type(other[0]),type(other[1])))
+        else:
+            raise TypeError("cannot multiply 'ParserElement' and '%s' objects" % type(other))
+
+        if minElements < 0:
+            raise ValueError("cannot multiply ParserElement by negative value")
+        if optElements < 0:
+            raise ValueError("second tuple value must be greater or equal to first tuple value")
+        if minElements == optElements == 0:
+            raise ValueError("cannot multiply ParserElement by 0 or (0,0)")
+
+        if (optElements):
+            def makeOptionalList(n):
+                if n>1:
+                    return Optional(self + makeOptionalList(n-1))
+                else:
+                    return Optional(self)
+            if minElements:
+                if minElements == 1:
+                    ret = self + makeOptionalList(optElements)
+                else:
+                    ret = And([self]*minElements) + makeOptionalList(optElements)
+            else:
+                ret = makeOptionalList(optElements)
+        else:
+            if minElements == 1:
+                ret = self
+            else:
+                ret = And([self]*minElements)
+        return ret
+
+    def __rmul__(self, other):
+        return self.__mul__(other)
+
+    def __or__(self, other ):
+        """
+        Implementation of | operator - returns C{L{MatchFirst}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return MatchFirst( [ self, other ] )
+
+    def __ror__(self, other ):
+        """
+        Implementation of | operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other | self
+
+    def __xor__(self, other ):
+        """
+        Implementation of ^ operator - returns C{L{Or}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return Or( [ self, other ] )
+
+    def __rxor__(self, other ):
+        """
+        Implementation of ^ operator when left operand is not a C{L{ParserElement}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
+            return None
+        return other ^ self
+
+    def __and__(self, other ):
+        """
+        Implementation of & operator - returns C{L{Each}}
+        """
+        if isinstance( other, basestring ):
+            other = ParserElement._literalStringClass( other )
+        if not isinstance( other, ParserElement ):
+            warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
+                    SyntaxWarning, stacklevel=2)
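+            # incompatible operand type - warn and yield None rather than raise a TypeError,
+            # mirroring the behavior of the other operator overloads above
+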
return None + return Each( [ self, other ] ) + + def __rand__(self, other ): + """ + Implementation of & operator when left operand is not a C{L{ParserElement}} + """ + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other & self + + def __invert__( self ): + """ + Implementation of ~ operator - returns C{L{NotAny}} + """ + return NotAny( self ) + + def __call__(self, name=None): + """ + Shortcut for C{L{setResultsName}}, with C{listAllMatches=False}. + + If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be + passed as C{True}. + + If C{name} is omitted, same as calling C{L{copy}}. + + Example:: + # these are equivalent + userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") + userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") + """ + if name is not None: + return self.setResultsName(name) + else: + return self.copy() + + def suppress( self ): + """ + Suppresses the output of this C{ParserElement}; useful to keep punctuation from + cluttering up returned output. + """ + return Suppress( self ) + + def leaveWhitespace( self ): + """ + Disables the skipping of whitespace before matching the characters in the + C{ParserElement}'s defined pattern. This is normally only used internally by + the pyparsing module, but may be needed in some whitespace-sensitive grammars. + """ + self.skipWhitespace = False + return self + + def setWhitespaceChars( self, chars ): + """ + Overrides the default whitespace chars + """ + self.skipWhitespace = True + self.whiteChars = chars + self.copyDefaultWhiteChars = False + return self + + def parseWithTabs( self ): + """ + Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string. + Must be called before C{parseString} when the input grammar contains elements that + match C{<TAB>} characters. + """ + self.keepTabs = True + return self + + def ignore( self, other ): + """ + Define expression to be ignored (e.g., comments) while doing pattern + matching; may be called repeatedly, to define multiple comment or other + ignorable patterns. + + Example:: + patt = OneOrMore(Word(alphas)) + patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj'] + + patt.ignore(cStyleComment) + patt.parseString('ablaj /* comment */ lskjd') # -> ['ablaj', 'lskjd'] + """ + if isinstance(other, basestring): + other = Suppress(other) + + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + self.ignoreExprs.append(other) + else: + self.ignoreExprs.append( Suppress( other.copy() ) ) + return self + + def setDebugActions( self, startAction, successAction, exceptionAction ): + """ + Enable display of debugging messages while doing pattern matching. + """ + self.debugActions = (startAction or _defaultStartDebugAction, + successAction or _defaultSuccessDebugAction, + exceptionAction or _defaultExceptionDebugAction) + self.debug = True + return self + + def setDebug( self, flag=True ): + """ + Enable display of debugging messages while doing pattern matching. + Set C{flag} to True to enable, False to disable. 
+ + Example:: + wd = Word(alphas).setName("alphaword") + integer = Word(nums).setName("numword") + term = wd | integer + + # turn on debugging for wd + wd.setDebug() + + OneOrMore(term).parseString("abc 123 xyz 890") + + prints:: + Match alphaword at loc 0(1,1) + Matched alphaword -> ['abc'] + Match alphaword at loc 3(1,4) + Exception raised:Expected alphaword (at char 4), (line:1, col:5) + Match alphaword at loc 7(1,8) + Matched alphaword -> ['xyz'] + Match alphaword at loc 11(1,12) + Exception raised:Expected alphaword (at char 12), (line:1, col:13) + Match alphaword at loc 15(1,16) + Exception raised:Expected alphaword (at char 15), (line:1, col:16) + + The output shown is that produced by the default debug actions - custom debug actions can be + specified using L{setDebugActions}. Prior to attempting + to match the C{wd} expression, the debugging message C{"Match <exprname> at loc <n>(<line>,<col>)"} + is shown. Then if the parse succeeds, a C{"Matched"} message is shown, or an C{"Exception raised"} + message is shown. Also note the use of L{setName} to assign a human-readable name to the expression, + which makes debugging and exception messages easier to understand - for instance, the default + name created for the C{Word} expression without calling C{setName} is C{"W:(ABCD...)"}. + """ + if flag: + self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) + else: + self.debug = False + return self + + def __str__( self ): + return self.name + + def __repr__( self ): + return _ustr(self) + + def streamline( self ): + self.streamlined = True + self.strRepr = None + return self + + def checkRecursion( self, parseElementList ): + pass + + def validate( self, validateTrace=[] ): + """ + Check defined expressions for valid structure, check for infinite recursive definitions. + """ + self.checkRecursion( [] ) + + def parseFile( self, file_or_filename, parseAll=False ): + """ + Execute the parse expression on the given file or filename. + If a filename is specified (instead of a file object), + the entire file is opened, read, and closed before parsing. + """ + try: + file_contents = file_or_filename.read() + except AttributeError: + with open(file_or_filename, "r") as f: + file_contents = f.read() + try: + return self.parseString(file_contents, parseAll) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def __eq__(self,other): + if isinstance(other, ParserElement): + return self is other or vars(self) == vars(other) + elif isinstance(other, basestring): + return self.matches(other) + else: + return super(ParserElement,self)==other + + def __ne__(self,other): + return not (self == other) + + def __hash__(self): + return hash(id(self)) + + def __req__(self,other): + return self == other + + def __rne__(self,other): + return not (self == other) + + def matches(self, testString, parseAll=True): + """ + Method for quick testing of a parser against a test string. Good for simple + inline microtests of sub expressions while building up larger parser. 
+
+        Parameters:
+         - testString - to test against this expression for a match
+         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
+
+        Example::
+            expr = Word(nums)
+            assert expr.matches("100")
+        """
+        try:
+            self.parseString(_ustr(testString), parseAll=parseAll)
+            return True
+        except ParseBaseException:
+            return False
+
+    def runTests(self, tests, parseAll=True, comment='#', fullDump=True, printResults=True, failureTests=False):
+        """
+        Execute the parse expression on a series of test strings, showing each
+        test, the parsed results or where the parse failed. Quick and easy way to
+        run a parse expression against a list of sample strings.
+
+        Parameters:
+         - tests - a list of separate test strings, or a multiline string of test strings
+         - parseAll - (default=C{True}) - flag to pass to C{L{parseString}} when running tests
+         - comment - (default=C{'#'}) - expression for indicating embedded comments in the test
+           string; pass None to disable comment filtering
+         - fullDump - (default=C{True}) - dump results as list followed by results names in nested outline;
+           if False, only dump nested list
+         - printResults - (default=C{True}) prints test output to stdout
+         - failureTests - (default=C{False}) indicates if these tests are expected to fail parsing
+
+        Returns: a (success, results) tuple, where success indicates that all tests succeeded
+        (or failed if C{failureTests} is True), and the results contain a list of lines of each
+        test's output
+
+        Example::
+            number_expr = pyparsing_common.number.copy()
+
+            result = number_expr.runTests('''
+                # unsigned integer
+                100
+                # negative integer
+                -100
+                # float with scientific notation
+                6.02e23
+                # integer with scientific notation
+                1e-12
+                ''')
+            print("Success" if result[0] else "Failed!")
+
+            result = number_expr.runTests('''
+                # stray character
+                100Z
+                # missing leading digit before '.'
+                -.100
+                # too many '.'
+                3.14.159
+                ''', failureTests=True)
+            print("Success" if result[0] else "Failed!")
+        prints::
+            # unsigned integer
+            100
+            [100]
+
+            # negative integer
+            -100
+            [-100]
+
+            # float with scientific notation
+            6.02e23
+            [6.02e+23]
+
+            # integer with scientific notation
+            1e-12
+            [1e-12]
+
+            Success
+
+            # stray character
+            100Z
+               ^
+            FAIL: Expected end of text (at char 3), (line:1, col:4)
+
+            # missing leading digit before '.'
+            -.100
+            ^
+            FAIL: Expected {real number with scientific notation | real number | signed integer} (at char 0), (line:1, col:1)
+
+            # too many '.'
+            3.14.159
+                ^
+            FAIL: Expected end of text (at char 4), (line:1, col:5)
+
+            Success
+
+        Each test string must be on a single line. If you want to test a string that spans multiple
+        lines, create a test like this::
+
+            expr.runTests(r"this is a test\\n of strings that spans \\n 3 lines")
+
+        (Note that this is a raw string literal, you must include the leading 'r'.)
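+
+        (Note also that C{runTests} converts literal C{'\\n'} sequences in each test
+        string into actual newlines before parsing, which is what makes the example
+        above work.)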
+ """ + if isinstance(tests, basestring): + tests = list(map(str.strip, tests.rstrip().splitlines())) + if isinstance(comment, basestring): + comment = Literal(comment) + allResults = [] + comments = [] + success = True + for t in tests: + if comment is not None and comment.matches(t, False) or comments and not t: + comments.append(t) + continue + if not t: + continue + out = ['\n'.join(comments), t] + comments = [] + try: + t = t.replace(r'\n','\n') + result = self.parseString(t, parseAll=parseAll) + out.append(result.dump(full=fullDump)) + success = success and not failureTests + except ParseBaseException as pe: + fatal = "(FATAL)" if isinstance(pe, ParseFatalException) else "" + if '\n' in t: + out.append(line(pe.loc, t)) + out.append(' '*(col(pe.loc,t)-1) + '^' + fatal) + else: + out.append(' '*pe.loc + '^' + fatal) + out.append("FAIL: " + str(pe)) + success = success and failureTests + result = pe + except Exception as exc: + out.append("FAIL-EXCEPTION: " + str(exc)) + success = success and failureTests + result = exc + + if printResults: + if fullDump: + out.append('') + print('\n'.join(out)) + + allResults.append((t, result)) + + return success, allResults + + +class Token(ParserElement): + """ + Abstract C{ParserElement} subclass, for defining atomic matching patterns. + """ + def __init__( self ): + super(Token,self).__init__( savelist=False ) + + +class Empty(Token): + """ + An empty token, will always match. + """ + def __init__( self ): + super(Empty,self).__init__() + self.name = "Empty" + self.mayReturnEmpty = True + self.mayIndexError = False + + +class NoMatch(Token): + """ + A token that will never match. + """ + def __init__( self ): + super(NoMatch,self).__init__() + self.name = "NoMatch" + self.mayReturnEmpty = True + self.mayIndexError = False + self.errmsg = "Unmatchable token" + + def parseImpl( self, instring, loc, doActions=True ): + raise ParseException(instring, loc, self.errmsg, self) + + +class Literal(Token): + """ + Token to exactly match a specified string. + + Example:: + Literal('blah').parseString('blah') # -> ['blah'] + Literal('blah').parseString('blahfooblah') # -> ['blah'] + Literal('blah').parseString('bla') # -> Exception: Expected "blah" + + For case-insensitive matching, use L{CaselessLiteral}. + + For keyword matching (force word break before and after the matched string), + use L{Keyword} or L{CaselessKeyword}. 
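+
+    Note: constructing a C{Literal} with an empty string is not valid; the
+    constructor below emits a SyntaxWarning and downgrades the element to L{Empty}.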
+ """ + def __init__( self, matchString ): + super(Literal,self).__init__() + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Literal; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.__class__ = Empty + self.name = '"%s"' % _ustr(self.match) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False + self.mayIndexError = False + + # Performance tuning: this routine gets called a *lot* + # if this is a single character match string and the first character matches, + # short-circuit as quickly as possible, and avoid calling startswith + #~ @profile + def parseImpl( self, instring, loc, doActions=True ): + if (instring[loc] == self.firstMatchChar and + (self.matchLen==1 or instring.startswith(self.match,loc)) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) +_L = Literal +ParserElement._literalStringClass = Literal + +class Keyword(Token): + """ + Token to exactly match a specified string as a keyword, that is, it must be + immediately followed by a non-keyword character. Compare with C{L{Literal}}: + - C{Literal("if")} will match the leading C{'if'} in C{'ifAndOnlyIf'}. + - C{Keyword("if")} will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} + Accepts two optional constructor arguments in addition to the keyword string: + - C{identChars} is a string of characters that would be valid identifier characters, + defaulting to all alphanumerics + "_" and "$" + - C{caseless} allows case-insensitive matching, default is C{False}. + + Example:: + Keyword("start").parseString("start") # -> ['start'] + Keyword("start").parseString("starting") # -> Exception + + For case-insensitive matching, use L{CaselessKeyword}. 
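+
+    The default set of identifier characters used to detect keyword boundaries can
+    be changed for subsequently created C{Keyword} instances via the static method
+    L{setDefaultKeywordChars}, e.g.::
+
+        Keyword.setDefaultKeywordChars(alphanums)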
+ """ + DEFAULT_KEYWORD_CHARS = alphanums+"_$" + + def __init__( self, matchString, identChars=None, caseless=False ): + super(Keyword,self).__init__() + if identChars is None: + identChars = Keyword.DEFAULT_KEYWORD_CHARS + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Keyword; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.name = '"%s"' % self.match + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False + self.mayIndexError = False + self.caseless = caseless + if caseless: + self.caselessmatch = matchString.upper() + identChars = identChars.upper() + self.identChars = set(identChars) + + def parseImpl( self, instring, loc, doActions=True ): + if self.caseless: + if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and + (loc == 0 or instring[loc-1].upper() not in self.identChars) ): + return loc+self.matchLen, self.match + else: + if (instring[loc] == self.firstMatchChar and + (self.matchLen==1 or instring.startswith(self.match,loc)) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and + (loc == 0 or instring[loc-1] not in self.identChars) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) + + def copy(self): + c = super(Keyword,self).copy() + c.identChars = Keyword.DEFAULT_KEYWORD_CHARS + return c + + @staticmethod + def setDefaultKeywordChars( chars ): + """Overrides the default Keyword chars + """ + Keyword.DEFAULT_KEYWORD_CHARS = chars + +class CaselessLiteral(Literal): + """ + Token to match a specified string, ignoring case of letters. + Note: the matched results will always be in the case of the given + match string, NOT the case of the input text. + + Example:: + OneOrMore(CaselessLiteral("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD', 'CMD'] + + (Contrast with example for L{CaselessKeyword}.) + """ + def __init__( self, matchString ): + super(CaselessLiteral,self).__init__( matchString.upper() ) + # Preserve the defining literal. + self.returnString = matchString + self.name = "'%s'" % self.returnString + self.errmsg = "Expected " + self.name + + def parseImpl( self, instring, loc, doActions=True ): + if instring[ loc:loc+self.matchLen ].upper() == self.match: + return loc+self.matchLen, self.returnString + raise ParseException(instring, loc, self.errmsg, self) + +class CaselessKeyword(Keyword): + """ + Caseless version of L{Keyword}. + + Example:: + OneOrMore(CaselessKeyword("CMD")).parseString("cmd CMD Cmd10") # -> ['CMD', 'CMD'] + + (Contrast with example for L{CaselessLiteral}.) + """ + def __init__( self, matchString, identChars=None ): + super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) + + def parseImpl( self, instring, loc, doActions=True ): + if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) + +class CloseMatch(Token): + """ + A variation on L{Literal} which matches "close" matches, that is, + strings with at most 'n' mismatching characters. 
C{CloseMatch} takes parameters: + - C{match_string} - string to be matched + - C{maxMismatches} - (C{default=1}) maximum number of mismatches allowed to count as a match + + The results from a successful parse will contain the matched text from the input string and the following named results: + - C{mismatches} - a list of the positions within the match_string where mismatches were found + - C{original} - the original match_string used to compare against the input string + + If C{mismatches} is an empty list, then the match was an exact match. + + Example:: + patt = CloseMatch("ATCATCGAATGGA") + patt.parseString("ATCATCGAAXGGA") # -> (['ATCATCGAAXGGA'], {'mismatches': [[9]], 'original': ['ATCATCGAATGGA']}) + patt.parseString("ATCAXCGAAXGGA") # -> Exception: Expected 'ATCATCGAATGGA' (with up to 1 mismatches) (at char 0), (line:1, col:1) + + # exact match + patt.parseString("ATCATCGAATGGA") # -> (['ATCATCGAATGGA'], {'mismatches': [[]], 'original': ['ATCATCGAATGGA']}) + + # close match allowing up to 2 mismatches + patt = CloseMatch("ATCATCGAATGGA", maxMismatches=2) + patt.parseString("ATCAXCGAAXGGA") # -> (['ATCAXCGAAXGGA'], {'mismatches': [[4, 9]], 'original': ['ATCATCGAATGGA']}) + """ + def __init__(self, match_string, maxMismatches=1): + super(CloseMatch,self).__init__() + self.name = match_string + self.match_string = match_string + self.maxMismatches = maxMismatches + self.errmsg = "Expected %r (with up to %d mismatches)" % (self.match_string, self.maxMismatches) + self.mayIndexError = False + self.mayReturnEmpty = False + + def parseImpl( self, instring, loc, doActions=True ): + start = loc + instrlen = len(instring) + maxloc = start + len(self.match_string) + + if maxloc <= instrlen: + match_string = self.match_string + match_stringloc = 0 + mismatches = [] + maxMismatches = self.maxMismatches + + for match_stringloc,s_m in enumerate(zip(instring[loc:maxloc], self.match_string)): + src,mat = s_m + if src != mat: + mismatches.append(match_stringloc) + if len(mismatches) > maxMismatches: + break + else: + loc = match_stringloc + 1 + results = ParseResults([instring[start:loc]]) + results['original'] = self.match_string + results['mismatches'] = mismatches + return loc, results + + raise ParseException(instring, loc, self.errmsg, self) + + +class Word(Token): + """ + Token for matching words composed of allowed character sets. + Defined with string containing all allowed initial characters, + an optional string containing allowed body characters (if omitted, + defaults to the initial character set), and an optional minimum, + maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. An optional + C{excludeChars} parameter can list characters that might be found in + the input C{bodyChars} string; useful to define a word of all printables + except for one or two characters, for instance. + + L{srange} is useful for defining custom character set strings for defining + C{Word} expressions, using range notation from regular expression character sets. + + A common mistake is to use C{Word} to match a specific literal string, as in + C{Word("Address")}. Remember that C{Word} uses the string argument to define + I{sets} of matchable characters. This expression would match "Add", "AAA", + "dAred", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'. + To match an exact literal string, use L{Literal} or L{Keyword}. 
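+
+    Performance note: when constructed with the default C{min}/C{max}/C{exact}
+    arguments and no space characters in its character sets, C{Word} compiles an
+    equivalent regular expression internally to speed up matching (see the
+    constructor below).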
+ + pyparsing includes helper strings for building Words: + - L{alphas} + - L{nums} + - L{alphanums} + - L{hexnums} + - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.) + - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.) + - L{printables} (any non-whitespace character) + + Example:: + # a word composed of digits + integer = Word(nums) # equivalent to Word("0123456789") or Word(srange("0-9")) + + # a word with a leading capital, and zero or more lowercase + capital_word = Word(alphas.upper(), alphas.lower()) + + # hostnames are alphanumeric, with leading alpha, and '-' + hostname = Word(alphas, alphanums+'-') + + # roman numeral (not a strict parser, accepts invalid mix of characters) + roman = Word("IVXLCDM") + + # any string of non-whitespace characters, except for ',' + csv_value = Word(printables, excludeChars=",") + """ + def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): + super(Word,self).__init__() + if excludeChars: + initChars = ''.join(c for c in initChars if c not in excludeChars) + if bodyChars: + bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) + self.initCharsOrig = initChars + self.initChars = set(initChars) + if bodyChars : + self.bodyCharsOrig = bodyChars + self.bodyChars = set(bodyChars) + else: + self.bodyCharsOrig = initChars + self.bodyChars = set(initChars) + + self.maxSpecified = max > 0 + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.asKeyword = asKeyword + + if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): + if self.bodyCharsOrig == self.initCharsOrig: + self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) + elif len(self.initCharsOrig) == 1: + self.reString = "%s[%s]*" % \ + (re.escape(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + else: + self.reString = "[%s][%s]*" % \ + (_escapeRegexRangeChars(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + if self.asKeyword: + self.reString = r"\b"+self.reString+r"\b" + try: + self.re = re.compile( self.reString ) + except Exception: + self.re = None + + def parseImpl( self, instring, loc, doActions=True ): + if self.re: + result = self.re.match(instring,loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + return loc, result.group() + + if not(instring[ loc ] in self.initChars): + raise ParseException(instring, loc, self.errmsg, self) + + start = loc + loc += 1 + instrlen = len(instring) + bodychars = self.bodyChars + maxloc = start + self.maxLen + maxloc = min( maxloc, instrlen ) + while loc < maxloc and instring[loc] in bodychars: + loc += 1 + + throwException = False + if loc - start < self.minLen: + throwException = True + if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: + throwException = True + if self.asKeyword: + if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars): + throwException = True + + if throwException: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, 
instring[start:loc] + + def __str__( self ): + try: + return super(Word,self).__str__() + except Exception: + pass + + + if self.strRepr is None: + + def charsAsStr(s): + if len(s)>4: + return s[:4]+"..." + else: + return s + + if ( self.initCharsOrig != self.bodyCharsOrig ): + self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) + else: + self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) + + return self.strRepr + + +class Regex(Token): + r""" + Token for matching strings that match a given regular expression. + Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. + If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as + named parse results. + + Example:: + realnum = Regex(r"[+-]?\d+\.\d*") + date = Regex(r'(?P<year>\d{4})-(?P<month>\d\d?)-(?P<day>\d\d?)') + # ref: http://stackoverflow.com/questions/267399/how-do-you-match-only-valid-roman-numerals-with-a-regular-expression + roman = Regex(r"M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})") + """ + compiledREtype = type(re.compile("[A-Z]")) + def __init__( self, pattern, flags=0): + """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.""" + super(Regex,self).__init__() + + if isinstance(pattern, basestring): + if not pattern: + warnings.warn("null string passed to Regex; use Empty() instead", + SyntaxWarning, stacklevel=2) + + self.pattern = pattern + self.flags = flags + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % pattern, + SyntaxWarning, stacklevel=2) + raise + + elif isinstance(pattern, Regex.compiledREtype): + self.re = pattern + self.pattern = \ + self.reString = str(pattern) + self.flags = flags + + else: + raise ValueError("Regex may only be constructed with a string or a compiled RE object") + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + result = self.re.match(instring,loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + d = result.groupdict() + ret = ParseResults(result.group()) + if d: + for k in d: + ret[k] = d[k] + return loc,ret + + def __str__( self ): + try: + return super(Regex,self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "Re:(%s)" % repr(self.pattern) + + return self.strRepr + + +class QuotedString(Token): + r""" + Token for matching strings that are delimited by quoting characters. 
+ + Defined with the following parameters: + - quoteChar - string of one or more characters defining the quote delimiting string + - escChar - character to escape quotes, typically backslash (default=C{None}) + - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=C{None}) + - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) + - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) + - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) + - convertWhitespaceEscapes - convert escaped whitespace (C{'\t'}, C{'\n'}, etc.) to actual whitespace (default=C{True}) + + Example:: + qs = QuotedString('"') + print(qs.searchString('lsjdf "This is the quote" sldjf')) + complex_qs = QuotedString('{{', endQuoteChar='}}') + print(complex_qs.searchString('lsjdf {{This is the "quote"}} sldjf')) + sql_qs = QuotedString('"', escQuote='""') + print(sql_qs.searchString('lsjdf "This is the quote with ""embedded"" quotes" sldjf')) + prints:: + [['This is the quote']] + [['This is the "quote"']] + [['This is the quote with "embedded" quotes']] + """ + def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None, convertWhitespaceEscapes=True): + super(QuotedString,self).__init__() + + # remove white space from quote chars - wont work anyway + quoteChar = quoteChar.strip() + if not quoteChar: + warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + raise SyntaxError() + + if endQuoteChar is None: + endQuoteChar = quoteChar + else: + endQuoteChar = endQuoteChar.strip() + if not endQuoteChar: + warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + raise SyntaxError() + + self.quoteChar = quoteChar + self.quoteCharLen = len(quoteChar) + self.firstQuoteChar = quoteChar[0] + self.endQuoteChar = endQuoteChar + self.endQuoteCharLen = len(endQuoteChar) + self.escChar = escChar + self.escQuote = escQuote + self.unquoteResults = unquoteResults + self.convertWhitespaceEscapes = convertWhitespaceEscapes + + if multiline: + self.flags = re.MULTILINE | re.DOTALL + self.pattern = r'%s(?:[^%s%s]' % \ + ( re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + else: + self.flags = 0 + self.pattern = r'%s(?:[^%s\n\r%s]' % \ + ( re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + if len(self.endQuoteChar) > 1: + self.pattern += ( + '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]), + _escapeRegexRangeChars(self.endQuoteChar[i])) + for i in range(len(self.endQuoteChar)-1,0,-1)) + ')' + ) + if escQuote: + self.pattern += (r'|(?:%s)' % re.escape(escQuote)) + if escChar: + self.pattern += (r'|(?:%s.)' % re.escape(escChar)) + self.escCharReplacePattern = re.escape(self.escChar)+"(.)" + self.pattern += (r')*%s' % re.escape(self.endQuoteChar)) + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern, + SyntaxWarning, stacklevel=2) + raise + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.mayReturnEmpty = True + + def 
parseImpl( self, instring, loc, doActions=True ): + result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + ret = result.group() + + if self.unquoteResults: + + # strip off quotes + ret = ret[self.quoteCharLen:-self.endQuoteCharLen] + + if isinstance(ret,basestring): + # replace escaped whitespace + if '\\' in ret and self.convertWhitespaceEscapes: + ws_map = { + r'\t' : '\t', + r'\n' : '\n', + r'\f' : '\f', + r'\r' : '\r', + } + for wslit,wschar in ws_map.items(): + ret = ret.replace(wslit, wschar) + + # replace escaped characters + if self.escChar: + ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret) + + # replace escaped quotes + if self.escQuote: + ret = ret.replace(self.escQuote, self.endQuoteChar) + + return loc, ret + + def __str__( self ): + try: + return super(QuotedString,self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar) + + return self.strRepr + + +class CharsNotIn(Token): + """ + Token for matching words composed of characters I{not} in a given set (will + include whitespace in matched characters if not listed in the provided exclusion set - see example). + Defined with string containing all disallowed characters, and an optional + minimum, maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. + + Example:: + # define a comma-separated-value as anything that is not a ',' + csv_value = CharsNotIn(',') + print(delimitedList(csv_value).parseString("dkls,lsdkjf,s12 34,@!#,213")) + prints:: + ['dkls', 'lsdkjf', 's12 34', '@!#', '213'] + """ + def __init__( self, notChars, min=1, max=0, exact=0 ): + super(CharsNotIn,self).__init__() + self.skipWhitespace = False + self.notChars = notChars + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = ( self.minLen == 0 ) + self.mayIndexError = False + + def parseImpl( self, instring, loc, doActions=True ): + if instring[loc] in self.notChars: + raise ParseException(instring, loc, self.errmsg, self) + + start = loc + loc += 1 + notchars = self.notChars + maxlen = min( start+self.maxLen, len(instring) ) + while loc < maxlen and \ + (instring[loc] not in notchars): + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + def __str__( self ): + try: + return super(CharsNotIn, self).__str__() + except Exception: + pass + + if self.strRepr is None: + if len(self.notChars) > 4: + self.strRepr = "!W:(%s...)" % self.notChars[:4] + else: + self.strRepr = "!W:(%s)" % self.notChars + + return self.strRepr + +class White(Token): + """ + Special matching class for matching whitespace. Normally, whitespace is ignored + by pyparsing grammars. This class is included when some whitespace structures + are significant. Define with a string containing the whitespace characters to be + matched; default is C{" \\t\\r\\n"}. 
Also takes optional C{min}, C{max}, and C{exact} arguments, + as defined for the C{L{Word}} class. + """ + whiteStrs = { + " " : "<SPC>", + "\t": "<TAB>", + "\n": "<LF>", + "\r": "<CR>", + "\f": "<FF>", + } + def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): + super(White,self).__init__() + self.matchWhite = ws + self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) + #~ self.leaveWhitespace() + self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) + self.mayReturnEmpty = True + self.errmsg = "Expected " + self.name + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + def parseImpl( self, instring, loc, doActions=True ): + if not(instring[ loc ] in self.matchWhite): + raise ParseException(instring, loc, self.errmsg, self) + start = loc + loc += 1 + maxloc = start + self.maxLen + maxloc = min( maxloc, len(instring) ) + while loc < maxloc and instring[loc] in self.matchWhite: + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + +class _PositionToken(Token): + def __init__( self ): + super(_PositionToken,self).__init__() + self.name=self.__class__.__name__ + self.mayReturnEmpty = True + self.mayIndexError = False + +class GoToColumn(_PositionToken): + """ + Token to advance to a specific column of input text; useful for tabular report scraping. + """ + def __init__( self, colno ): + super(GoToColumn,self).__init__() + self.col = colno + + def preParse( self, instring, loc ): + if col(loc,instring) != self.col: + instrlen = len(instring) + if self.ignoreExprs: + loc = self._skipIgnorables( instring, loc ) + while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : + loc += 1 + return loc + + def parseImpl( self, instring, loc, doActions=True ): + thiscol = col( loc, instring ) + if thiscol > self.col: + raise ParseException( instring, loc, "Text not in expected column", self ) + newloc = loc + self.col - thiscol + ret = instring[ loc: newloc ] + return newloc, ret + + +class LineStart(_PositionToken): + """ + Matches if current position is at the beginning of a line within the parse string + + Example:: + + test = '''\ + AAA this line + AAA and this line + AAA but not this one + B AAA and definitely not this one + ''' + + for t in (LineStart() + 'AAA' + restOfLine).searchString(test): + print(t) + + Prints:: + ['AAA', ' this line'] + ['AAA', ' and this line'] + + """ + def __init__( self ): + super(LineStart,self).__init__() + self.errmsg = "Expected start of line" + + def parseImpl( self, instring, loc, doActions=True ): + if col(loc, instring) == 1: + return loc, [] + raise ParseException(instring, loc, self.errmsg, self) + +class LineEnd(_PositionToken): + """ + Matches if current position is at the end of a line within the parse string + """ + def __init__( self ): + super(LineEnd,self).__init__() + self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) + self.errmsg = "Expected end of line" + + def parseImpl( self, instring, loc, doActions=True ): + if loc<len(instring): + if instring[loc] == "\n": + return loc+1, "\n" + else: + raise ParseException(instring, loc, self.errmsg, self) + elif loc == len(instring): + return loc+1, [] + else: + raise ParseException(instring, loc, self.errmsg, self) + +class StringStart(_PositionToken): + """ + Matches if current position is at the beginning of 
the parse string + """ + def __init__( self ): + super(StringStart,self).__init__() + self.errmsg = "Expected start of text" + + def parseImpl( self, instring, loc, doActions=True ): + if loc != 0: + # see if entire string up to here is just whitespace and ignoreables + if loc != self.preParse( instring, 0 ): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class StringEnd(_PositionToken): + """ + Matches if current position is at the end of the parse string + """ + def __init__( self ): + super(StringEnd,self).__init__() + self.errmsg = "Expected end of text" + + def parseImpl( self, instring, loc, doActions=True ): + if loc < len(instring): + raise ParseException(instring, loc, self.errmsg, self) + elif loc == len(instring): + return loc+1, [] + elif loc > len(instring): + return loc, [] + else: + raise ParseException(instring, loc, self.errmsg, self) + +class WordStart(_PositionToken): + """ + Matches if the current position is at the beginning of a Word, and + is not preceded by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of + the string being parsed, or at the beginning of a line. + """ + def __init__(self, wordChars = printables): + super(WordStart,self).__init__() + self.wordChars = set(wordChars) + self.errmsg = "Not at the start of a word" + + def parseImpl(self, instring, loc, doActions=True ): + if loc != 0: + if (instring[loc-1] in self.wordChars or + instring[loc] not in self.wordChars): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class WordEnd(_PositionToken): + """ + Matches if the current position is at the end of a Word, and + is not followed by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of + the string being parsed, or at the end of a line. + """ + def __init__(self, wordChars = printables): + super(WordEnd,self).__init__() + self.wordChars = set(wordChars) + self.skipWhitespace = False + self.errmsg = "Not at the end of a word" + + def parseImpl(self, instring, loc, doActions=True ): + instrlen = len(instring) + if instrlen>0 and loc<instrlen: + if (instring[loc] in self.wordChars or + instring[loc-1] not in self.wordChars): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + + +class ParseExpression(ParserElement): + """ + Abstract subclass of ParserElement, for combining and post-processing parsed tokens. 
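Before moving on to the expression combinators, a short illustrative sketch (not part of the library source) of the word-boundary tokens defined above, using C{WordEnd} to emulate a regex word boundary:

    from pyparsing import Literal, WordEnd, alphanums

    bare = Literal("if")
    bounded = Literal("if") + WordEnd(alphanums)
    print(bare.searchString("iffy if"))     # -> [['if'], ['if']]
    print(bounded.searchString("iffy if"))  # -> [['if']]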
+ """ + def __init__( self, exprs, savelist = False ): + super(ParseExpression,self).__init__(savelist) + if isinstance( exprs, _generatorType ): + exprs = list(exprs) + + if isinstance( exprs, basestring ): + self.exprs = [ ParserElement._literalStringClass( exprs ) ] + elif isinstance( exprs, Iterable ): + exprs = list(exprs) + # if sequence of strings provided, wrap with Literal + if all(isinstance(expr, basestring) for expr in exprs): + exprs = map(ParserElement._literalStringClass, exprs) + self.exprs = list(exprs) + else: + try: + self.exprs = list( exprs ) + except TypeError: + self.exprs = [ exprs ] + self.callPreparse = False + + def __getitem__( self, i ): + return self.exprs[i] + + def append( self, other ): + self.exprs.append( other ) + self.strRepr = None + return self + + def leaveWhitespace( self ): + """Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on + all contained expressions.""" + self.skipWhitespace = False + self.exprs = [ e.copy() for e in self.exprs ] + for e in self.exprs: + e.leaveWhitespace() + return self + + def ignore( self, other ): + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + super( ParseExpression, self).ignore( other ) + for e in self.exprs: + e.ignore( self.ignoreExprs[-1] ) + else: + super( ParseExpression, self).ignore( other ) + for e in self.exprs: + e.ignore( self.ignoreExprs[-1] ) + return self + + def __str__( self ): + try: + return super(ParseExpression,self).__str__() + except Exception: + pass + + if self.strRepr is None: + self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) ) + return self.strRepr + + def streamline( self ): + super(ParseExpression,self).streamline() + + for e in self.exprs: + e.streamline() + + # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d ) + # but only if there are no parse actions or resultsNames on the nested And's + # (likewise for Or's and MatchFirst's) + if ( len(self.exprs) == 2 ): + other = self.exprs[0] + if ( isinstance( other, self.__class__ ) and + not(other.parseAction) and + other.resultsName is None and + not other.debug ): + self.exprs = other.exprs[:] + [ self.exprs[1] ] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + + other = self.exprs[-1] + if ( isinstance( other, self.__class__ ) and + not(other.parseAction) and + other.resultsName is None and + not other.debug ): + self.exprs = self.exprs[:-1] + other.exprs[:] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + + self.errmsg = "Expected " + _ustr(self) + + return self + + def setResultsName( self, name, listAllMatches=False ): + ret = super(ParseExpression,self).setResultsName(name,listAllMatches) + return ret + + def validate( self, validateTrace=[] ): + tmp = validateTrace[:]+[self] + for e in self.exprs: + e.validate(tmp) + self.checkRecursion( [] ) + + def copy(self): + ret = super(ParseExpression,self).copy() + ret.exprs = [e.copy() for e in self.exprs] + return ret + +class And(ParseExpression): + """ + Requires all given C{ParseExpression}s to be found in the given order. + Expressions may be separated by whitespace. + May be constructed using the C{'+'} operator. + May also be constructed using the C{'-'} operator, which will suppress backtracking. 
+ + Example:: + integer = Word(nums) + name_expr = OneOrMore(Word(alphas)) + + expr = And([integer("id"),name_expr("name"),integer("age")]) + # more easily written as: + expr = integer("id") + name_expr("name") + integer("age") + """ + + class _ErrorStop(Empty): + def __init__(self, *args, **kwargs): + super(And._ErrorStop,self).__init__(*args, **kwargs) + self.name = '-' + self.leaveWhitespace() + + def __init__( self, exprs, savelist = True ): + super(And,self).__init__(exprs, savelist) + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + self.setWhitespaceChars( self.exprs[0].whiteChars ) + self.skipWhitespace = self.exprs[0].skipWhitespace + self.callPreparse = True + + def parseImpl( self, instring, loc, doActions=True ): + # pass False as last arg to _parse for first element, since we already + # pre-parsed the string as part of our And pre-parsing + loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False ) + errorStop = False + for e in self.exprs[1:]: + if isinstance(e, And._ErrorStop): + errorStop = True + continue + if errorStop: + try: + loc, exprtokens = e._parse( instring, loc, doActions ) + except ParseSyntaxException: + raise + except ParseBaseException as pe: + pe.__traceback__ = None + raise ParseSyntaxException._from_exception(pe) + except IndexError: + raise ParseSyntaxException(instring, len(instring), self.errmsg, self) + else: + loc, exprtokens = e._parse( instring, loc, doActions ) + if exprtokens or exprtokens.haskeys(): + resultlist += exprtokens + return loc, resultlist + + def __iadd__(self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + return self.append( other ) #And( [ self, other ] ) + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + if not e.mayReturnEmpty: + break + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + +class Or(ParseExpression): + """ + Requires that at least one C{ParseExpression} is found. + If two expressions match, the expression that matches the longest string will be used. + May be constructed using the C{'^'} operator. + + Example:: + # construct Or using '^' operator + + number = Word(nums) ^ Combine(Word(nums) + '.' 
+ Word(nums)) + print(number.searchString("123 3.1416 789")) + prints:: + [['123'], ['3.1416'], ['789']] + """ + def __init__( self, exprs, savelist = False ): + super(Or,self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + else: + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + maxExcLoc = -1 + maxException = None + matches = [] + for e in self.exprs: + try: + loc2 = e.tryParse( instring, loc ) + except ParseException as err: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring,len(instring),e.errmsg,self) + maxExcLoc = len(instring) + else: + # save match among all matches, to retry longest to shortest + matches.append((loc2, e)) + + if matches: + matches.sort(key=lambda x: -x[0]) + for _,e in matches: + try: + return e._parse( instring, loc, doActions ) + except ParseException as err: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + + def __ixor__(self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + return self.append( other ) #Or( [ self, other ] ) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class MatchFirst(ParseExpression): + """ + Requires that at least one C{ParseExpression} is found. + If two expressions match, the first one listed is the one that will match. + May be constructed using the C{'|'} operator. + + Example:: + # construct MatchFirst using '|' operator + + # watch the order of expressions to match + number = Word(nums) | Combine(Word(nums) + '.' + Word(nums)) + print(number.searchString("123 3.1416 789")) # Fail! -> [['123'], ['3'], ['1416'], ['789']] + + # put more selective expression first + number = Combine(Word(nums) + '.' 
+ Word(nums)) | Word(nums) + print(number.searchString("123 3.1416 789")) # Better -> [['123'], ['3.1416'], ['789']] + """ + def __init__( self, exprs, savelist = False ): + super(MatchFirst,self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + else: + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + maxExcLoc = -1 + maxException = None + for e in self.exprs: + try: + ret = e._parse( instring, loc, doActions ) + return ret + except ParseException as err: + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring,len(instring),e.errmsg,self) + maxExcLoc = len(instring) + + # only got here if no expression matched, raise exception for match that made it the furthest + else: + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + def __ior__(self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass( other ) + return self.append( other ) #MatchFirst( [ self, other ] ) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class Each(ParseExpression): + """ + Requires all given C{ParseExpression}s to be found, but in any order. + Expressions may be separated by whitespace. + May be constructed using the C{'&'} operator. 
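The practical difference between C{Or} and C{MatchFirst}, both defined above, deserves a side-by-side sketch (illustrative only, not part of the source):

    from pyparsing import Word, nums, Combine

    int_part = Word(nums)
    real = Combine(Word(nums) + '.' + Word(nums))

    # MatchFirst ('|') takes the first alternative that matches...
    print((int_part | real).parseString("3.1416"))  # -> ['3']
    # ...while Or ('^') tries all alternatives and keeps the longest match
    print((int_part ^ real).parseString("3.1416"))  # -> ['3.1416']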
+ + Example:: + color = oneOf("RED ORANGE YELLOW GREEN BLUE PURPLE BLACK WHITE BROWN") + shape_type = oneOf("SQUARE CIRCLE TRIANGLE STAR HEXAGON OCTAGON") + integer = Word(nums) + shape_attr = "shape:" + shape_type("shape") + posn_attr = "posn:" + Group(integer("x") + ',' + integer("y"))("posn") + color_attr = "color:" + color("color") + size_attr = "size:" + integer("size") + + # use Each (using operator '&') to accept attributes in any order + # (shape and posn are required, color and size are optional) + shape_spec = shape_attr & posn_attr & Optional(color_attr) & Optional(size_attr) + + shape_spec.runTests(''' + shape: SQUARE color: BLACK posn: 100, 120 + shape: CIRCLE size: 50 color: BLUE posn: 50,80 + color:GREEN size:20 shape:TRIANGLE posn:20,40 + ''' + ) + prints:: + shape: SQUARE color: BLACK posn: 100, 120 + ['shape:', 'SQUARE', 'color:', 'BLACK', 'posn:', ['100', ',', '120']] + - color: BLACK + - posn: ['100', ',', '120'] + - x: 100 + - y: 120 + - shape: SQUARE + + + shape: CIRCLE size: 50 color: BLUE posn: 50,80 + ['shape:', 'CIRCLE', 'size:', '50', 'color:', 'BLUE', 'posn:', ['50', ',', '80']] + - color: BLUE + - posn: ['50', ',', '80'] + - x: 50 + - y: 80 + - shape: CIRCLE + - size: 50 + + + color: GREEN size: 20 shape: TRIANGLE posn: 20,40 + ['color:', 'GREEN', 'size:', '20', 'shape:', 'TRIANGLE', 'posn:', ['20', ',', '40']] + - color: GREEN + - posn: ['20', ',', '40'] + - x: 20 + - y: 40 + - shape: TRIANGLE + - size: 20 + """ + def __init__( self, exprs, savelist = True ): + super(Each,self).__init__(exprs, savelist) + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + self.skipWhitespace = True + self.initExprGroups = True + + def parseImpl( self, instring, loc, doActions=True ): + if self.initExprGroups: + self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) + opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] + opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)] + self.optionals = opt1 + opt2 + self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] + self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] + self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] + self.required += self.multirequired + self.initExprGroups = False + tmpLoc = loc + tmpReqd = self.required[:] + tmpOpt = self.optionals[:] + matchOrder = [] + + keepMatching = True + while keepMatching: + tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired + failed = [] + for e in tmpExprs: + try: + tmpLoc = e.tryParse( instring, tmpLoc ) + except ParseException: + failed.append(e) + else: + matchOrder.append(self.opt1map.get(id(e),e)) + if e in tmpReqd: + tmpReqd.remove(e) + elif e in tmpOpt: + tmpOpt.remove(e) + if len(failed) == len(tmpExprs): + keepMatching = False + + if tmpReqd: + missing = ", ".join(_ustr(e) for e in tmpReqd) + raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) + + # add any unmatched Optionals, in case they have default values defined + matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] + + resultlist = [] + for e in matchOrder: + loc,results = e._parse(instring,loc,doActions) + resultlist.append(results) + + finalResults = sum(resultlist, ParseResults([])) + return loc, finalResults + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " & 
".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class ParseElementEnhance(ParserElement): + """ + Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. + """ + def __init__( self, expr, savelist=False ): + super(ParseElementEnhance,self).__init__(savelist) + if isinstance( expr, basestring ): + if issubclass(ParserElement._literalStringClass, Token): + expr = ParserElement._literalStringClass(expr) + else: + expr = ParserElement._literalStringClass(Literal(expr)) + self.expr = expr + self.strRepr = None + if expr is not None: + self.mayIndexError = expr.mayIndexError + self.mayReturnEmpty = expr.mayReturnEmpty + self.setWhitespaceChars( expr.whiteChars ) + self.skipWhitespace = expr.skipWhitespace + self.saveAsList = expr.saveAsList + self.callPreparse = expr.callPreparse + self.ignoreExprs.extend(expr.ignoreExprs) + + def parseImpl( self, instring, loc, doActions=True ): + if self.expr is not None: + return self.expr._parse( instring, loc, doActions, callPreParse=False ) + else: + raise ParseException("",loc,self.errmsg,self) + + def leaveWhitespace( self ): + self.skipWhitespace = False + self.expr = self.expr.copy() + if self.expr is not None: + self.expr.leaveWhitespace() + return self + + def ignore( self, other ): + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + super( ParseElementEnhance, self).ignore( other ) + if self.expr is not None: + self.expr.ignore( self.ignoreExprs[-1] ) + else: + super( ParseElementEnhance, self).ignore( other ) + if self.expr is not None: + self.expr.ignore( self.ignoreExprs[-1] ) + return self + + def streamline( self ): + super(ParseElementEnhance,self).streamline() + if self.expr is not None: + self.expr.streamline() + return self + + def checkRecursion( self, parseElementList ): + if self in parseElementList: + raise RecursiveGrammarException( parseElementList+[self] ) + subRecCheckList = parseElementList[:] + [ self ] + if self.expr is not None: + self.expr.checkRecursion( subRecCheckList ) + + def validate( self, validateTrace=[] ): + tmp = validateTrace[:]+[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion( [] ) + + def __str__( self ): + try: + return super(ParseElementEnhance,self).__str__() + except Exception: + pass + + if self.strRepr is None and self.expr is not None: + self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) + return self.strRepr + + +class FollowedBy(ParseElementEnhance): + """ + Lookahead matching of the given parse expression. C{FollowedBy} + does I{not} advance the parsing position within the input string, it only + verifies that the specified parse expression matches at the current + position. C{FollowedBy} always returns a null token list. 
+
+    Example::
+        # use FollowedBy to match a label only if it is followed by a ':'
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+
+        OneOrMore(attr_expr).parseString("shape: SQUARE color: BLACK posn: upper left").pprint()
+    prints::
+        [['shape', 'SQUARE'], ['color', 'BLACK'], ['posn', 'upper left']]
+    """
+    def __init__( self, expr ):
+        super(FollowedBy,self).__init__(expr)
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        self.expr.tryParse( instring, loc )
+        return loc, []
+
+
+class NotAny(ParseElementEnhance):
+    """
+    Lookahead to disallow matching with the given parse expression.  C{NotAny}
+    does I{not} advance the parsing position within the input string, it only
+    verifies that the specified parse expression does I{not} match at the current
+    position.  Also, C{NotAny} does I{not} skip over leading whitespace. C{NotAny}
+    always returns a null token list.  May be constructed using the '~' operator.
+
+    Example::
+        AND, OR, NOT = map(CaselessKeyword, "AND OR NOT".split())
+
+        # take care not to mistake keywords for identifiers
+        ident = ~(AND | OR | NOT) + Word(alphas)
+        boolean_term = Optional(NOT) + ident
+
+        # very crude boolean expression - to support parenthesis groups and
+        # operation hierarchy, use infixNotation
+        boolean_expr = boolean_term + ZeroOrMore((AND | OR) + boolean_term)
+    """
+    def __init__( self, expr ):
+        super(NotAny,self).__init__(expr)
+        #~ self.leaveWhitespace()
+        self.skipWhitespace = False  # do NOT use self.leaveWhitespace(), don't want to propagate to exprs
+        self.mayReturnEmpty = True
+        self.errmsg = "Found unwanted token, "+_ustr(self.expr)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        if self.expr.canParseNext(instring, loc):
+            raise ParseException(instring, loc, self.errmsg, self)
+        return loc, []
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "~{" + _ustr(self.expr) + "}"
+
+        return self.strRepr
+
+class _MultipleMatch(ParseElementEnhance):
+    def __init__( self, expr, stopOn=None):
+        super(_MultipleMatch, self).__init__(expr)
+        self.saveAsList = True
+        ender = stopOn
+        if isinstance(ender, basestring):
+            ender = ParserElement._literalStringClass(ender)
+        self.not_ender = ~ender if ender is not None else None
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        self_expr_parse = self.expr._parse
+        self_skip_ignorables = self._skipIgnorables
+        check_ender = self.not_ender is not None
+        if check_ender:
+            try_not_ender = self.not_ender.tryParse
+
+        # must be at least one (but first see if we are the stopOn sentinel;
+        # if so, fail)
+        if check_ender:
+            try_not_ender(instring, loc)
+        loc, tokens = self_expr_parse( instring, loc, doActions, callPreParse=False )
+        try:
+            hasIgnoreExprs = (not not self.ignoreExprs)
+            while 1:
+                if check_ender:
+                    try_not_ender(instring, loc)
+                if hasIgnoreExprs:
+                    preloc = self_skip_ignorables( instring, loc )
+                else:
+                    preloc = loc
+                loc, tmptokens = self_expr_parse( instring, preloc, doActions )
+                if tmptokens or tmptokens.haskeys():
+                    tokens += tmptokens
+        except (ParseException,IndexError):
+            pass
+
+        return loc, tokens
+
+class OneOrMore(_MultipleMatch):
+    """
+    Repetition of one or more of the given expression.
+
+    Parameters:
+     - expr - expression that must match one or more times
+     - stopOn - (default=C{None}) - expression for a terminating sentinel
+          (only required if the sentinel would ordinarily match the repetition
+          expression)
+
+    Example::
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+        text = "shape: SQUARE posn: upper left color: BLACK"
+        OneOrMore(attr_expr).parseString(text).pprint()  # Fail! read 'color' as data instead of next label -> [['shape', 'SQUARE color']]
+
+        # use stopOn attribute for OneOrMore to avoid reading label string as part of the data
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        OneOrMore(attr_expr).parseString(text).pprint() # Better -> [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'BLACK']]
+
+        # could also be written as
+        (attr_expr * (1,)).parseString(text).pprint()
+    """
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "{" + _ustr(self.expr) + "}..."
+
+        return self.strRepr
+
+class ZeroOrMore(_MultipleMatch):
+    """
+    Optional repetition of zero or more of the given expression.
+
+    Parameters:
+     - expr - expression that must match zero or more times
+     - stopOn - (default=C{None}) - expression for a terminating sentinel
+          (only required if the sentinel would ordinarily match the repetition
+          expression)
+
+    Example: similar to L{OneOrMore}
+    """
+    def __init__( self, expr, stopOn=None):
+        super(ZeroOrMore,self).__init__(expr, stopOn=stopOn)
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        try:
+            return super(ZeroOrMore, self).parseImpl(instring, loc, doActions)
+        except (ParseException,IndexError):
+            return loc, []
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "[" + _ustr(self.expr) + "]..."
+
+        return self.strRepr
+
+class _NullToken(object):
+    def __bool__(self):
+        return False
+    __nonzero__ = __bool__
+    def __str__(self):
+        return ""
+
+_optionalNotMatched = _NullToken()
+class Optional(ParseElementEnhance):
+    """
+    Optional matching of the given expression.
+
+    Parameters:
+     - expr - expression that is matched at most once (zero or one time)
+     - default (optional) - value to be returned if the optional expression is not found.
+
+    Example::
+        # US postal code can be a 5-digit zip, plus optional 4-digit qualifier
+        zip = Combine(Word(nums, exact=5) + Optional('-' + Word(nums, exact=4)))
+        zip.runTests('''
+            # traditional ZIP code
+            12345
+
+            # ZIP+4 form
+            12101-0001
+
+            # invalid ZIP
+            98765-
+            ''')
+    prints::
+        # traditional ZIP code
+        12345
+        ['12345']
+
+        # ZIP+4 form
+        12101-0001
+        ['12101-0001']
+
+        # invalid ZIP
+        98765-
+             ^
+        FAIL: Expected end of text (at char 5), (line:1, col:6)
+    """
+    def __init__( self, expr, default=_optionalNotMatched ):
+        super(Optional,self).__init__( expr, savelist=False )
+        self.saveAsList = self.expr.saveAsList
+        self.defaultValue = default
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        try:
+            loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False )
+        except (ParseException,IndexError):
+            if self.defaultValue is not _optionalNotMatched:
+                if self.expr.resultsName:
+                    tokens = ParseResults([ self.defaultValue ])
+                    tokens[self.expr.resultsName] = self.defaultValue
+                else:
+                    tokens = [ self.defaultValue ]
+            else:
+                tokens = []
+        return loc, tokens
+
+    def __str__( self ):
+        if hasattr(self,"name"):
+            return self.name
+
+        if self.strRepr is None:
+            self.strRepr = "[" + _ustr(self.expr) + "]"
+
+        return self.strRepr
+
+class SkipTo(ParseElementEnhance):
+    """
+    Token for skipping over all undefined text until the matched expression is found.
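One thing the C{Optional} example above does not show is the C{default} argument; a minimal illustrative sketch (not part of the library source):

    from pyparsing import Word, alphas, nums, Optional

    greeting = Word(alphas) + Optional(Word(nums), default="0")
    print(greeting.parseString("hello 42"))  # -> ['hello', '42']
    print(greeting.parseString("hello"))     # -> ['hello', '0']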
+
+    Parameters:
+     - expr - target expression marking the end of the data to be skipped
+     - include - (default=C{False}) if True, the target expression is also parsed
+          (the skipped text and target expression are returned as a 2-element list).
+     - ignore - (default=C{None}) used to define grammars (typically quoted strings and
+          comments) that might contain false matches to the target expression
+     - failOn - (default=C{None}) define expressions that are not allowed to be
+          included in the skipped text; if found before the target expression is found,
+          the SkipTo is not a match
+
+    Example::
+        report = '''
+            Outstanding Issues Report - 1 Jan 2000
+
+               # | Severity | Description                               |  Days Open
+            -----+----------+-------------------------------------------+-----------
+             101 | Critical | Intermittent system crash                 |          6
+              94 | Cosmetic | Spelling error on Login ('log|n')         |         14
+              79 | Minor    | System slow when running too many reports |         47
+            '''
+        integer = Word(nums)
+        SEP = Suppress('|')
+        # use SkipTo to simply match everything up until the next SEP
+        # - ignore quoted strings, so that a '|' character inside a quoted string does not match
+        # - parse action will call token.strip() for each matched token, i.e., the description body
+        string_data = SkipTo(SEP, ignore=quotedString)
+        string_data.setParseAction(tokenMap(str.strip))
+        ticket_expr = (integer("issue_num") + SEP
+                      + string_data("sev") + SEP
+                      + string_data("desc") + SEP
+                      + integer("days_open"))
+
+        for tkt in ticket_expr.searchString(report):
+            print(tkt.dump())
+    prints::
+        ['101', 'Critical', 'Intermittent system crash', '6']
+        - days_open: 6
+        - desc: Intermittent system crash
+        - issue_num: 101
+        - sev: Critical
+        ['94', 'Cosmetic', "Spelling error on Login ('log|n')", '14']
+        - days_open: 14
+        - desc: Spelling error on Login ('log|n')
+        - issue_num: 94
+        - sev: Cosmetic
+        ['79', 'Minor', 'System slow when running too many reports', '47']
+        - days_open: 47
+        - desc: System slow when running too many reports
+        - issue_num: 79
+        - sev: Minor
+    """
+    def __init__( self, other, include=False, ignore=None, failOn=None ):
+        super( SkipTo, self ).__init__( other )
+        self.ignoreExpr = ignore
+        self.mayReturnEmpty = True
+        self.mayIndexError = False
+        self.includeMatch = include
+        self.asList = False
+        if isinstance(failOn, basestring):
+            self.failOn = ParserElement._literalStringClass(failOn)
+        else:
+            self.failOn = failOn
+        self.errmsg = "No match found for "+_ustr(self.expr)
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        startloc = loc
+        instrlen = len(instring)
+        expr = self.expr
+        expr_parse = self.expr._parse
+        self_failOn_canParseNext = self.failOn.canParseNext if self.failOn is not None else None
+        self_ignoreExpr_tryParse = self.ignoreExpr.tryParse if self.ignoreExpr is not None else None
+
+        tmploc = loc
+        while tmploc <= instrlen:
+            if self_failOn_canParseNext is not None:
+                # break if failOn expression matches
+                if self_failOn_canParseNext(instring, tmploc):
+                    break
+
+            if self_ignoreExpr_tryParse is not None:
+                # advance past ignore expressions
+                while 1:
+                    try:
+                        tmploc = self_ignoreExpr_tryParse(instring, tmploc)
+                    except ParseBaseException:
+                        break
+
+            try:
+                expr_parse(instring, tmploc, doActions=False, callPreParse=False)
+            except (ParseException, IndexError):
+                # no match, advance loc in string
+                tmploc += 1
+            else:
+                # matched skipto expr, done
+                break
+
+        else:
+            # ran off the end of the input string without matching skipto expr, fail
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        # build up return
values + loc = tmploc + skiptext = instring[startloc:loc] + skipresult = ParseResults(skiptext) + + if self.includeMatch: + loc, mat = expr_parse(instring,loc,doActions,callPreParse=False) + skipresult += mat + + return loc, skipresult + +class Forward(ParseElementEnhance): + """ + Forward declaration of an expression to be defined later - + used for recursive grammars, such as algebraic infix notation. + When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. + + Note: take care when assigning to C{Forward} not to overlook precedence of operators. + Specifically, '|' has a lower precedence than '<<', so that:: + fwdExpr << a | b | c + will actually be evaluated as:: + (fwdExpr << a) | b | c + thereby leaving b and c out as parseable alternatives. It is recommended that you + explicitly group the values inserted into the C{Forward}:: + fwdExpr << (a | b | c) + Converting to use the '<<=' operator instead will avoid this problem. + + See L{ParseResults.pprint} for an example of a recursive parser created using + C{Forward}. + """ + def __init__( self, other=None ): + super(Forward,self).__init__( other, savelist=False ) + + def __lshift__( self, other ): + if isinstance( other, basestring ): + other = ParserElement._literalStringClass(other) + self.expr = other + self.strRepr = None + self.mayIndexError = self.expr.mayIndexError + self.mayReturnEmpty = self.expr.mayReturnEmpty + self.setWhitespaceChars( self.expr.whiteChars ) + self.skipWhitespace = self.expr.skipWhitespace + self.saveAsList = self.expr.saveAsList + self.ignoreExprs.extend(self.expr.ignoreExprs) + return self + + def __ilshift__(self, other): + return self << other + + def leaveWhitespace( self ): + self.skipWhitespace = False + return self + + def streamline( self ): + if not self.streamlined: + self.streamlined = True + if self.expr is not None: + self.expr.streamline() + return self + + def validate( self, validateTrace=[] ): + if self not in validateTrace: + tmp = validateTrace[:]+[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion([]) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + return self.__class__.__name__ + ": ..." + + # stubbed out for now - creates awful memory and perf issues + self._revertClass = self.__class__ + self.__class__ = _ForwardNoRecurse + try: + if self.expr is not None: + retString = _ustr(self.expr) + else: + retString = "None" + finally: + self.__class__ = self._revertClass + return self.__class__.__name__ + ": " + retString + + def copy(self): + if self.expr is not None: + return super(Forward,self).copy() + else: + ret = Forward() + ret <<= self + return ret + +class _ForwardNoRecurse(Forward): + def __str__( self ): + return "..." + +class TokenConverter(ParseElementEnhance): + """ + Abstract subclass of C{ParseExpression}, for converting parsed results. + """ + def __init__( self, expr, savelist=False ): + super(TokenConverter,self).__init__( expr )#, savelist ) + self.saveAsList = False + +class Combine(TokenConverter): + """ + Converter to concatenate all matching tokens to a single string. + By default, the matching patterns must also be contiguous in the input string; + this can be disabled by specifying C{'adjacent=False'} in the constructor. + + Example:: + real = Word(nums) + '.' + Word(nums) + print(real.parseString('3.1416')) # -> ['3', '.', '1416'] + # will also erroneously match the following + print(real.parseString('3. 
1416')) # -> ['3', '.', '1416']
+
+        real = Combine(Word(nums) + '.' + Word(nums))
+        print(real.parseString('3.1416')) # -> ['3.1416']
+        # no match when there are internal spaces
+        print(real.parseString('3. 1416')) # -> Exception: Expected W:(0123...)
+    """
+    def __init__( self, expr, joinString="", adjacent=True ):
+        super(Combine,self).__init__( expr )
+        # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself
+        if adjacent:
+            self.leaveWhitespace()
+        self.adjacent = adjacent
+        self.skipWhitespace = True
+        self.joinString = joinString
+        self.callPreparse = True
+
+    def ignore( self, other ):
+        if self.adjacent:
+            ParserElement.ignore(self, other)
+        else:
+            super( Combine, self).ignore( other )
+        return self
+
+    def postParse( self, instring, loc, tokenlist ):
+        retToks = tokenlist.copy()
+        del retToks[:]
+        retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults)
+
+        if self.resultsName and retToks.haskeys():
+            return [ retToks ]
+        else:
+            return retToks
+
+class Group(TokenConverter):
+    """
+    Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.
+
+    Example::
+        ident = Word(alphas)
+        num = Word(nums)
+        term = ident | num
+        func = ident + Optional(delimitedList(term))
+        print(func.parseString("fn a,b,100"))  # -> ['fn', 'a', 'b', '100']
+
+        func = ident + Group(Optional(delimitedList(term)))
+        print(func.parseString("fn a,b,100"))  # -> ['fn', ['a', 'b', '100']]
+    """
+    def __init__( self, expr ):
+        super(Group,self).__init__( expr )
+        self.saveAsList = True
+
+    def postParse( self, instring, loc, tokenlist ):
+        return [ tokenlist ]
+
+class Dict(TokenConverter):
+    """
+    Converter to return a repetitive expression as a list, but also as a dictionary.
+    Each element can also be referenced using the first token in the expression as its key.
+    Useful for tabular report scraping when the first column can be used as an item key.
+
+    Example::
+        data_word = Word(alphas)
+        label = data_word + FollowedBy(':')
+        attr_expr = Group(label + Suppress(':') + OneOrMore(data_word).setParseAction(' '.join))
+
+        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+
+        # print attributes as plain groups
+        print(OneOrMore(attr_expr).parseString(text).dump())
+
+        # instead of OneOrMore(expr), parse using Dict(OneOrMore(Group(expr))) - Dict will auto-assign names
+        result = Dict(OneOrMore(Group(attr_expr))).parseString(text)
+        print(result.dump())
+
+        # access named fields as dict entries, or output as dict
+        print(result['shape'])
+        print(result.asDict())
+    prints::
+        ['shape', 'SQUARE', 'posn', 'upper left', 'color', 'light blue', 'texture', 'burlap']
+
+        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+        - color: light blue
+        - posn: upper left
+        - shape: SQUARE
+        - texture: burlap
+        SQUARE
+        {'color': 'light blue', 'posn': 'upper left', 'texture': 'burlap', 'shape': 'SQUARE'}
+    See more examples at L{ParseResults} of accessing fields by results name.
+ """ + def __init__( self, expr ): + super(Dict,self).__init__( expr ) + self.saveAsList = True + + def postParse( self, instring, loc, tokenlist ): + for i,tok in enumerate(tokenlist): + if len(tok) == 0: + continue + ikey = tok[0] + if isinstance(ikey,int): + ikey = _ustr(tok[0]).strip() + if len(tok)==1: + tokenlist[ikey] = _ParseResultsWithOffset("",i) + elif len(tok)==2 and not isinstance(tok[1],ParseResults): + tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) + else: + dictvalue = tok.copy() #ParseResults(i) + del dictvalue[0] + if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) + else: + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) + + if self.resultsName: + return [ tokenlist ] + else: + return tokenlist + + +class Suppress(TokenConverter): + """ + Converter for ignoring the results of a parsed expression. + + Example:: + source = "a, b, c,d" + wd = Word(alphas) + wd_list1 = wd + ZeroOrMore(',' + wd) + print(wd_list1.parseString(source)) + + # often, delimiters that are useful during parsing are just in the + # way afterward - use Suppress to keep them out of the parsed output + wd_list2 = wd + ZeroOrMore(Suppress(',') + wd) + print(wd_list2.parseString(source)) + prints:: + ['a', ',', 'b', ',', 'c', ',', 'd'] + ['a', 'b', 'c', 'd'] + (See also L{delimitedList}.) + """ + def postParse( self, instring, loc, tokenlist ): + return [] + + def suppress( self ): + return self + + +class OnlyOnce(object): + """ + Wrapper for parse actions, to ensure they are only called once. + """ + def __init__(self, methodCall): + self.callable = _trim_arity(methodCall) + self.called = False + def __call__(self,s,l,t): + if not self.called: + results = self.callable(s,l,t) + self.called = True + return results + raise ParseException(s,l,"") + def reset(self): + self.called = False + +def traceParseAction(f): + """ + Decorator for debugging parse actions. + + When the parse action is called, this decorator will print C{">> entering I{method-name}(line:I{current_source_line}, I{parse_location}, I{matched_tokens})".} + When the parse action completes, the decorator will print C{"<<"} followed by the returned value, or any exception that the parse action raised. + + Example:: + wd = Word(alphas) + + @traceParseAction + def remove_duplicate_chars(tokens): + return ''.join(sorted(set(''.join(tokens)))) + + wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) + print(wds.parseString("slkdjs sld sldd sdlf sdljf")) + prints:: + >>entering remove_duplicate_chars(line: 'slkdjs sld sldd sdlf sdljf', 0, (['slkdjs', 'sld', 'sldd', 'sdlf', 'sdljf'], {})) + <<leaving remove_duplicate_chars (ret: 'dfjkls') + ['dfjkls'] + """ + f = _trim_arity(f) + def z(*paArgs): + thisFunc = f.__name__ + s,l,t = paArgs[-3:] + if len(paArgs)>3: + thisFunc = paArgs[0].__class__.__name__ + '.' + thisFunc + sys.stderr.write( ">>entering %s(line: '%s', %d, %r)\n" % (thisFunc,line(l,s),l,t) ) + try: + ret = f(*paArgs) + except Exception as exc: + sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) ) + raise + sys.stderr.write( "<<leaving %s (ret: %r)\n" % (thisFunc,ret) ) + return ret + try: + z.__name__ = f.__name__ + except AttributeError: + pass + return z + +# +# global helpers +# +def delimitedList( expr, delim=",", combine=False ): + """ + Helper to define a delimited list of expressions - the delimiter defaults to ','. 
+ By default, the list elements and delimiters can have intervening whitespace, and + comments, but this can be overridden by passing C{combine=True} in the constructor. + If C{combine} is set to C{True}, the matching tokens are returned as a single token + string, with the delimiters included; otherwise, the matching tokens are returned + as a list of tokens, with the delimiters suppressed. + + Example:: + delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc'] + delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE'] + """ + dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." + if combine: + return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) + else: + return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) + +def countedArray( expr, intExpr=None ): + """ + Helper to define a counted list of expressions. + This helper defines a pattern of the form:: + integer expr expr expr... + where the leading integer tells how many expr expressions follow. + The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. + + If C{intExpr} is specified, it should be a pyparsing expression that produces an integer value. + + Example:: + countedArray(Word(alphas)).parseString('2 ab cd ef') # -> ['ab', 'cd'] + + # in this parser, the leading integer value is given in binary, + # '10' indicating that 2 values are in the array + binaryConstant = Word('01').setParseAction(lambda t: int(t[0], 2)) + countedArray(Word(alphas), intExpr=binaryConstant).parseString('10 ab cd ef') # -> ['ab', 'cd'] + """ + arrayExpr = Forward() + def countFieldParseAction(s,l,t): + n = t[0] + arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) + return [] + if intExpr is None: + intExpr = Word(nums).setParseAction(lambda t:int(t[0])) + else: + intExpr = intExpr.copy() + intExpr.setName("arrayLen") + intExpr.addParseAction(countFieldParseAction, callDuringTry=True) + return ( intExpr + arrayExpr ).setName('(len) ' + _ustr(expr) + '...') + +def _flatten(L): + ret = [] + for i in L: + if isinstance(i,list): + ret.extend(_flatten(i)) + else: + ret.append(i) + return ret + +def matchPreviousLiteral(expr): + """ + Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks + for a 'repeat' of a previous expression. For example:: + first = Word(nums) + second = matchPreviousLiteral(first) + matchExpr = first + ":" + second + will match C{"1:1"}, but not C{"1:2"}. Because this matches a + previous literal, will also match the leading C{"1:1"} in C{"1:10"}. + If this is not desired, use C{matchPreviousExpr}. + Do I{not} use with packrat parsing enabled. + """ + rep = Forward() + def copyTokenToRepeater(s,l,t): + if t: + if len(t) == 1: + rep << t[0] + else: + # flatten t tokens + tflat = _flatten(t.asList()) + rep << And(Literal(tt) for tt in tflat) + else: + rep << Empty() + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName('(prev) ' + _ustr(expr)) + return rep + +def matchPreviousExpr(expr): + """ + Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks + for a 'repeat' of a previous expression. For example:: + first = Word(nums) + second = matchPreviousExpr(first) + matchExpr = first + ":" + second + will match C{"1:1"}, but not C{"1:2"}. 
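A runnable version of the C{matchPreviousLiteral} sketch above (illustrative only, not part of the source):

    from pyparsing import Word, nums, matchPreviousLiteral

    first = Word(nums)
    second = matchPreviousLiteral(first)
    match_expr = first + ":" + second
    print(match_expr.parseString("12:12"))  # -> ['12', ':', '12']
    # match_expr.parseString("12:34") raises ParseException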
Because this matches by + expressions, will I{not} match the leading C{"1:1"} in C{"1:10"}; + the expressions are evaluated first, and then compared, so + C{"1"} is compared with C{"10"}. + Do I{not} use with packrat parsing enabled. + """ + rep = Forward() + e2 = expr.copy() + rep <<= e2 + def copyTokenToRepeater(s,l,t): + matchTokens = _flatten(t.asList()) + def mustMatchTheseTokens(s,l,t): + theseTokens = _flatten(t.asList()) + if theseTokens != matchTokens: + raise ParseException("",0,"") + rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + rep.setName('(prev) ' + _ustr(expr)) + return rep + +def _escapeRegexRangeChars(s): + #~ escape these chars: ^-] + for c in r"\^-]": + s = s.replace(c,_bslash+c) + s = s.replace("\n",r"\n") + s = s.replace("\t",r"\t") + return _ustr(s) + +def oneOf( strs, caseless=False, useRegex=True ): + """ + Helper to quickly define a set of alternative Literals, and makes sure to do + longest-first testing when there is a conflict, regardless of the input order, + but returns a C{L{MatchFirst}} for best performance. + + Parameters: + - strs - a string of space-delimited literals, or a collection of string literals + - caseless - (default=C{False}) - treat all literals as caseless + - useRegex - (default=C{True}) - as an optimization, will generate a Regex + object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or + if creating a C{Regex} raises an exception) + + Example:: + comp_oper = oneOf("< = > <= >= !=") + var = Word(alphas) + number = Word(nums) + term = var | number + comparison_expr = term + comp_oper + term + print(comparison_expr.searchString("B = 12 AA=23 B<=AA AA>12")) + prints:: + [['B', '=', '12'], ['AA', '=', '23'], ['B', '<=', 'AA'], ['AA', '>', '12']] + """ + if caseless: + isequal = ( lambda a,b: a.upper() == b.upper() ) + masks = ( lambda a,b: b.upper().startswith(a.upper()) ) + parseElementClass = CaselessLiteral + else: + isequal = ( lambda a,b: a == b ) + masks = ( lambda a,b: b.startswith(a) ) + parseElementClass = Literal + + symbols = [] + if isinstance(strs,basestring): + symbols = strs.split() + elif isinstance(strs, Iterable): + symbols = list(strs) + else: + warnings.warn("Invalid argument to oneOf, expected string or iterable", + SyntaxWarning, stacklevel=2) + if not symbols: + return NoMatch() + + i = 0 + while i < len(symbols)-1: + cur = symbols[i] + for j,other in enumerate(symbols[i+1:]): + if ( isequal(other, cur) ): + del symbols[i+j+1] + break + elif ( masks(cur, other) ): + del symbols[i+j+1] + symbols.insert(i,other) + cur = other + break + else: + i += 1 + + if not caseless and useRegex: + #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) + try: + if len(symbols)==len("".join(symbols)): + return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ).setName(' | '.join(symbols)) + else: + return Regex( "|".join(re.escape(sym) for sym in symbols) ).setName(' | '.join(symbols)) + except Exception: + warnings.warn("Exception creating Regex for oneOf, building MatchFirst", + SyntaxWarning, stacklevel=2) + + + # last resort, just use MatchFirst + return MatchFirst(parseElementClass(sym) for sym in symbols).setName(' | '.join(symbols)) + +def dictOf( key, value ): + """ + Helper to easily and clearly define a dictionary by specifying the respective patterns + for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens + in the proper order. 
The key pattern can include delimiting markers or punctuation,
+    as long as they are suppressed, thereby leaving the significant key text.  The value
+    pattern can include named results, so that the C{Dict} results can include named token
+    fields.
+
+    Example::
+        text = "shape: SQUARE posn: upper left color: light blue texture: burlap"
+        attr_expr = (label + Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join))
+        print(OneOrMore(attr_expr).parseString(text).dump())
+
+        attr_label = label
+        attr_value = Suppress(':') + OneOrMore(data_word, stopOn=label).setParseAction(' '.join)
+
+        # similar to Dict, but simpler call format
+        result = dictOf(attr_label, attr_value).parseString(text)
+        print(result.dump())
+        print(result['shape'])
+        print(result.shape)  # object attribute access works too
+        print(result.asDict())
+    prints::
+        [['shape', 'SQUARE'], ['posn', 'upper left'], ['color', 'light blue'], ['texture', 'burlap']]
+        - color: light blue
+        - posn: upper left
+        - shape: SQUARE
+        - texture: burlap
+        SQUARE
+        SQUARE
+        {'color': 'light blue', 'shape': 'SQUARE', 'posn': 'upper left', 'texture': 'burlap'}
+    """
+    return Dict( ZeroOrMore( Group ( key + value ) ) )
+
+def originalTextFor(expr, asString=True):
+    """
+    Helper to return the original, untokenized text for a given expression.  Useful to
+    restore the parsed fields of an HTML start tag into the raw tag text itself, or to
+    revert separate tokens with intervening whitespace back to the original matching
+    input text. By default, returns a string containing the original parsed text.
+
+    If the optional C{asString} argument is passed as C{False}, then the return value is a
+    C{L{ParseResults}} containing any results names that were originally matched, and a
+    single token containing the original matched text from the input string.  So if
+    the expression passed to C{L{originalTextFor}} contains expressions with defined
+    results names, you must set C{asString} to C{False} if you want to preserve those
+    results name values.
+
+    Example::
+        src = "this is test <b> bold <i>text</i> </b> normal text "
+        for tag in ("b","i"):
+            opener,closer = makeHTMLTags(tag)
+            patt = originalTextFor(opener + SkipTo(closer) + closer)
+            print(patt.searchString(src)[0])
+    prints::
+        ['<b> bold <i>text</i> </b>']
+        ['<i>text</i>']
+    """
+    locMarker = Empty().setParseAction(lambda s,loc,t: loc)
+    endlocMarker = locMarker.copy()
+    endlocMarker.callPreparse = False
+    matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end")
+    if asString:
+        extractText = lambda s,l,t: s[t._original_start:t._original_end]
+    else:
+        def extractText(s,l,t):
+            t[:] = [s[t.pop('_original_start'):t.pop('_original_end')]]
+    matchExpr.setParseAction(extractText)
+    matchExpr.ignoreExprs = expr.ignoreExprs
+    return matchExpr
+
+def ungroup(expr):
+    """
+    Helper to undo pyparsing's default grouping of And expressions, even
+    if all but one are non-empty.
+    """
+    return TokenConverter(expr).setParseAction(lambda t:t[0])
+
+def locatedExpr(expr):
+    """
+    Helper to decorate a returned token with its starting and ending locations in the input string.
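Backing up to C{ungroup} above, which ships without an example of its own, an illustrative sketch (not part of the source):

    from pyparsing import Group, Word, alphas, nums, ungroup

    grouped = Group(Word(alphas) + Word(nums))
    print(grouped.parseString("ab 12"))          # -> [['ab', '12']]
    print(ungroup(grouped).parseString("ab 12")) # -> ['ab', '12']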
+ This helper adds the following results names: + - locn_start = location where matched expression begins + - locn_end = location where matched expression ends + - value = the actual parsed results + + Be careful if the input text contains C{<TAB>} characters, you may want to call + C{L{ParserElement.parseWithTabs}} + + Example:: + wd = Word(alphas) + for match in locatedExpr(wd).searchString("ljsdf123lksdjjf123lkkjj1222"): + print(match) + prints:: + [[0, 'ljsdf', 5]] + [[8, 'lksdjjf', 15]] + [[18, 'lkkjj', 23]] + """ + locator = Empty().setParseAction(lambda s,l,t: l) + return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) + + +# convenience constants for positional expressions +empty = Empty().setName("empty") +lineStart = LineStart().setName("lineStart") +lineEnd = LineEnd().setName("lineEnd") +stringStart = StringStart().setName("stringStart") +stringEnd = StringEnd().setName("stringEnd") + +_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) +_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) +_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) +_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1) +_charRange = Group(_singleChar + Suppress("-") + _singleChar) +_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" + +def srange(s): + r""" + Helper to easily define string ranges for use in Word construction. Borrows + syntax from regexp '[]' string range definitions:: + srange("[0-9]") -> "0123456789" + srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" + srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" + The input string must be enclosed in []'s, and the returned string is the expanded + character set joined into a single string. + The values enclosed in the []'s may be: + - a single character + - an escaped character with a leading backslash (such as C{\-} or C{\]}) + - an escaped hex character with a leading C{'\x'} (C{\x21}, which is a C{'!'} character) + (C{\0x##} is also supported for backwards compatibility) + - an escaped octal character with a leading C{'\0'} (C{\041}, which is a C{'!'} character) + - a range of any of the above, separated by a dash (C{'a-z'}, etc.) + - any combination of the above (C{'aeiouy'}, C{'a-zA-Z0-9_$'}, etc.) + """ + _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) + try: + return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) + except Exception: + return "" + +def matchOnlyAtCol(n): + """ + Helper method for defining parse actions that require matching at a specific + column in the input text. + """ + def verifyCol(strg,locn,toks): + if col(locn,strg) != n: + raise ParseException(strg,locn,"matched token not at column %d" % n) + return verifyCol + +def replaceWith(replStr): + """ + Helper method for common parse actions that simply return a literal value. Especially + useful when used with C{L{transformString<ParserElement.transformString>}()}. 
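C{matchOnlyAtCol}, defined above, also lacks a usage example; an illustrative sketch (not part of the source; note that column numbers are 1-based):

    from pyparsing import Word, nums, matchOnlyAtCol

    # only accept a number if it starts in column 5
    col5_num = Word(nums).setParseAction(matchOnlyAtCol(5))
    print(col5_num.searchString("abc 123\nabcde 456"))  # -> [['123']]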
+
+    Example::
+        num = Word(nums).setParseAction(lambda toks: int(toks[0]))
+        na = oneOf("N/A NA").setParseAction(replaceWith(math.nan))
+        term = na | num
+
+        OneOrMore(term).parseString("324 234 N/A 234") # -> [324, 234, nan, 234]
+    """
+    return lambda s,l,t: [replStr]
+
+def removeQuotes(s,l,t):
+    """
+    Helper parse action for removing quotation marks from parsed quoted strings.
+
+    Example::
+        # by default, quotation marks are included in parsed results
+        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["'Now is the Winter of our Discontent'"]
+
+        # use removeQuotes to strip quotation marks from parsed results
+        quotedString.setParseAction(removeQuotes)
+        quotedString.parseString("'Now is the Winter of our Discontent'") # -> ["Now is the Winter of our Discontent"]
+    """
+    return t[0][1:-1]
+
+def tokenMap(func, *args):
+    """
+    Helper to define a parse action by mapping a function to all elements of a ParseResults list.  If any additional
+    args are passed, they are forwarded to the given function as additional arguments after
+    the token, as in C{hex_integer = Word(hexnums).setParseAction(tokenMap(int, 16))}, which will convert the
+    parsed data to an integer using base 16.
+
+    Example (compare the last example to the one in L{ParserElement.transformString})::
+        hex_ints = OneOrMore(Word(hexnums)).setParseAction(tokenMap(int, 16))
+        hex_ints.runTests('''
+            00 11 22 aa FF 0a 0d 1a
+            ''')
+
+        upperword = Word(alphas).setParseAction(tokenMap(str.upper))
+        OneOrMore(upperword).runTests('''
+            my kingdom for a horse
+            ''')
+
+        wd = Word(alphas).setParseAction(tokenMap(str.title))
+        OneOrMore(wd).setParseAction(' '.join).runTests('''
+            now is the winter of our discontent made glorious summer by this sun of york
+            ''')
+    prints::
+        00 11 22 aa FF 0a 0d 1a
+        [0, 17, 34, 170, 255, 10, 13, 26]
+
+        my kingdom for a horse
+        ['MY', 'KINGDOM', 'FOR', 'A', 'HORSE']
+
+        now is the winter of our discontent made glorious summer by this sun of york
+        ['Now Is The Winter Of Our Discontent Made Glorious Summer By This Sun Of York']
+    """
+    def pa(s,l,t):
+        return [func(tokn, *args) for tokn in t]
+
+    try:
+        func_name = getattr(func, '__name__',
+                            getattr(func, '__class__').__name__)
+    except Exception:
+        func_name = str(func)
+    pa.__name__ = func_name
+
+    return pa
+
+upcaseTokens = tokenMap(lambda t: _ustr(t).upper())
+"""(Deprecated) Helper parse action to convert tokens to upper case. Deprecated in favor of L{pyparsing_common.upcaseTokens}"""
+
+downcaseTokens = tokenMap(lambda t: _ustr(t).lower())
+"""(Deprecated) Helper parse action to convert tokens to lower case.
Deprecated in favor of L{pyparsing_common.downcaseTokens}""" + +def _makeTags(tagStr, xml): + """Internal helper to construct opening and closing tag expressions, given a tag name""" + if isinstance(tagStr,basestring): + resname = tagStr + tagStr = Keyword(tagStr, caseless=not xml) + else: + resname = tagStr.name + + tagAttrName = Word(alphas,alphanums+"_-:") + if (xml): + tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) + openTag = Suppress("<") + tagStr("tag") + \ + Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \ + Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") + else: + printablesLessRAbrack = "".join(c for c in printables if c not in ">") + tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack) + openTag = Suppress("<") + tagStr("tag") + \ + Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \ + Optional( Suppress("=") + tagAttrValue ) ))) + \ + Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") + closeTag = Combine(_L("</") + tagStr + ">") + + openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % resname) + closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % resname) + openTag.tag = resname + closeTag.tag = resname + return openTag, closeTag + +def makeHTMLTags(tagStr): + """ + Helper to construct opening and closing tag expressions for HTML, given a tag name. Matches + tags in either upper or lower case, attributes with namespaces and with quoted or unquoted values. + + Example:: + text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' + # makeHTMLTags returns pyparsing expressions for the opening and closing tags as a 2-tuple + a,a_end = makeHTMLTags("A") + link_expr = a + SkipTo(a_end)("link_text") + a_end + + for link in link_expr.searchString(text): + # attributes in the <A> tag (like "href" shown here) are also accessible as named results + print(link.link_text, '->', link.href) + prints:: + pyparsing -> http://pyparsing.wikispaces.com + """ + return _makeTags( tagStr, False ) + +def makeXMLTags(tagStr): + """ + Helper to construct opening and closing tag expressions for XML, given a tag name. Matches + tags only in the given upper/lower case. + + Example: similar to L{makeHTMLTags} + """ + return _makeTags( tagStr, True ) + +def withAttribute(*args,**attrDict): + """ + Helper to create a validating parse action to be used with start tags created + with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag + with a required attribute value, to avoid false matches on common tags such as + C{<TD>} or C{<DIV>}. + + Call C{withAttribute} with a series of attribute names and values. Specify the list + of filter attributes names and values as: + - keyword arguments, as in C{(align="right")}, or + - as an explicit dict with C{**} operator, when an attribute name is also a Python + reserved word, as in C{**{"class":"Customer", "align":"right"}} + - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") ) + For attribute names with a namespace prefix, you must use the second form. Attribute + names are matched insensitive to upper/lower case. + + If just testing for C{class} (with or without a namespace), use C{L{withClass}}. 
+ + To verify that the attribute exists, but without specifying a value, pass + C{withAttribute.ANY_VALUE} as the value. + + Example:: + html = ''' + <div> + Some text + <div type="grid">1 4 0 1 0</div> + <div type="graph">1,3 2,3 1,1</div> + <div>this has no type</div> + </div> + + ''' + div,div_end = makeHTMLTags("div") + + # only match div tag having a type attribute with value "grid" + div_grid = div().setParseAction(withAttribute(type="grid")) + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + # construct a match with any div tag having a type attribute, regardless of the value + div_any_type = div().setParseAction(withAttribute(type=withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + if args: + attrs = args[:] + else: + attrs = attrDict.items() + attrs = [(k,v) for k,v in attrs] + def pa(s,l,tokens): + for attrName,attrValue in attrs: + if attrName not in tokens: + raise ParseException(s,l,"no matching attribute " + attrName) + if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue: + raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" % + (attrName, tokens[attrName], attrValue)) + return pa +withAttribute.ANY_VALUE = object() + +def withClass(classname, namespace=''): + """ + Simplified version of C{L{withAttribute}} when matching on a div class - made + difficult because C{class} is a reserved word in Python. + + Example:: + html = ''' + <div> + Some text + <div class="grid">1 4 0 1 0</div> + <div class="graph">1,3 2,3 1,1</div> + <div>this <div> has no class</div> + </div> + + ''' + div,div_end = makeHTMLTags("div") + div_grid = div().setParseAction(withClass("grid")) + + grid_expr = div_grid + SkipTo(div | div_end)("body") + for grid_header in grid_expr.searchString(html): + print(grid_header.body) + + div_any_type = div().setParseAction(withClass(withAttribute.ANY_VALUE)) + div_expr = div_any_type + SkipTo(div | div_end)("body") + for div_header in div_expr.searchString(html): + print(div_header.body) + prints:: + 1 4 0 1 0 + + 1 4 0 1 0 + 1,3 2,3 1,1 + """ + classattr = "%s:class" % namespace if namespace else "class" + return withAttribute(**{classattr : classname}) + +opAssoc = _Constants() +opAssoc.LEFT = object() +opAssoc.RIGHT = object() + +def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ): + """ + Helper method for constructing grammars of expressions made up of + operators working in a precedence hierarchy. Operators may be unary or + binary, left- or right-associative. Parse actions can also be attached + to operator expressions. The generated parser will also recognize the use + of parentheses to override operator precedences (see example below). + + Note: if you define a deep operator list, you may see performance issues + when using infixNotation. See L{ParserElement.enablePackrat} for a + mechanism to potentially improve your parser performance. 
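+    (For instance, a single call to C{ParserElement.enablePackrat()} at import
+    time is often enough to make a deeply nested operator grammar tractable.)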
+ + Parameters: + - baseExpr - expression representing the most basic element for the nested + - opList - list of tuples, one for each operator precedence level in the + expression grammar; each tuple is of the form + (opExpr, numTerms, rightLeftAssoc, parseAction), where: + - opExpr is the pyparsing expression for the operator; + may also be a string, which will be converted to a Literal; + if numTerms is 3, opExpr is a tuple of two expressions, for the + two operators separating the 3 terms + - numTerms is the number of terms for this operator (must + be 1, 2, or 3) + - rightLeftAssoc is the indicator whether the operator is + right or left associative, using the pyparsing-defined + constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. + - parseAction is the parse action to be associated with + expressions matching this operator expression (the + parse action tuple member may be omitted); if the parse action + is passed a tuple or list of functions, this is equivalent to + calling C{setParseAction(*fn)} (L{ParserElement.setParseAction}) + - lpar - expression for matching left-parentheses (default=C{Suppress('(')}) + - rpar - expression for matching right-parentheses (default=C{Suppress(')')}) + + Example:: + # simple example of four-function arithmetic with ints and variable names + integer = pyparsing_common.signed_integer + varname = pyparsing_common.identifier + + arith_expr = infixNotation(integer | varname, + [ + ('-', 1, opAssoc.RIGHT), + (oneOf('* /'), 2, opAssoc.LEFT), + (oneOf('+ -'), 2, opAssoc.LEFT), + ]) + + arith_expr.runTests(''' + 5+3*6 + (5+3)*6 + -2--11 + ''', fullDump=False) + prints:: + 5+3*6 + [[5, '+', [3, '*', 6]]] + + (5+3)*6 + [[[5, '+', 3], '*', 6]] + + -2--11 + [[['-', 2], '-', ['-', 11]]] + """ + ret = Forward() + lastExpr = baseExpr | ( lpar + ret + rpar ) + for i,operDef in enumerate(opList): + opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] + termName = "%s term" % opExpr if arity < 3 else "%s%s term" % opExpr + if arity == 3: + if opExpr is None or len(opExpr) != 2: + raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") + opExpr1, opExpr2 = opExpr + thisExpr = Forward().setName(termName) + if rightLeftAssoc == opAssoc.LEFT: + if arity == 1: + matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) + else: + matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ + Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + elif rightLeftAssoc == opAssoc.RIGHT: + if arity == 1: + # try to avoid LR with this extra test + if not isinstance(opExpr, Optional): + opExpr = Optional(opExpr) + matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) + else: + matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ + Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or 
ternary (3)") + else: + raise ValueError("operator must indicate right or left associativity") + if pa: + if isinstance(pa, (tuple, list)): + matchExpr.setParseAction(*pa) + else: + matchExpr.setParseAction(pa) + thisExpr <<= ( matchExpr.setName(termName) | lastExpr ) + lastExpr = thisExpr + ret <<= lastExpr + return ret + +operatorPrecedence = infixNotation +"""(Deprecated) Former name of C{L{infixNotation}}, will be dropped in a future release.""" + +dblQuotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"').setName("string enclosed in double quotes") +sglQuotedString = Combine(Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("string enclosed in single quotes") +quotedString = Combine(Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*')+'"'| + Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\(?:[^x]|x[0-9a-fA-F]+)))*")+"'").setName("quotedString using single or double quotes") +unicodeString = Combine(_L('u') + quotedString.copy()).setName("unicode string literal") + +def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()): + """ + Helper method for defining nested lists enclosed in opening and closing + delimiters ("(" and ")" are the default). + + Parameters: + - opener - opening character for a nested list (default=C{"("}); can also be a pyparsing expression + - closer - closing character for a nested list (default=C{")"}); can also be a pyparsing expression + - content - expression for items within the nested lists (default=C{None}) + - ignoreExpr - expression for ignoring opening and closing delimiters (default=C{quotedString}) + + If an expression is not provided for the content argument, the nested + expression will capture all whitespace-delimited content between delimiters + as a list of separate values. + + Use the C{ignoreExpr} argument to define expressions that may contain + opening or closing characters that should not be treated as opening + or closing characters for nesting, such as quotedString or a comment + expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}. + The default is L{quotedString}, but if no expressions are to be ignored, + then pass C{None} for this argument. 
+ + Example:: + data_type = oneOf("void int short long char float double") + decl_data_type = Combine(data_type + Optional(Word('*'))) + ident = Word(alphas+'_', alphanums+'_') + number = pyparsing_common.number + arg = Group(decl_data_type + ident) + LPAR,RPAR = map(Suppress, "()") + + code_body = nestedExpr('{', '}', ignoreExpr=(quotedString | cStyleComment)) + + c_function = (decl_data_type("type") + + ident("name") + + LPAR + Optional(delimitedList(arg), [])("args") + RPAR + + code_body("body")) + c_function.ignore(cStyleComment) + + source_code = ''' + int is_odd(int x) { + return (x%2); + } + + int dec_to_hex(char hchar) { + if (hchar >= '0' && hchar <= '9') { + return (ord(hchar)-ord('0')); + } else { + return (10+ord(hchar)-ord('A')); + } + } + ''' + for func in c_function.searchString(source_code): + print("%(name)s (%(type)s) args: %(args)s" % func) + + prints:: + is_odd (int) args: [['int', 'x']] + dec_to_hex (int) args: [['char', 'hchar']] + """ + if opener == closer: + raise ValueError("opening and closing strings cannot be the same") + if content is None: + if isinstance(opener,basestring) and isinstance(closer,basestring): + if len(opener) == 1 and len(closer)==1: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS + ).setParseAction(lambda t:t[0].strip())) + else: + if ignoreExpr is not None: + content = (Combine(OneOrMore(~ignoreExpr + + ~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) + + CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1)) + ).setParseAction(lambda t:t[0].strip())) + else: + raise ValueError("opening and closing arguments must be strings if no content expression is given") + ret = Forward() + if ignoreExpr is not None: + ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) ) + else: + ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) ) + ret.setName('nested %s%s expression' % (opener,closer)) + return ret + +def indentedBlock(blockStatementExpr, indentStack, indent=True): + """ + Helper method for defining space-delimited indentation blocks, such as + those used to define block statements in Python source code. + + Parameters: + - blockStatementExpr - expression defining syntax of statement that + is repeated within the indented block + - indentStack - list created by caller to manage indentation stack + (multiple statementWithIndentedBlock expressions within a single grammar + should share a common indentStack) + - indent - boolean indicating whether block must be indented beyond the + the current level; set to False for block of left-most statements + (default=C{True}) + + A valid block must contain at least one C{blockStatement}. 
+ + Example:: + data = ''' + def A(z): + A1 + B = 100 + G = A2 + A2 + A3 + B + def BB(a,b,c): + BB1 + def BBA(): + bba1 + bba2 + bba3 + C + D + def spam(x,y): + def eggs(z): + pass + ''' + + + indentStack = [1] + stmt = Forward() + + identifier = Word(alphas, alphanums) + funcDecl = ("def" + identifier + Group( "(" + Optional( delimitedList(identifier) ) + ")" ) + ":") + func_body = indentedBlock(stmt, indentStack) + funcDef = Group( funcDecl + func_body ) + + rvalue = Forward() + funcCall = Group(identifier + "(" + Optional(delimitedList(rvalue)) + ")") + rvalue << (funcCall | identifier | Word(nums)) + assignment = Group(identifier + "=" + rvalue) + stmt << ( funcDef | assignment | identifier ) + + module_body = OneOrMore(stmt) + + parseTree = module_body.parseString(data) + parseTree.pprint() + prints:: + [['def', + 'A', + ['(', 'z', ')'], + ':', + [['A1'], [['B', '=', '100']], [['G', '=', 'A2']], ['A2'], ['A3']]], + 'B', + ['def', + 'BB', + ['(', 'a', 'b', 'c', ')'], + ':', + [['BB1'], [['def', 'BBA', ['(', ')'], ':', [['bba1'], ['bba2'], ['bba3']]]]]], + 'C', + 'D', + ['def', + 'spam', + ['(', 'x', 'y', ')'], + ':', + [[['def', 'eggs', ['(', 'z', ')'], ':', [['pass']]]]]]] + """ + def checkPeerIndent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if curCol != indentStack[-1]: + if curCol > indentStack[-1]: + raise ParseFatalException(s,l,"illegal nesting") + raise ParseException(s,l,"not a peer entry") + + def checkSubIndent(s,l,t): + curCol = col(l,s) + if curCol > indentStack[-1]: + indentStack.append( curCol ) + else: + raise ParseException(s,l,"not a subentry") + + def checkUnindent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): + raise ParseException(s,l,"not an unindent") + indentStack.pop() + + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) + INDENT = (Empty() + Empty().setParseAction(checkSubIndent)).setName('INDENT') + PEER = Empty().setParseAction(checkPeerIndent).setName('') + UNDENT = Empty().setParseAction(checkUnindent).setName('UNINDENT') + if indent: + smExpr = Group( Optional(NL) + + #~ FollowedBy(blockStatementExpr) + + INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) + else: + smExpr = Group( Optional(NL) + + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) + blockStatementExpr.ignore(_bslash + LineEnd()) + return smExpr.setName('indented block') + +alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") +punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") + +anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:").setName('any tag')) +_htmlEntityMap = dict(zip("gt lt amp nbsp quot apos".split(),'><& "\'')) +commonHTMLEntity = Regex('&(?P<entity>' + '|'.join(_htmlEntityMap.keys()) +");").setName("common HTML entity") +def replaceHTMLEntity(t): + """Helper parser action to replace common HTML entities with their special characters""" + return _htmlEntityMap.get(t.entity) + +# it's easy to get these comment structures wrong - they're very common, so may as well make them available +cStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/').setName("C style comment") +"Comment of the form C{/* ... */}" + +htmlComment = Regex(r"<!--[\s\S]*?-->").setName("HTML comment") +"Comment of the form C{<!-- ... -->}" + +restOfLine = Regex(r".*").leaveWhitespace().setName("rest of line") +dblSlashComment = Regex(r"//(?:\\\n|[^\n])*").setName("// comment") +"Comment of the form C{// ... 
(to end of line)}" + +cppStyleComment = Combine(Regex(r"/\*(?:[^*]|\*(?!/))*") + '*/'| dblSlashComment).setName("C++ style comment") +"Comment of either form C{L{cStyleComment}} or C{L{dblSlashComment}}" + +javaStyleComment = cppStyleComment +"Same as C{L{cppStyleComment}}" + +pythonStyleComment = Regex(r"#.*").setName("Python style comment") +"Comment of the form C{# ... (to end of line)}" + +_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + + Optional( Word(" \t") + + ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") +commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") +"""(Deprecated) Predefined expression of 1 or more printable words or quoted strings, separated by commas. + This expression is deprecated in favor of L{pyparsing_common.comma_separated_list}.""" + +# some other useful expressions - using lower-case class name since we are really using this as a namespace +class pyparsing_common: + """ + Here are some common low-level expressions that may be useful in jump-starting parser development: + - numeric forms (L{integers<integer>}, L{reals<real>}, L{scientific notation<sci_real>}) + - common L{programming identifiers<identifier>} + - network addresses (L{MAC<mac_address>}, L{IPv4<ipv4_address>}, L{IPv6<ipv6_address>}) + - ISO8601 L{dates<iso8601_date>} and L{datetime<iso8601_datetime>} + - L{UUID<uuid>} + - L{comma-separated list<comma_separated_list>} + Parse actions: + - C{L{convertToInteger}} + - C{L{convertToFloat}} + - C{L{convertToDate}} + - C{L{convertToDatetime}} + - C{L{stripHTMLTags}} + - C{L{upcaseTokens}} + - C{L{downcaseTokens}} + + Example:: + pyparsing_common.number.runTests(''' + # any int or real number, returned as the appropriate type + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.fnumber.runTests(''' + # any int or real number, returned as float + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + ''') + + pyparsing_common.hex_integer.runTests(''' + # hex numbers + 100 + FF + ''') + + pyparsing_common.fraction.runTests(''' + # fractions + 1/2 + -3/4 + ''') + + pyparsing_common.mixed_integer.runTests(''' + # mixed fractions + 1 + 1/2 + -3/4 + 1-3/4 + ''') + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(''' + # uuid + 12345678-1234-5678-1234-567812345678 + ''') + prints:: + # any int or real number, returned as the appropriate type + 100 + [100] + + -100 + [-100] + + +100 + [100] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # any int or real number, returned as float + 100 + [100.0] + + -100 + [-100.0] + + +100 + [100.0] + + 3.14159 + [3.14159] + + 6.02e23 + [6.02e+23] + + 1e-12 + [1e-12] + + # hex numbers + 100 + [256] + + FF + [255] + + # fractions + 1/2 + [0.5] + + -3/4 + [-0.75] + + # mixed fractions + 1 + [1] + + 1/2 + [0.5] + + -3/4 + [-0.75] + + 1-3/4 + [1.75] + + # uuid + 12345678-1234-5678-1234-567812345678 + [UUID('12345678-1234-5678-1234-567812345678')] + """ + + convertToInteger = tokenMap(int) + """ + Parse action for converting parsed integers to Python int + """ + + convertToFloat = tokenMap(float) + """ + Parse action for converting parsed numbers to Python float + """ + + integer = Word(nums).setName("integer").setParseAction(convertToInteger) + """expression that parses an unsigned integer, returns an int""" + + hex_integer = Word(hexnums).setName("hex integer").setParseAction(tokenMap(int,16)) + """expression that 
parses a hexadecimal integer, returns an int""" + + signed_integer = Regex(r'[+-]?\d+').setName("signed integer").setParseAction(convertToInteger) + """expression that parses an integer with optional leading sign, returns an int""" + + fraction = (signed_integer().setParseAction(convertToFloat) + '/' + signed_integer().setParseAction(convertToFloat)).setName("fraction") + """fractional expression of an integer divided by an integer, returns a float""" + fraction.addParseAction(lambda t: t[0]/t[-1]) + + mixed_integer = (fraction | signed_integer + Optional(Optional('-').suppress() + fraction)).setName("fraction or mixed integer-fraction") + """mixed integer of the form 'integer - fraction', with optional leading integer, returns float""" + mixed_integer.addParseAction(sum) + + real = Regex(r'[+-]?\d+\.\d*').setName("real number").setParseAction(convertToFloat) + """expression that parses a floating point number and returns a float""" + + sci_real = Regex(r'[+-]?\d+([eE][+-]?\d+|\.\d*([eE][+-]?\d+)?)').setName("real number with scientific notation").setParseAction(convertToFloat) + """expression that parses a floating point number with optional scientific notation and returns a float""" + + # streamlining this expression makes the docs nicer-looking + number = (sci_real | real | signed_integer).streamline() + """any numeric expression, returns the corresponding Python type""" + + fnumber = Regex(r'[+-]?\d+\.?\d*([eE][+-]?\d+)?').setName("fnumber").setParseAction(convertToFloat) + """any int or real number, returned as float""" + + identifier = Word(alphas+'_', alphanums+'_').setName("identifier") + """typical code identifier (leading alpha or '_', followed by 0 or more alphas, nums, or '_')""" + + ipv4_address = Regex(r'(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1?[0-9]{1,2})){3}').setName("IPv4 address") + "IPv4 address (C{0.0.0.0 - 255.255.255.255})" + + _ipv6_part = Regex(r'[0-9a-fA-F]{1,4}').setName("hex_integer") + _full_ipv6_address = (_ipv6_part + (':' + _ipv6_part)*7).setName("full IPv6 address") + _short_ipv6_address = (Optional(_ipv6_part + (':' + _ipv6_part)*(0,6)) + "::" + Optional(_ipv6_part + (':' + _ipv6_part)*(0,6))).setName("short IPv6 address") + _short_ipv6_address.addCondition(lambda t: sum(1 for tt in t if pyparsing_common._ipv6_part.matches(tt)) < 8) + _mixed_ipv6_address = ("::ffff:" + ipv4_address).setName("mixed IPv6 address") + ipv6_address = Combine((_full_ipv6_address | _mixed_ipv6_address | _short_ipv6_address).setName("IPv6 address")).setName("IPv6 address") + "IPv6 address (long, short, or mixed form)" + + mac_address = Regex(r'[0-9a-fA-F]{2}([:.-])[0-9a-fA-F]{2}(?:\1[0-9a-fA-F]{2}){4}').setName("MAC address") + "MAC address xx:xx:xx:xx:xx (may also have '-' or '.' 
delimiters)" + + @staticmethod + def convertToDate(fmt="%Y-%m-%d"): + """ + Helper to create a parse action for converting parsed date string to Python datetime.date + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%d"}) + + Example:: + date_expr = pyparsing_common.iso8601_date.copy() + date_expr.setParseAction(pyparsing_common.convertToDate()) + print(date_expr.parseString("1999-12-31")) + prints:: + [datetime.date(1999, 12, 31)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt).date() + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + @staticmethod + def convertToDatetime(fmt="%Y-%m-%dT%H:%M:%S.%f"): + """ + Helper to create a parse action for converting parsed datetime string to Python datetime.datetime + + Params - + - fmt - format to be passed to datetime.strptime (default=C{"%Y-%m-%dT%H:%M:%S.%f"}) + + Example:: + dt_expr = pyparsing_common.iso8601_datetime.copy() + dt_expr.setParseAction(pyparsing_common.convertToDatetime()) + print(dt_expr.parseString("1999-12-31T23:59:59.999")) + prints:: + [datetime.datetime(1999, 12, 31, 23, 59, 59, 999000)] + """ + def cvt_fn(s,l,t): + try: + return datetime.strptime(t[0], fmt) + except ValueError as ve: + raise ParseException(s, l, str(ve)) + return cvt_fn + + iso8601_date = Regex(r'(?P<year>\d{4})(?:-(?P<month>\d\d)(?:-(?P<day>\d\d))?)?').setName("ISO8601 date") + "ISO8601 date (C{yyyy-mm-dd})" + + iso8601_datetime = Regex(r'(?P<year>\d{4})-(?P<month>\d\d)-(?P<day>\d\d)[T ](?P<hour>\d\d):(?P<minute>\d\d)(:(?P<second>\d\d(\.\d*)?)?)?(?P<tz>Z|[+-]\d\d:?\d\d)?').setName("ISO8601 datetime") + "ISO8601 datetime (C{yyyy-mm-ddThh:mm:ss.s(Z|+-00:00)}) - trailing seconds, milliseconds, and timezone optional; accepts separating C{'T'} or C{' '}" + + uuid = Regex(r'[0-9a-fA-F]{8}(-[0-9a-fA-F]{4}){3}-[0-9a-fA-F]{12}').setName("UUID") + "UUID (C{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx})" + + _html_stripper = anyOpenTag.suppress() | anyCloseTag.suppress() + @staticmethod + def stripHTMLTags(s, l, tokens): + """ + Parse action to remove HTML tags from web page HTML source + + Example:: + # strip HTML links from normal text + text = '<td>More info at the <a href="http://pyparsing.wikispaces.com">pyparsing</a> wiki page</td>' + td,td_end = makeHTMLTags("TD") + table_text = td + SkipTo(td_end).setParseAction(pyparsing_common.stripHTMLTags)("body") + td_end + + print(table_text.parseString(text).body) # -> 'More info at the pyparsing wiki page' + """ + return pyparsing_common._html_stripper.transformString(tokens[0]) + + _commasepitem = Combine(OneOrMore(~Literal(",") + ~LineEnd() + Word(printables, excludeChars=',') + + Optional( White(" \t") ) ) ).streamline().setName("commaItem") + comma_separated_list = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("comma separated list") + """Predefined expression of 1 or more printable words or quoted strings, separated by commas.""" + + upcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).upper())) + """Parse action to convert tokens to upper case.""" + + downcaseTokens = staticmethod(tokenMap(lambda t: _ustr(t).lower())) + """Parse action to convert tokens to lower case.""" + + +if __name__ == "__main__": + + selectToken = CaselessLiteral("select") + fromToken = CaselessLiteral("from") + + ident = Word(alphas, alphanums + "_$") + + columnName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + columnNameList = Group(delimitedList(columnName)).setName("columns") + columnSpec 
= ('*' | columnNameList) + + tableName = delimitedList(ident, ".", combine=True).setParseAction(upcaseTokens) + tableNameList = Group(delimitedList(tableName)).setName("tables") + + simpleSQL = selectToken("command") + columnSpec("columns") + fromToken + tableNameList("tables") + + # demo runTests method, including embedded comments in test string + simpleSQL.runTests(""" + # '*' as column list and dotted table name + select * from SYS.XYZZY + + # caseless match on "SELECT", and casts back to "select" + SELECT * from XYZZY, ABC + + # list of column names, and mixed case SELECT keyword + Select AA,BB,CC from Sys.dual + + # multiple tables + Select A, B, C from Sys.dual, Table2 + + # invalid SELECT keyword - should fail + Xelect A, B, C from Sys.dual + + # incomplete command - should fail + Select + + # invalid column name - should fail + Select ^^^ frox Sys.dual + + """) + + pyparsing_common.number.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + # any int or real number, returned as float + pyparsing_common.fnumber.runTests(""" + 100 + -100 + +100 + 3.14159 + 6.02e23 + 1e-12 + """) + + pyparsing_common.hex_integer.runTests(""" + 100 + FF + """) + + import uuid + pyparsing_common.uuid.setParseAction(tokenMap(uuid.UUID)) + pyparsing_common.uuid.runTests(""" + 12345678-1234-5678-1234-567812345678 + """) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/_vendor/six.py b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/six.py new file mode 100644 index 0000000000000000000000000000000000000000..190c0239cd7d7af82a6e0cbc8d68053fa2e3dfaf --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/_vendor/six.py @@ -0,0 +1,868 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2015 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson <benjamin@python.org>" +__version__ = "1.10.0" + + +# Useful for very coarse version differentiation. 
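+# A typical use of these flags in client code (an illustrative sketch, not
+# part of six itself):
+#
+#     if PY2:
+#         from urlparse import urlparse
+#     else:
+#         from urllib.parse import urlparse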
+PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." 
+ fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", 
"http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", 
"urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = 
_importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack 
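+    # e.g. int2byte(65) == b'A'; byte2int (defined just below) inverts it,
+    # so byte2int(int2byte(n)) == n for any n in range(256).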
+ del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
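+            # (Only Python 2's built-in `file` objects are checked here;
+            # io/codecs text wrappers perform their own encoding on write.)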
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. 
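+# (Usage sketch of what the moves machinery above enables, shown only for
+# illustration:
+#
+#     from six.moves.urllib.parse import urlparse
+#     netloc = urlparse("http://example.com/path").netloc  # -> 'example.com'
+# )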
+__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/archive_util.py b/backend/test/lib/python3.8/site-packages/setuptools/archive_util.py new file mode 100644 index 0000000000000000000000000000000000000000..81436044d995ff430334a7ef324b08e616f4b7a7 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/archive_util.py @@ -0,0 +1,173 @@ +"""Utilities for extracting common archive formats""" + +import zipfile +import tarfile +import os +import shutil +import posixpath +import contextlib +from distutils.errors import DistutilsError + +from pkg_resources import ensure_directory + +__all__ = [ + "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", + "UnrecognizedFormat", "extraction_drivers", "unpack_directory", +] + + +class UnrecognizedFormat(DistutilsError): + """Couldn't recognize the archive type""" + + +def default_filter(src, dst): + """The default progress/filter callback; returns True for all files""" + return dst + + +def unpack_archive(filename, extract_dir, progress_filter=default_filter, + drivers=None): + """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` + + `progress_filter` is a function taking two arguments: a source path + internal to the archive ('/'-separated), and a filesystem path where it + will be extracted. The callback must return the desired extract path + (which may be the same as the one passed in), or else ``None`` to skip + that file or directory. The callback can thus be used to report on the + progress of the extraction, as well as to filter the items extracted or + alter their extraction paths. + + `drivers`, if supplied, must be a non-empty sequence of functions with the + same signature as this function (minus the `drivers` argument), that raise + ``UnrecognizedFormat`` if they do not support extracting the designated + archive type. The `drivers` are tried in sequence until one is found that + does not raise an error, or until all are exhausted (in which case + ``UnrecognizedFormat`` is raised). If you do not supply a sequence of + drivers, the module's ``extraction_drivers`` constant will be used, which + means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that + order. 
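+
+    A typical call (hypothetical paths, shown only for illustration) is
+    ``unpack_archive('dist/pkg-1.0.tar.gz', 'build/pkg')``, which tries each
+    default driver in turn until one accepts the file.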
+ """ + for driver in drivers or extraction_drivers: + try: + driver(filename, extract_dir, progress_filter) + except UnrecognizedFormat: + continue + else: + return + else: + raise UnrecognizedFormat( + "Not a recognized archive type: %s" % filename + ) + + +def unpack_directory(filename, extract_dir, progress_filter=default_filter): + """"Unpack" a directory, using the same interface as for archives + + Raises ``UnrecognizedFormat`` if `filename` is not a directory + """ + if not os.path.isdir(filename): + raise UnrecognizedFormat("%s is not a directory" % filename) + + paths = { + filename: ('', extract_dir), + } + for base, dirs, files in os.walk(filename): + src, dst = paths[base] + for d in dirs: + paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d) + for f in files: + target = os.path.join(dst, f) + target = progress_filter(src + f, target) + if not target: + # skip non-files + continue + ensure_directory(target) + f = os.path.join(base, f) + shutil.copyfile(f, target) + shutil.copystat(f, target) + + +def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): + """Unpack zip `filename` to `extract_dir` + + Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined + by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation + of the `progress_filter` argument. + """ + + if not zipfile.is_zipfile(filename): + raise UnrecognizedFormat("%s is not a zip file" % (filename,)) + + with zipfile.ZipFile(filename) as z: + for info in z.infolist(): + name = info.filename + + # don't extract absolute paths or ones with .. in them + if name.startswith('/') or '..' in name.split('/'): + continue + + target = os.path.join(extract_dir, *name.split('/')) + target = progress_filter(name, target) + if not target: + continue + if name.endswith('/'): + # directory + ensure_directory(target) + else: + # file + ensure_directory(target) + data = z.read(info.filename) + with open(target, 'wb') as f: + f.write(data) + unix_attributes = info.external_attr >> 16 + if unix_attributes: + os.chmod(target, unix_attributes) + + +def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): + """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` + + Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined + by ``tarfile.open()``). See ``unpack_archive()`` for an explanation + of the `progress_filter` argument. + """ + try: + tarobj = tarfile.open(filename) + except tarfile.TarError: + raise UnrecognizedFormat( + "%s is not a compressed or uncompressed tar file" % (filename,) + ) + with contextlib.closing(tarobj): + # don't do any chowning! + tarobj.chown = lambda *args: None + for member in tarobj: + name = member.name + # don't extract absolute paths or ones with .. in them + if not name.startswith('/') and '..' 
not in name.split('/'):
+                prelim_dst = os.path.join(extract_dir, *name.split('/'))
+
+                # resolve any links and extract the link targets as normal
+                # files
+                while member is not None and (member.islnk() or member.issym()):
+                    linkpath = member.linkname
+                    if member.issym():
+                        base = posixpath.dirname(member.name)
+                        linkpath = posixpath.join(base, linkpath)
+                        linkpath = posixpath.normpath(linkpath)
+                    member = tarobj._getmember(linkpath)
+
+                if member is not None and (member.isfile() or member.isdir()):
+                    final_dst = progress_filter(name, prelim_dst)
+                    if final_dst:
+                        if final_dst.endswith(os.sep):
+                            final_dst = final_dst[:-1]
+                        try:
+                            # XXX Ugh
+                            tarobj._extract_member(member, final_dst)
+                        except tarfile.ExtractError:
+                            # chown/chmod/mkfifo/mknode/makedev failed
+                            pass
+        return True
+
+
+extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
diff --git a/backend/test/lib/python3.8/site-packages/setuptools/build_meta.py b/backend/test/lib/python3.8/site-packages/setuptools/build_meta.py
new file mode 100644
index 0000000000000000000000000000000000000000..10c4b528d996d23a1319e3fed755aef0e6da2eb9
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/setuptools/build_meta.py
@@ -0,0 +1,257 @@
+"""A PEP 517 interface to setuptools
+
+Previously, when a user or a command line tool (let's call it a "frontend")
+needed to make a request of setuptools to take a certain action, for
+example, generating a list of installation requirements, the frontend
+would call "setup.py egg_info" or "setup.py bdist_wheel" on the command line.
+
+PEP 517 defines a different method of interfacing with setuptools. Rather
+than calling "setup.py" directly, the frontend should:
+
+  1. Set the current directory to the directory with a setup.py file
+  2. Import this module into a safe python interpreter (one in which
+     setuptools can potentially set global variables or crash hard).
+  3. Call one of the functions defined in PEP 517.
+
+What each function does is defined in PEP 517. However, here is a "casual"
+definition of the functions (this definition should not be relied on for
+bug reports or API stability):
+
+  - `build_wheel`: build a wheel in the folder and return the basename
+  - `get_requires_for_build_wheel`: get the `setup_requires` to build
+  - `prepare_metadata_for_build_wheel`: get the `install_requires`
+  - `build_sdist`: build an sdist in the folder and return the basename
+  - `get_requires_for_build_sdist`: get the `setup_requires` to build
+
+Again, this is not a formal definition! Just a "taste" of the module.
+"""
+
+import io
+import os
+import sys
+import tokenize
+import shutil
+import contextlib
+
+import setuptools
+import distutils
+from setuptools.py31compat import TemporaryDirectory
+
+from pkg_resources import parse_requirements
+from pkg_resources.py31compat import makedirs
+
+__all__ = ['get_requires_for_build_sdist',
+           'get_requires_for_build_wheel',
+           'prepare_metadata_for_build_wheel',
+           'build_wheel',
+           'build_sdist',
+           '__legacy__',
+           'SetupRequirementsError']
+
+class SetupRequirementsError(BaseException):
+    def __init__(self, specifiers):
+        self.specifiers = specifiers
+
+
+class Distribution(setuptools.dist.Distribution):
+    def fetch_build_eggs(self, specifiers):
+        specifier_list = list(map(str, parse_requirements(specifiers)))
+
+        raise SetupRequirementsError(specifier_list)
+
+    @classmethod
+    @contextlib.contextmanager
+    def patch(cls):
+        """
+        Replace
+        distutils.dist.Distribution with this class
+        for the duration of this context.
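+
+        A minimal usage sketch (`run_setup` is a hypothetical stand-in
+        for executing a setup.py script)::
+
+            with Distribution.patch():
+                run_setup()   # setup() calls now construct this subclass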
+ """ + orig = distutils.core.Distribution + distutils.core.Distribution = cls + try: + yield + finally: + distutils.core.Distribution = orig + + +def _to_str(s): + """ + Convert a filename to a string (on Python 2, explicitly + a byte string, not Unicode) as distutils checks for the + exact type str. + """ + if sys.version_info[0] == 2 and not isinstance(s, str): + # Assume it's Unicode, as that's what the PEP says + # should be provided. + return s.encode(sys.getfilesystemencoding()) + return s + + +def _get_immediate_subdirectories(a_dir): + return [name for name in os.listdir(a_dir) + if os.path.isdir(os.path.join(a_dir, name))] + + +def _file_with_extension(directory, extension): + matching = ( + f for f in os.listdir(directory) + if f.endswith(extension) + ) + file, = matching + return file + + +def _open_setup_script(setup_script): + if not os.path.exists(setup_script): + # Supply a default setup.py + return io.StringIO(u"from setuptools import setup; setup()") + + return getattr(tokenize, 'open', open)(setup_script) + + +class _BuildMetaBackend(object): + + def _fix_config(self, config_settings): + config_settings = config_settings or {} + config_settings.setdefault('--global-option', []) + return config_settings + + def _get_build_requires(self, config_settings, requirements): + config_settings = self._fix_config(config_settings) + + sys.argv = sys.argv[:1] + ['egg_info'] + \ + config_settings["--global-option"] + try: + with Distribution.patch(): + self.run_setup() + except SetupRequirementsError as e: + requirements += e.specifiers + + return requirements + + def run_setup(self, setup_script='setup.py'): + # Note that we can reuse our build directory between calls + # Correctness comes first, then optimization later + __file__ = setup_script + __name__ = '__main__' + + with _open_setup_script(__file__) as f: + code = f.read().replace(r'\r\n', r'\n') + + exec(compile(code, __file__, 'exec'), locals()) + + def get_requires_for_build_wheel(self, config_settings=None): + config_settings = self._fix_config(config_settings) + return self._get_build_requires(config_settings, requirements=['wheel']) + + def get_requires_for_build_sdist(self, config_settings=None): + config_settings = self._fix_config(config_settings) + return self._get_build_requires(config_settings, requirements=[]) + + def prepare_metadata_for_build_wheel(self, metadata_directory, + config_settings=None): + sys.argv = sys.argv[:1] + ['dist_info', '--egg-base', + _to_str(metadata_directory)] + self.run_setup() + + dist_info_directory = metadata_directory + while True: + dist_infos = [f for f in os.listdir(dist_info_directory) + if f.endswith('.dist-info')] + + if (len(dist_infos) == 0 and + len(_get_immediate_subdirectories(dist_info_directory)) == 1): + + dist_info_directory = os.path.join( + dist_info_directory, os.listdir(dist_info_directory)[0]) + continue + + assert len(dist_infos) == 1 + break + + # PEP 517 requires that the .dist-info directory be placed in the + # metadata_directory. 
To comply, we MUST copy the directory to the root + if dist_info_directory != metadata_directory: + shutil.move( + os.path.join(dist_info_directory, dist_infos[0]), + metadata_directory) + shutil.rmtree(dist_info_directory, ignore_errors=True) + + return dist_infos[0] + + def _build_with_temp_dir(self, setup_command, result_extension, + result_directory, config_settings): + config_settings = self._fix_config(config_settings) + result_directory = os.path.abspath(result_directory) + + # Build in a temporary directory, then copy to the target. + makedirs(result_directory, exist_ok=True) + with TemporaryDirectory(dir=result_directory) as tmp_dist_dir: + sys.argv = (sys.argv[:1] + setup_command + + ['--dist-dir', tmp_dist_dir] + + config_settings["--global-option"]) + self.run_setup() + + result_basename = _file_with_extension(tmp_dist_dir, result_extension) + result_path = os.path.join(result_directory, result_basename) + if os.path.exists(result_path): + # os.rename will fail overwriting on non-Unix. + os.remove(result_path) + os.rename(os.path.join(tmp_dist_dir, result_basename), result_path) + + return result_basename + + + def build_wheel(self, wheel_directory, config_settings=None, + metadata_directory=None): + return self._build_with_temp_dir(['bdist_wheel'], '.whl', + wheel_directory, config_settings) + + def build_sdist(self, sdist_directory, config_settings=None): + return self._build_with_temp_dir(['sdist', '--formats', 'gztar'], + '.tar.gz', sdist_directory, + config_settings) + + +class _BuildMetaLegacyBackend(_BuildMetaBackend): + """Compatibility backend for setuptools + + This is a version of setuptools.build_meta that endeavors to maintain backwards + compatibility with pre-PEP 517 modes of invocation. It exists as a temporary + bridge between the old packaging mechanism and the new packaging mechanism, + and will eventually be removed. + """ + def run_setup(self, setup_script='setup.py'): + # In order to maintain compatibility with scripts assuming that + # the setup.py script is in a directory on the PYTHONPATH, inject + # '' into sys.path. (pypa/setuptools#1642) + sys_path = list(sys.path) # Save the original path + + script_dir = os.path.dirname(os.path.abspath(setup_script)) + if script_dir not in sys.path: + sys.path.insert(0, script_dir) + + try: + super(_BuildMetaLegacyBackend, + self).run_setup(setup_script=setup_script) + finally: + # While PEP 517 frontends should be calling each hook in a fresh + # subprocess according to the standard (and thus it should not be + # strictly necessary to restore the old sys.path), we'll restore + # the original path so that the path manipulation does not persist + # within the hook after run_setup is called. 
+ sys.path[:] = sys_path + +# The primary backend +_BACKEND = _BuildMetaBackend() + +get_requires_for_build_wheel = _BACKEND.get_requires_for_build_wheel +get_requires_for_build_sdist = _BACKEND.get_requires_for_build_sdist +prepare_metadata_for_build_wheel = _BACKEND.prepare_metadata_for_build_wheel +build_wheel = _BACKEND.build_wheel +build_sdist = _BACKEND.build_sdist + + +# The legacy backend +__legacy__ = _BuildMetaLegacyBackend() diff --git a/backend/test/lib/python3.8/site-packages/setuptools/cli-32.exe b/backend/test/lib/python3.8/site-packages/setuptools/cli-32.exe new file mode 100644 index 0000000000000000000000000000000000000000..b1487b7819e7286577a043c7726fbe0ca1543083 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/cli-32.exe differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/cli-64.exe b/backend/test/lib/python3.8/site-packages/setuptools/cli-64.exe new file mode 100644 index 0000000000000000000000000000000000000000..675e6bf3743f3d3011c238657e7128ee9960ef7f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/cli-64.exe differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/cli.exe b/backend/test/lib/python3.8/site-packages/setuptools/cli.exe new file mode 100644 index 0000000000000000000000000000000000000000..b1487b7819e7286577a043c7726fbe0ca1543083 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/cli.exe differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__init__.py b/backend/test/lib/python3.8/site-packages/setuptools/command/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..743f5588faf3ad79850df7bd196749e7a6c03f93 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/__init__.py @@ -0,0 +1,17 @@ +__all__ = [ + 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop', + 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts', + 'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts', + 'bdist_wininst', 'upload_docs', 'build_clib', 'dist_info', +] + +from distutils.command.bdist import bdist +import sys + +from setuptools.command import install_scripts + +if 'egg' not in bdist.format_commands: + bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") + bdist.format_commands.append('egg') + +del bdist, sys diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6b536290689aaa76d2c58d9984632f673d072a92 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/alias.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/alias.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f9c7910a03cb9baef917225ee2b6e33308a62bc0 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/alias.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-38.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..6717fa5afe826448f0b11d2670aa84f7ba758058 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..18b933024809b3e1a1e68238ee528ade9114b27b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_wininst.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_wininst.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..287c7863620811238769b4b0b0f67dc25c04f565 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/bdist_wininst.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/build_clib.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/build_clib.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d1d7f41e06e68329f017f3673d764b2d66aca9c7 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/build_clib.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/build_ext.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/build_ext.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..873d3ffcac2988cf6d0395e71ab6463f9a1be5f9 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/build_ext.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/build_py.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/build_py.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b8264b4bb55ac0741bdaecf74136d28159e718bb Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/build_py.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/develop.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/develop.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b6b4f74b7d5de754c9da889c06df00c4779339c4 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/develop.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/dist_info.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/dist_info.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0431c75be4885a1b1ece6564b2cfddf71537fb0b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/dist_info.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/easy_install.cpython-38.pyc 
b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/easy_install.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..893d240ed41acd5117f72ee8f6eb2bf28ae1e939 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/easy_install.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/egg_info.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/egg_info.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c5496b0a54167f2c648c9c05f2c1384d11b11eed Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/egg_info.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/install.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/install.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9cdc7338ac7ca69e9b3a2aad0d5f9e2e4b61b42a Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/install.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6b9d0e5d022e12936207d82b9e39345227eeea34 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/install_lib.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/install_lib.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1a218a1d3da9b9a1288bcb635d051736c1d920da Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/install_lib.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/install_scripts.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/install_scripts.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9b07edf7bd9f3139c6ac1ee8ffebdb7ec00a2f13 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/install_scripts.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/py36compat.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/py36compat.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8fbe442d389bebc908acf89c7c5ef12a12de3764 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/py36compat.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/register.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/register.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ea556703bb19aff2b926762338e900d35f76ebc2 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/register.cpython-38.pyc differ diff 
--git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/rotate.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/rotate.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e595c20d305582821c469fbbbc12e127bbdc18cb Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/rotate.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/saveopts.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/saveopts.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..88ff6e6b397aa52f2f9dc980434122d16fd747df Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/saveopts.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/sdist.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/sdist.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bf7c4363e4215c3954f433b83d67ad8f1a35fcfc Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/sdist.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/setopt.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/setopt.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a0fe5299694d677586064ccbc6ca20a2f5a477a9 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/setopt.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/test.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/test.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6f2cc89bbc6b160bc1921b0194355c1222fbe47d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/test.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/upload.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/upload.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d6887deebf4936ef509d377d81396c99a080e733 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/upload.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/upload_docs.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/upload_docs.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..83176d1e65ffefe4c1c922ae2e1d7bdc5afe19b7 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/command/__pycache__/upload_docs.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/alias.py b/backend/test/lib/python3.8/site-packages/setuptools/command/alias.py new file mode 100644 index 0000000000000000000000000000000000000000..4532b1cc0dca76227927e873f9c64f01008e565a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/alias.py @@ -0,0 +1,80 @@ +from distutils.errors import DistutilsOptionError + +from 
setuptools.extern.six.moves import map + +from setuptools.command.setopt import edit_config, option_base, config_file + + +def shquote(arg): + """Quote an argument for later parsing by shlex.split()""" + for c in '"', "'", "\\", "#": + if c in arg: + return repr(arg) + if arg.split() != [arg]: + return repr(arg) + return arg + + +class alias(option_base): + """Define a shortcut that invokes one or more commands""" + + description = "define a shortcut to invoke one or more commands" + command_consumes_arguments = True + + user_options = [ + ('remove', 'r', 'remove (unset) the alias'), + ] + option_base.user_options + + boolean_options = option_base.boolean_options + ['remove'] + + def initialize_options(self): + option_base.initialize_options(self) + self.args = None + self.remove = None + + def finalize_options(self): + option_base.finalize_options(self) + if self.remove and len(self.args) != 1: + raise DistutilsOptionError( + "Must specify exactly one argument (the alias name) when " + "using --remove" + ) + + def run(self): + aliases = self.distribution.get_option_dict('aliases') + + if not self.args: + print("Command Aliases") + print("---------------") + for alias in aliases: + print("setup.py alias", format_alias(alias, aliases)) + return + + elif len(self.args) == 1: + alias, = self.args + if self.remove: + command = None + elif alias in aliases: + print("setup.py alias", format_alias(alias, aliases)) + return + else: + print("No alias definition found for %r" % alias) + return + else: + alias = self.args[0] + command = ' '.join(map(shquote, self.args[1:])) + + edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run) + + +def format_alias(name, aliases): + source, command = aliases[name] + if source == config_file('global'): + source = '--global-config ' + elif source == config_file('user'): + source = '--user-config ' + elif source == config_file('local'): + source = '' + else: + source = '--filename=%r' % source + return source + name + ' ' + command diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/bdist_egg.py b/backend/test/lib/python3.8/site-packages/setuptools/command/bdist_egg.py new file mode 100644 index 0000000000000000000000000000000000000000..98470f1715b21befab94b3e84428622a1ba86463 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/bdist_egg.py @@ -0,0 +1,502 @@ +"""setuptools.command.bdist_egg + +Build .egg distributions""" + +from distutils.errors import DistutilsSetupError +from distutils.dir_util import remove_tree, mkpath +from distutils import log +from types import CodeType +import sys +import os +import re +import textwrap +import marshal + +from setuptools.extern import six + +from pkg_resources import get_build_platform, Distribution, ensure_directory +from pkg_resources import EntryPoint +from setuptools.extension import Library +from setuptools import Command + +try: + # Python 2.7 or >=3.2 + from sysconfig import get_path, get_python_version + + def _get_purelib(): + return get_path("purelib") +except ImportError: + from distutils.sysconfig import get_python_lib, get_python_version + + def _get_purelib(): + return get_python_lib(False) + + +def strip_module(filename): + if '.' 
in filename: + filename = os.path.splitext(filename)[0] + if filename.endswith('module'): + filename = filename[:-6] + return filename + + +def sorted_walk(dir): + """Do os.walk in a reproducible way, + independent of indeterministic filesystem readdir order + """ + for base, dirs, files in os.walk(dir): + dirs.sort() + files.sort() + yield base, dirs, files + + +def write_stub(resource, pyfile): + _stub_template = textwrap.dedent(""" + def __bootstrap__(): + global __bootstrap__, __loader__, __file__ + import sys, pkg_resources, imp + __file__ = pkg_resources.resource_filename(__name__, %r) + __loader__ = None; del __bootstrap__, __loader__ + imp.load_dynamic(__name__,__file__) + __bootstrap__() + """).lstrip() + with open(pyfile, 'w') as f: + f.write(_stub_template % resource) + + +class bdist_egg(Command): + description = "create an \"egg\" distribution" + + user_options = [ + ('bdist-dir=', 'b', + "temporary directory for creating the distribution"), + ('plat-name=', 'p', "platform name to embed in generated filenames " + "(default: %s)" % get_build_platform()), + ('exclude-source-files', None, + "remove all .py files from the generated egg"), + ('keep-temp', 'k', + "keep the pseudo-installation tree around after " + + "creating the distribution archive"), + ('dist-dir=', 'd', + "directory to put final built distributions in"), + ('skip-build', None, + "skip rebuilding everything (for testing/debugging)"), + ] + + boolean_options = [ + 'keep-temp', 'skip-build', 'exclude-source-files' + ] + + def initialize_options(self): + self.bdist_dir = None + self.plat_name = None + self.keep_temp = 0 + self.dist_dir = None + self.skip_build = 0 + self.egg_output = None + self.exclude_source_files = None + + def finalize_options(self): + ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info") + self.egg_info = ei_cmd.egg_info + + if self.bdist_dir is None: + bdist_base = self.get_finalized_command('bdist').bdist_base + self.bdist_dir = os.path.join(bdist_base, 'egg') + + if self.plat_name is None: + self.plat_name = get_build_platform() + + self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) + + if self.egg_output is None: + + # Compute filename of the output egg + basename = Distribution( + None, None, ei_cmd.egg_name, ei_cmd.egg_version, + get_python_version(), + self.distribution.has_ext_modules() and self.plat_name + ).egg_name() + + self.egg_output = os.path.join(self.dist_dir, basename + '.egg') + + def do_install_data(self): + # Hack for packages that install data to install's --install-lib + self.get_finalized_command('install').install_lib = self.bdist_dir + + site_packages = os.path.normcase(os.path.realpath(_get_purelib())) + old, self.distribution.data_files = self.distribution.data_files, [] + + for item in old: + if isinstance(item, tuple) and len(item) == 2: + if os.path.isabs(item[0]): + realpath = os.path.realpath(item[0]) + normalized = os.path.normcase(realpath) + if normalized == site_packages or normalized.startswith( + site_packages + os.sep + ): + item = realpath[len(site_packages) + 1:], item[1] + # XXX else: raise ??? 
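+            # entries that were not rewritten above (relative paths, or
+            # absolute paths outside site-packages) are carried over as-is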
+ self.distribution.data_files.append(item) + + try: + log.info("installing package data to %s", self.bdist_dir) + self.call_command('install_data', force=0, root=None) + finally: + self.distribution.data_files = old + + def get_outputs(self): + return [self.egg_output] + + def call_command(self, cmdname, **kw): + """Invoke reinitialized command `cmdname` with keyword args""" + for dirname in INSTALL_DIRECTORY_ATTRS: + kw.setdefault(dirname, self.bdist_dir) + kw.setdefault('skip_build', self.skip_build) + kw.setdefault('dry_run', self.dry_run) + cmd = self.reinitialize_command(cmdname, **kw) + self.run_command(cmdname) + return cmd + + def run(self): + # Generate metadata first + self.run_command("egg_info") + # We run install_lib before install_data, because some data hacks + # pull their data path from the install_lib command. + log.info("installing library code to %s", self.bdist_dir) + instcmd = self.get_finalized_command('install') + old_root = instcmd.root + instcmd.root = None + if self.distribution.has_c_libraries() and not self.skip_build: + self.run_command('build_clib') + cmd = self.call_command('install_lib', warn_dir=0) + instcmd.root = old_root + + all_outputs, ext_outputs = self.get_ext_outputs() + self.stubs = [] + to_compile = [] + for (p, ext_name) in enumerate(ext_outputs): + filename, ext = os.path.splitext(ext_name) + pyfile = os.path.join(self.bdist_dir, strip_module(filename) + + '.py') + self.stubs.append(pyfile) + log.info("creating stub loader for %s", ext_name) + if not self.dry_run: + write_stub(os.path.basename(ext_name), pyfile) + to_compile.append(pyfile) + ext_outputs[p] = ext_name.replace(os.sep, '/') + + if to_compile: + cmd.byte_compile(to_compile) + if self.distribution.data_files: + self.do_install_data() + + # Make the EGG-INFO directory + archive_root = self.bdist_dir + egg_info = os.path.join(archive_root, 'EGG-INFO') + self.mkpath(egg_info) + if self.distribution.scripts: + script_dir = os.path.join(egg_info, 'scripts') + log.info("installing scripts to %s", script_dir) + self.call_command('install_scripts', install_dir=script_dir, + no_ep=1) + + self.copy_metadata_to(egg_info) + native_libs = os.path.join(egg_info, "native_libs.txt") + if all_outputs: + log.info("writing %s", native_libs) + if not self.dry_run: + ensure_directory(native_libs) + libs_file = open(native_libs, 'wt') + libs_file.write('\n'.join(all_outputs)) + libs_file.write('\n') + libs_file.close() + elif os.path.isfile(native_libs): + log.info("removing %s", native_libs) + if not self.dry_run: + os.unlink(native_libs) + + write_safety_flag( + os.path.join(archive_root, 'EGG-INFO'), self.zip_safe() + ) + + if os.path.exists(os.path.join(self.egg_info, 'depends.txt')): + log.warn( + "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n" + "Use the install_requires/extras_require setup() args instead." 
+ ) + + if self.exclude_source_files: + self.zap_pyfiles() + + # Make the archive + make_zipfile(self.egg_output, archive_root, verbose=self.verbose, + dry_run=self.dry_run, mode=self.gen_header()) + if not self.keep_temp: + remove_tree(self.bdist_dir, dry_run=self.dry_run) + + # Add to 'Distribution.dist_files' so that the "upload" command works + getattr(self.distribution, 'dist_files', []).append( + ('bdist_egg', get_python_version(), self.egg_output)) + + def zap_pyfiles(self): + log.info("Removing .py files from temporary directory") + for base, dirs, files in walk_egg(self.bdist_dir): + for name in files: + path = os.path.join(base, name) + + if name.endswith('.py'): + log.debug("Deleting %s", path) + os.unlink(path) + + if base.endswith('__pycache__'): + path_old = path + + pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc' + m = re.match(pattern, name) + path_new = os.path.join( + base, os.pardir, m.group('name') + '.pyc') + log.info( + "Renaming file from [%s] to [%s]" + % (path_old, path_new)) + try: + os.remove(path_new) + except OSError: + pass + os.rename(path_old, path_new) + + def zip_safe(self): + safe = getattr(self.distribution, 'zip_safe', None) + if safe is not None: + return safe + log.warn("zip_safe flag not set; analyzing archive contents...") + return analyze_egg(self.bdist_dir, self.stubs) + + def gen_header(self): + epm = EntryPoint.parse_map(self.distribution.entry_points or '') + ep = epm.get('setuptools.installation', {}).get('eggsecutable') + if ep is None: + return 'w' # not an eggsecutable, do it the usual way. + + if not ep.attrs or ep.extras: + raise DistutilsSetupError( + "eggsecutable entry point (%r) cannot have 'extras' " + "or refer to a module" % (ep,) + ) + + pyver = '{}.{}'.format(*sys.version_info) + pkg = ep.module_name + full = '.'.join(ep.attrs) + base = ep.attrs[0] + basename = os.path.basename(self.egg_output) + + header = ( + "#!/bin/sh\n" + 'if [ `basename $0` = "%(basename)s" ]\n' + 'then exec python%(pyver)s -c "' + "import sys, os; sys.path.insert(0, os.path.abspath('$0')); " + "from %(pkg)s import %(base)s; sys.exit(%(full)s())" + '" "$@"\n' + 'else\n' + ' echo $0 is not the correct name for this egg file.\n' + ' echo Please rename it back to %(basename)s and try again.\n' + ' exec false\n' + 'fi\n' + ) % locals() + + if not self.dry_run: + mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run) + f = open(self.egg_output, 'w') + f.write(header) + f.close() + return 'a' + + def copy_metadata_to(self, target_dir): + "Copy metadata (egg info) to the target_dir" + # normalize the path (so that a forward-slash in egg_info will + # match using startswith below) + norm_egg_info = os.path.normpath(self.egg_info) + prefix = os.path.join(norm_egg_info, '') + for path in self.ei_cmd.filelist.files: + if path.startswith(prefix): + target = os.path.join(target_dir, path[len(prefix):]) + ensure_directory(target) + self.copy_file(path, target) + + def get_ext_outputs(self): + """Get a list of relative paths to C extensions in the output distro""" + + all_outputs = [] + ext_outputs = [] + + paths = {self.bdist_dir: ''} + for base, dirs, files in sorted_walk(self.bdist_dir): + for filename in files: + if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: + all_outputs.append(paths[base] + filename) + for filename in dirs: + paths[os.path.join(base, filename)] = (paths[base] + + filename + '/') + + if self.distribution.has_ext_modules(): + build_cmd = self.get_finalized_command('build_ext') + for ext in build_cmd.extensions: + if 
isinstance(ext, Library):
+                    continue
+                fullname = build_cmd.get_ext_fullname(ext.name)
+                filename = build_cmd.get_ext_filename(fullname)
+                if not os.path.basename(filename).startswith('dl-'):
+                    if os.path.exists(os.path.join(self.bdist_dir, filename)):
+                        ext_outputs.append(filename)
+
+        return all_outputs, ext_outputs
+
+
+NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
+
+
+def walk_egg(egg_dir):
+    """Walk an unpacked egg's contents, skipping the metadata directory"""
+    walker = sorted_walk(egg_dir)
+    base, dirs, files = next(walker)
+    if 'EGG-INFO' in dirs:
+        dirs.remove('EGG-INFO')
+    yield base, dirs, files
+    for bdf in walker:
+        yield bdf
+
+
+def analyze_egg(egg_dir, stubs):
+    # check for existing flag in EGG-INFO
+    for flag, fn in safety_flags.items():
+        if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
+            return flag
+    if not can_scan():
+        return False
+    safe = True
+    for base, dirs, files in walk_egg(egg_dir):
+        for name in files:
+            if name.endswith('.py') or name.endswith('.pyw'):
+                continue
+            elif name.endswith('.pyc') or name.endswith('.pyo'):
+                # always scan, even if we already know we're not safe
+                safe = scan_module(egg_dir, base, name, stubs) and safe
+    return safe
+
+
+def write_safety_flag(egg_dir, safe):
+    # Write or remove zip safety flag file(s)
+    for flag, fn in safety_flags.items():
+        fn = os.path.join(egg_dir, fn)
+        if os.path.exists(fn):
+            if safe is None or bool(safe) != flag:
+                os.unlink(fn)
+        elif safe is not None and bool(safe) == flag:
+            f = open(fn, 'wt')
+            f.write('\n')
+            f.close()
+
+
+safety_flags = {
+    True: 'zip-safe',
+    False: 'not-zip-safe',
+}
+
+
+def scan_module(egg_dir, base, name, stubs):
+    """Check whether module possibly uses unsafe-for-zipfile stuff"""
+
+    filename = os.path.join(base, name)
+    if filename[:-1] in stubs:
+        return True  # Extension module
+    pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
+    module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
+    if six.PY2:
+        skip = 8  # skip magic & date
+    elif sys.version_info < (3, 7):
+        skip = 12  # skip magic & date & file size
+    else:
+        skip = 16  # skip magic & reserved? & date & file size
+    f = open(filename, 'rb')
+    f.read(skip)
+    code = marshal.load(f)
+    f.close()
+    safe = True
+    symbols = dict.fromkeys(iter_symbols(code))
+    for bad in ['__file__', '__path__']:
+        if bad in symbols:
+            log.warn("%s: module references %s", module, bad)
+            safe = False
+    if 'inspect' in symbols:
+        for bad in [
+            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
+            'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
+            'getinnerframes', 'getouterframes', 'stack', 'trace'
+        ]:
+            if bad in symbols:
+                log.warn("%s: module MAY be using inspect.%s", module, bad)
+                safe = False
+    return safe
+
+
+def iter_symbols(code):
+    """Yield names and strings used by `code` and its nested code objects"""
+    for name in code.co_names:
+        yield name
+    for const in code.co_consts:
+        if isinstance(const, six.string_types):
+            yield const
+        elif isinstance(const, CodeType):
+            for name in iter_symbols(const):
+                yield name
+
+
+def can_scan():
+    if not sys.platform.startswith('java') and sys.platform != 'cli':
+        # CPython, PyPy, etc.
+        return True
+    log.warn("Unable to analyze compiled code on this platform.")
+    log.warn("Please ask the author to include a 'zip_safe'"
+             " setting (either True or False) in the package's setup.py")
+
+
+# Attribute names of options for commands that might need to be convinced to
+# install to the egg build directory
+
+INSTALL_DIRECTORY_ATTRS = [
+    'install_lib', 'install_dir', 'install_data', 'install_base'
+]
+
+
+def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,
+                 mode='w'):
+    """Create a zip file from all the files under 'base_dir'.  The output
+    archive is written to 'zip_filename', whose parent directory is created
+    if needed.  Uses the stdlib "zipfile" module; the directory walk is
+    sorted, so the archive contents are reproducible.  Returns the name of
+    the output zip file.
+    """
+    import zipfile
+
+    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
+    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
+
+    def visit(z, dirname, names):
+        for name in names:
+            path = os.path.normpath(os.path.join(dirname, name))
+            if os.path.isfile(path):
+                p = path[len(base_dir) + 1:]
+                if not dry_run:
+                    z.write(path, p)
+                log.debug("adding '%s'", p)
+
+    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
+    if not dry_run:
+        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
+        for dirname, dirs, files in sorted_walk(base_dir):
+            visit(z, dirname, files)
+        z.close()
+    else:
+        for dirname, dirs, files in sorted_walk(base_dir):
+            visit(None, dirname, files)
+    return zip_filename
diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/bdist_rpm.py b/backend/test/lib/python3.8/site-packages/setuptools/command/bdist_rpm.py
new file mode 100644
index 0000000000000000000000000000000000000000..70730927ecaed778ebbdee98eb37c24ec3f1a8e6
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/setuptools/command/bdist_rpm.py
@@ -0,0 +1,43 @@
+import distutils.command.bdist_rpm as orig
+
+
+class bdist_rpm(orig.bdist_rpm):
+    """
+    Override the default bdist_rpm behavior to do the following:
+
+    1. Run egg_info to ensure the name and version are properly calculated.
+    2. Always run 'install' using --single-version-externally-managed to
+       disable eggs in RPM distributions.
+    3. Replace dash with underscore in the version numbers for better RPM
+       compatibility.
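+       For example, a version of "1.0-beta" is written into the spec
+       file as "1.0_beta".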
+ """ + + def run(self): + # ensure distro name is up-to-date + self.run_command('egg_info') + + orig.bdist_rpm.run(self) + + def _make_spec_file(self): + version = self.distribution.get_version() + rpmversion = version.replace('-', '_') + spec = orig.bdist_rpm._make_spec_file(self) + line23 = '%define version ' + version + line24 = '%define version ' + rpmversion + spec = [ + line.replace( + "Source0: %{name}-%{version}.tar", + "Source0: %{name}-%{unmangled_version}.tar" + ).replace( + "setup.py install ", + "setup.py install --single-version-externally-managed " + ).replace( + "%setup", + "%setup -n %{name}-%{unmangled_version}" + ).replace(line23, line24) + for line in spec + ] + insert_loc = spec.index(line24) + 1 + unmangled_version = "%define unmangled_version " + version + spec.insert(insert_loc, unmangled_version) + return spec diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/bdist_wininst.py b/backend/test/lib/python3.8/site-packages/setuptools/command/bdist_wininst.py new file mode 100644 index 0000000000000000000000000000000000000000..073de97b46c92e2e221cade8c1350ab2c5cff891 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/bdist_wininst.py @@ -0,0 +1,21 @@ +import distutils.command.bdist_wininst as orig + + +class bdist_wininst(orig.bdist_wininst): + def reinitialize_command(self, command, reinit_subcommands=0): + """ + Supplement reinitialize_command to work around + http://bugs.python.org/issue20819 + """ + cmd = self.distribution.reinitialize_command( + command, reinit_subcommands) + if command in ('install', 'install_lib'): + cmd.install_lib = None + return cmd + + def run(self): + self._is_running = True + try: + orig.bdist_wininst.run(self) + finally: + self._is_running = False diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/build_clib.py b/backend/test/lib/python3.8/site-packages/setuptools/command/build_clib.py new file mode 100644 index 0000000000000000000000000000000000000000..09caff6ffde8fc3f368cb635dc3cbbbc8851530d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/build_clib.py @@ -0,0 +1,98 @@ +import distutils.command.build_clib as orig +from distutils.errors import DistutilsSetupError +from distutils import log +from setuptools.dep_util import newer_pairwise_group + + +class build_clib(orig.build_clib): + """ + Override the default build_clib behaviour to do the following: + + 1. Implement a rudimentary timestamp-based dependency system + so 'compile()' doesn't run every time. + 2. Add more keys to the 'build_info' dictionary: + * obj_deps - specify dependencies for each object compiled. + this should be a dictionary mapping a key + with the source filename to a list of + dependencies. Use an empty string for global + dependencies. + * cflags - specify a list of additional flags to pass to + the compiler. + """ + + def build_libraries(self, libraries): + for (lib_name, build_info) in libraries: + sources = build_info.get('sources') + if sources is None or not isinstance(sources, (list, tuple)): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'sources' must be present and must be " + "a list of source filenames" % lib_name) + sources = list(sources) + + log.info("building '%s' library", lib_name) + + # Make sure everything is the correct type. + # obj_deps should be a dictionary of keys as sources + # and a list/tuple of files that are its dependencies. 
+ obj_deps = build_info.get('obj_deps', dict()) + if not isinstance(obj_deps, dict): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'obj_deps' must be a dictionary of " + "type 'source: list'" % lib_name) + dependencies = [] + + # Get the global dependencies that are specified by the '' key. + # These will go into every source's dependency list. + global_deps = obj_deps.get('', list()) + if not isinstance(global_deps, (list, tuple)): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'obj_deps' must be a dictionary of " + "type 'source: list'" % lib_name) + + # Build the list to be used by newer_pairwise_group + # each source will be auto-added to its dependencies. + for source in sources: + src_deps = [source] + src_deps.extend(global_deps) + extra_deps = obj_deps.get(source, list()) + if not isinstance(extra_deps, (list, tuple)): + raise DistutilsSetupError( + "in 'libraries' option (library '%s'), " + "'obj_deps' must be a dictionary of " + "type 'source: list'" % lib_name) + src_deps.extend(extra_deps) + dependencies.append(src_deps) + + expected_objects = self.compiler.object_filenames( + sources, + output_dir=self.build_temp + ) + + if newer_pairwise_group(dependencies, expected_objects) != ([], []): + # First, compile the source code to object files in the library + # directory. (This should probably change to putting object + # files in a temporary build directory.) + macros = build_info.get('macros') + include_dirs = build_info.get('include_dirs') + cflags = build_info.get('cflags') + objects = self.compiler.compile( + sources, + output_dir=self.build_temp, + macros=macros, + include_dirs=include_dirs, + extra_postargs=cflags, + debug=self.debug + ) + + # Now "link" the object files together into a static library. + # (On Unix at least, this isn't really linking -- it just + # builds an archive. Whatever.) + self.compiler.create_static_lib( + expected_objects, + lib_name, + output_dir=self.build_clib, + debug=self.debug + ) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/build_ext.py b/backend/test/lib/python3.8/site-packages/setuptools/command/build_ext.py new file mode 100644 index 0000000000000000000000000000000000000000..daa8e4fe81c18e8fc3e07718b4a66137b062127e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/build_ext.py @@ -0,0 +1,327 @@ +import os +import sys +import itertools +from distutils.command.build_ext import build_ext as _du_build_ext +from distutils.file_util import copy_file +from distutils.ccompiler import new_compiler +from distutils.sysconfig import customize_compiler, get_config_var +from distutils.errors import DistutilsError +from distutils import log + +from setuptools.extension import Library +from setuptools.extern import six + +if six.PY2: + import imp + + EXTENSION_SUFFIXES = [s for s, _, tp in imp.get_suffixes() if tp == imp.C_EXTENSION] +else: + from importlib.machinery import EXTENSION_SUFFIXES + +try: + # Attempt to use Cython for building extensions, if available + from Cython.Distutils.build_ext import build_ext as _build_ext + # Additionally, assert that the compiler module will load + # also. Ref #1229. 
+ __import__('Cython.Compiler.Main') +except ImportError: + _build_ext = _du_build_ext + +# make sure _config_vars is initialized +get_config_var("LDSHARED") +from distutils.sysconfig import _config_vars as _CONFIG_VARS + + +def _customize_compiler_for_shlib(compiler): + if sys.platform == "darwin": + # building .dylib requires additional compiler flags on OSX; here we + # temporarily substitute the pyconfig.h variables so that distutils' + # 'customize_compiler' uses them before we build the shared libraries. + tmp = _CONFIG_VARS.copy() + try: + # XXX Help! I don't have any idea whether these are right... + _CONFIG_VARS['LDSHARED'] = ( + "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup") + _CONFIG_VARS['CCSHARED'] = " -dynamiclib" + _CONFIG_VARS['SO'] = ".dylib" + customize_compiler(compiler) + finally: + _CONFIG_VARS.clear() + _CONFIG_VARS.update(tmp) + else: + customize_compiler(compiler) + + +have_rtld = False +use_stubs = False +libtype = 'shared' + +if sys.platform == "darwin": + use_stubs = True +elif os.name != 'nt': + try: + import dl + use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW') + except ImportError: + pass + +if_dl = lambda s: s if have_rtld else '' + + +def get_abi3_suffix(): + """Return the file extension for an abi3-compliant Extension()""" + for suffix in EXTENSION_SUFFIXES: + if '.abi3' in suffix: # Unix + return suffix + elif suffix == '.pyd': # Windows + return suffix + + +class build_ext(_build_ext): + def run(self): + """Build extensions in build directory, then copy if --inplace""" + old_inplace, self.inplace = self.inplace, 0 + _build_ext.run(self) + self.inplace = old_inplace + if old_inplace: + self.copy_extensions_to_source() + + def copy_extensions_to_source(self): + build_py = self.get_finalized_command('build_py') + for ext in self.extensions: + fullname = self.get_ext_fullname(ext.name) + filename = self.get_ext_filename(fullname) + modpath = fullname.split('.') + package = '.'.join(modpath[:-1]) + package_dir = build_py.get_package_dir(package) + dest_filename = os.path.join(package_dir, + os.path.basename(filename)) + src_filename = os.path.join(self.build_lib, filename) + + # Always copy, even if source is older than destination, to ensure + # that the right extensions for the current Python/platform are + # used. 
+ copy_file( + src_filename, dest_filename, verbose=self.verbose, + dry_run=self.dry_run + ) + if ext._needs_stub: + self.write_stub(package_dir or os.curdir, ext, True) + + def get_ext_filename(self, fullname): + filename = _build_ext.get_ext_filename(self, fullname) + if fullname in self.ext_map: + ext = self.ext_map[fullname] + use_abi3 = ( + six.PY3 + and getattr(ext, 'py_limited_api') + and get_abi3_suffix() + ) + if use_abi3: + so_ext = get_config_var('EXT_SUFFIX') + filename = filename[:-len(so_ext)] + filename = filename + get_abi3_suffix() + if isinstance(ext, Library): + fn, ext = os.path.splitext(filename) + return self.shlib_compiler.library_filename(fn, libtype) + elif use_stubs and ext._links_to_dynamic: + d, fn = os.path.split(filename) + return os.path.join(d, 'dl-' + fn) + return filename + + def initialize_options(self): + _build_ext.initialize_options(self) + self.shlib_compiler = None + self.shlibs = [] + self.ext_map = {} + + def finalize_options(self): + _build_ext.finalize_options(self) + self.extensions = self.extensions or [] + self.check_extensions_list(self.extensions) + self.shlibs = [ext for ext in self.extensions + if isinstance(ext, Library)] + if self.shlibs: + self.setup_shlib_compiler() + for ext in self.extensions: + ext._full_name = self.get_ext_fullname(ext.name) + for ext in self.extensions: + fullname = ext._full_name + self.ext_map[fullname] = ext + + # distutils 3.1 will also ask for module names + # XXX what to do with conflicts? + self.ext_map[fullname.split('.')[-1]] = ext + + ltd = self.shlibs and self.links_to_dynamic(ext) or False + ns = ltd and use_stubs and not isinstance(ext, Library) + ext._links_to_dynamic = ltd + ext._needs_stub = ns + filename = ext._file_name = self.get_ext_filename(fullname) + libdir = os.path.dirname(os.path.join(self.build_lib, filename)) + if ltd and libdir not in ext.library_dirs: + ext.library_dirs.append(libdir) + if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: + ext.runtime_library_dirs.append(os.curdir) + + def setup_shlib_compiler(self): + compiler = self.shlib_compiler = new_compiler( + compiler=self.compiler, dry_run=self.dry_run, force=self.force + ) + _customize_compiler_for_shlib(compiler) + + if self.include_dirs is not None: + compiler.set_include_dirs(self.include_dirs) + if self.define is not None: + # 'define' option is a list of (name,value) tuples + for (name, value) in self.define: + compiler.define_macro(name, value) + if self.undef is not None: + for macro in self.undef: + compiler.undefine_macro(macro) + if self.libraries is not None: + compiler.set_libraries(self.libraries) + if self.library_dirs is not None: + compiler.set_library_dirs(self.library_dirs) + if self.rpath is not None: + compiler.set_runtime_library_dirs(self.rpath) + if self.link_objects is not None: + compiler.set_link_objects(self.link_objects) + + # hack so distutils' build_extension() builds a library instead + compiler.link_shared_object = link_shared_object.__get__(compiler) + + def get_export_symbols(self, ext): + if isinstance(ext, Library): + return ext.export_symbols + return _build_ext.get_export_symbols(self, ext) + + def build_extension(self, ext): + ext._convert_pyx_sources_to_lang() + _compiler = self.compiler + try: + if isinstance(ext, Library): + self.compiler = self.shlib_compiler + _build_ext.build_extension(self, ext) + if ext._needs_stub: + cmd = self.get_finalized_command('build_py').build_lib + self.write_stub(cmd, ext) + finally: + self.compiler = _compiler + + def 
links_to_dynamic(self, ext): + """Return true if 'ext' links to a dynamic lib in the same package""" + # XXX this should check to ensure the lib is actually being built + # XXX as dynamic, and not just using a locally-found version or a + # XXX static-compiled version + libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) + pkg = '.'.join(ext._full_name.split('.')[:-1] + ['']) + return any(pkg + libname in libnames for libname in ext.libraries) + + def get_outputs(self): + return _build_ext.get_outputs(self) + self.__get_stubs_outputs() + + def __get_stubs_outputs(self): + # assemble the base name for each extension that needs a stub + ns_ext_bases = ( + os.path.join(self.build_lib, *ext._full_name.split('.')) + for ext in self.extensions + if ext._needs_stub + ) + # pair each base with the extension + pairs = itertools.product(ns_ext_bases, self.__get_output_extensions()) + return list(base + fnext for base, fnext in pairs) + + def __get_output_extensions(self): + yield '.py' + yield '.pyc' + if self.get_finalized_command('build_py').optimize: + yield '.pyo' + + def write_stub(self, output_dir, ext, compile=False): + log.info("writing stub loader for %s to %s", ext._full_name, + output_dir) + stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) + + '.py') + if compile and os.path.exists(stub_file): + raise DistutilsError(stub_file + " already exists! Please delete.") + if not self.dry_run: + f = open(stub_file, 'w') + f.write( + '\n'.join([ + "def __bootstrap__():", + " global __bootstrap__, __file__, __loader__", + " import sys, os, pkg_resources, imp" + if_dl(", dl"), + " __file__ = pkg_resources.resource_filename" + "(__name__,%r)" + % os.path.basename(ext._file_name), + " del __bootstrap__", + " if '__loader__' in globals():", + " del __loader__", + if_dl(" old_flags = sys.getdlopenflags()"), + " old_dir = os.getcwd()", + " try:", + " os.chdir(os.path.dirname(__file__))", + if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), + " imp.load_dynamic(__name__,__file__)", + " finally:", + if_dl(" sys.setdlopenflags(old_flags)"), + " os.chdir(old_dir)", + "__bootstrap__()", + "" # terminal \n + ]) + ) + f.close() + if compile: + from distutils.util import byte_compile + + byte_compile([stub_file], optimize=0, + force=True, dry_run=self.dry_run) + optimize = self.get_finalized_command('install_lib').optimize + if optimize > 0: + byte_compile([stub_file], optimize=optimize, + force=True, dry_run=self.dry_run) + if os.path.exists(stub_file) and not self.dry_run: + os.unlink(stub_file) + + +if use_stubs or os.name == 'nt': + # Build shared libraries + # + def link_shared_object( + self, objects, output_libname, output_dir=None, libraries=None, + library_dirs=None, runtime_library_dirs=None, export_symbols=None, + debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, + target_lang=None): + self.link( + self.SHARED_LIBRARY, objects, output_libname, + output_dir, libraries, library_dirs, runtime_library_dirs, + export_symbols, debug, extra_preargs, extra_postargs, + build_temp, target_lang + ) +else: + # Build static libraries everywhere else + libtype = 'static' + + def link_shared_object( + self, objects, output_libname, output_dir=None, libraries=None, + library_dirs=None, runtime_library_dirs=None, export_symbols=None, + debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, + target_lang=None): + # XXX we need to either disallow these attrs on Library instances, + # or warn/abort here if set, or something... 
+ # libraries=None, library_dirs=None, runtime_library_dirs=None, + # export_symbols=None, extra_preargs=None, extra_postargs=None, + # build_temp=None + + assert output_dir is None # distutils build_ext doesn't pass this + output_dir, filename = os.path.split(output_libname) + basename, ext = os.path.splitext(filename) + if self.library_filename("x").startswith('lib'): + # strip 'lib' prefix; this is kludgy if some platform uses + # a different prefix + basename = basename[3:] + + self.create_static_lib( + objects, basename, output_dir, debug, target_lang + ) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/build_py.py b/backend/test/lib/python3.8/site-packages/setuptools/command/build_py.py new file mode 100644 index 0000000000000000000000000000000000000000..b0314fd413ae7f8c1027ccde0b092fd493fb104b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/build_py.py @@ -0,0 +1,270 @@ +from glob import glob +from distutils.util import convert_path +import distutils.command.build_py as orig +import os +import fnmatch +import textwrap +import io +import distutils.errors +import itertools + +from setuptools.extern import six +from setuptools.extern.six.moves import map, filter, filterfalse + +try: + from setuptools.lib2to3_ex import Mixin2to3 +except ImportError: + + class Mixin2to3: + def run_2to3(self, files, doctests=True): + "do nothing" + + +class build_py(orig.build_py, Mixin2to3): + """Enhanced 'build_py' command that includes data files with packages + + The data files are specified via a 'package_data' argument to 'setup()'. + See 'setuptools.dist.Distribution' for more details. + + Also, this version of the 'build_py' command allows you to specify both + 'py_modules' and 'packages' in the same setup operation. + """ + + def finalize_options(self): + orig.build_py.finalize_options(self) + self.package_data = self.distribution.package_data + self.exclude_package_data = (self.distribution.exclude_package_data or + {}) + if 'data_files' in self.__dict__: + del self.__dict__['data_files'] + self.__updated_files = [] + self.__doctests_2to3 = [] + + def run(self): + """Build modules, packages, and copy data files to build directory""" + if not self.py_modules and not self.packages: + return + + if self.py_modules: + self.build_modules() + + if self.packages: + self.build_packages() + self.build_package_data() + + self.run_2to3(self.__updated_files, False) + self.run_2to3(self.__updated_files, True) + self.run_2to3(self.__doctests_2to3, True) + + # Only compile actual .py files, using our base class' idea of what our + # output files are. 
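+        # (Passing include_bytecode=0 asks the base class for the list of
+        # built .py files only, without the .pyc/.pyo names it would
+        # otherwise report, so byte_compile() sees sources exclusively.)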
+ self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0)) + + def __getattr__(self, attr): + "lazily compute data files" + if attr == 'data_files': + self.data_files = self._get_data_files() + return self.data_files + return orig.build_py.__getattr__(self, attr) + + def build_module(self, module, module_file, package): + if six.PY2 and isinstance(package, six.string_types): + # avoid errors on Python 2 when unicode is passed (#190) + package = package.split('.') + outfile, copied = orig.build_py.build_module(self, module, module_file, + package) + if copied: + self.__updated_files.append(outfile) + return outfile, copied + + def _get_data_files(self): + """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" + self.analyze_manifest() + return list(map(self._get_pkg_data_files, self.packages or ())) + + def _get_pkg_data_files(self, package): + # Locate package source directory + src_dir = self.get_package_dir(package) + + # Compute package build directory + build_dir = os.path.join(*([self.build_lib] + package.split('.'))) + + # Strip directory from globbed filenames + filenames = [ + os.path.relpath(file, src_dir) + for file in self.find_data_files(package, src_dir) + ] + return package, src_dir, build_dir, filenames + + def find_data_files(self, package, src_dir): + """Return filenames for package's data files in 'src_dir'""" + patterns = self._get_platform_patterns( + self.package_data, + package, + src_dir, + ) + globs_expanded = map(glob, patterns) + # flatten the expanded globs into an iterable of matches + globs_matches = itertools.chain.from_iterable(globs_expanded) + glob_files = filter(os.path.isfile, globs_matches) + files = itertools.chain( + self.manifest_files.get(package, []), + glob_files, + ) + return self.exclude_data_files(package, src_dir, files) + + def build_package_data(self): + """Copy data files into build directory""" + for package, src_dir, build_dir, filenames in self.data_files: + for filename in filenames: + target = os.path.join(build_dir, filename) + self.mkpath(os.path.dirname(target)) + srcfile = os.path.join(src_dir, filename) + outf, copied = self.copy_file(srcfile, target) + srcfile = os.path.abspath(srcfile) + if (copied and + srcfile in self.distribution.convert_2to3_doctests): + self.__doctests_2to3.append(outf) + + def analyze_manifest(self): + self.manifest_files = mf = {} + if not self.distribution.include_package_data: + return + src_dirs = {} + for package in self.packages or (): + # Locate package source directory + src_dirs[assert_relative(self.get_package_dir(package))] = package + + self.run_command('egg_info') + ei_cmd = self.get_finalized_command('egg_info') + for path in ei_cmd.filelist.files: + d, f = os.path.split(assert_relative(path)) + prev = None + oldf = f + while d and d != prev and d not in src_dirs: + prev = d + d, df = os.path.split(d) + f = os.path.join(df, f) + if d in src_dirs: + if path.endswith('.py') and f == oldf: + continue # it's a module, not data + mf.setdefault(src_dirs[d], []).append(path) + + def get_data_files(self): + pass # Lazily compute data files in _get_data_files() function. 
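+
+    # get_data_files() is a deliberate no-op here: 'data_files' is served
+    # lazily by the __getattr__ hook above, which computes the list on
+    # first access and caches it on the instance. A minimal sketch of the
+    # same pattern (illustrative only, with a hypothetical _compute()):
+    #
+    #     class Lazy:
+    #         def __getattr__(self, attr):
+    #             # invoked only when normal attribute lookup fails
+    #             if attr == 'value':
+    #                 self.value = self._compute()  # lands in __dict__
+    #                 return self.value
+    #             raise AttributeError(attr)
+    #
+    # After the first access the cached value is found by normal lookup,
+    # so __getattr__ is never called for it again.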
+ + def check_package(self, package, package_dir): + """Check namespace packages' __init__ for declare_namespace""" + try: + return self.packages_checked[package] + except KeyError: + pass + + init_py = orig.build_py.check_package(self, package, package_dir) + self.packages_checked[package] = init_py + + if not init_py or not self.distribution.namespace_packages: + return init_py + + for pkg in self.distribution.namespace_packages: + if pkg == package or pkg.startswith(package + '.'): + break + else: + return init_py + + with io.open(init_py, 'rb') as f: + contents = f.read() + if b'declare_namespace' not in contents: + raise distutils.errors.DistutilsError( + "Namespace package problem: %s is a namespace package, but " + "its\n__init__.py does not call declare_namespace()! Please " + 'fix it.\n(See the setuptools manual under ' + '"Namespace Packages" for details.)\n"' % (package,) + ) + return init_py + + def initialize_options(self): + self.packages_checked = {} + orig.build_py.initialize_options(self) + + def get_package_dir(self, package): + res = orig.build_py.get_package_dir(self, package) + if self.distribution.src_root is not None: + return os.path.join(self.distribution.src_root, res) + return res + + def exclude_data_files(self, package, src_dir, files): + """Filter filenames for package's data files in 'src_dir'""" + files = list(files) + patterns = self._get_platform_patterns( + self.exclude_package_data, + package, + src_dir, + ) + match_groups = ( + fnmatch.filter(files, pattern) + for pattern in patterns + ) + # flatten the groups of matches into an iterable of matches + matches = itertools.chain.from_iterable(match_groups) + bad = set(matches) + keepers = ( + fn + for fn in files + if fn not in bad + ) + # ditch dupes + return list(_unique_everseen(keepers)) + + @staticmethod + def _get_platform_patterns(spec, package, src_dir): + """ + yield platform-specific path patterns (suitable for glob + or fn_match) from a glob-based spec (such as + self.package_data or self.exclude_package_data) + matching package in src_dir. + """ + raw_patterns = itertools.chain( + spec.get('', []), + spec.get(package, []), + ) + return ( + # Each pattern has to be converted to a platform-specific path + os.path.join(src_dir, convert_path(pattern)) + for pattern in raw_patterns + ) + + +# from Python docs +def _unique_everseen(iterable, key=None): + "List unique elements, preserving order. Remember all elements ever seen." + # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen = set() + seen_add = seen.add + if key is None: + for element in filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element + + +def assert_relative(path): + if not os.path.isabs(path): + return path + from distutils.errors import DistutilsSetupError + + msg = textwrap.dedent(""" + Error: setup script specifies an absolute path: + + %s + + setup() arguments must *always* be /-separated paths relative to the + setup.py directory, *never* absolute paths. 
+ """).lstrip() % path + raise DistutilsSetupError(msg) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/develop.py b/backend/test/lib/python3.8/site-packages/setuptools/command/develop.py new file mode 100644 index 0000000000000000000000000000000000000000..009e4f9368f5b29fffd160f3b712fb0cd19807bd --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/develop.py @@ -0,0 +1,221 @@ +from distutils.util import convert_path +from distutils import log +from distutils.errors import DistutilsError, DistutilsOptionError +import os +import glob +import io + +from setuptools.extern import six + +import pkg_resources +from setuptools.command.easy_install import easy_install +from setuptools import namespaces +import setuptools + +__metaclass__ = type + + +class develop(namespaces.DevelopInstaller, easy_install): + """Set up package for development""" + + description = "install package in 'development mode'" + + user_options = easy_install.user_options + [ + ("uninstall", "u", "Uninstall this source package"), + ("egg-path=", None, "Set the path to be used in the .egg-link file"), + ] + + boolean_options = easy_install.boolean_options + ['uninstall'] + + command_consumes_arguments = False # override base + + def run(self): + if self.uninstall: + self.multi_version = True + self.uninstall_link() + self.uninstall_namespaces() + else: + self.install_for_development() + self.warn_deprecated_options() + + def initialize_options(self): + self.uninstall = None + self.egg_path = None + easy_install.initialize_options(self) + self.setup_path = None + self.always_copy_from = '.' # always copy eggs installed in curdir + + def finalize_options(self): + ei = self.get_finalized_command("egg_info") + if ei.broken_egg_info: + template = "Please rename %r to %r before using 'develop'" + args = ei.egg_info, ei.broken_egg_info + raise DistutilsError(template % args) + self.args = [ei.egg_name] + + easy_install.finalize_options(self) + self.expand_basedirs() + self.expand_dirs() + # pick up setup-dir .egg files only: no .egg-info + self.package_index.scan(glob.glob('*.egg')) + + egg_link_fn = ei.egg_name + '.egg-link' + self.egg_link = os.path.join(self.install_dir, egg_link_fn) + self.egg_base = ei.egg_base + if self.egg_path is None: + self.egg_path = os.path.abspath(ei.egg_base) + + target = pkg_resources.normalize_path(self.egg_base) + egg_path = pkg_resources.normalize_path( + os.path.join(self.install_dir, self.egg_path)) + if egg_path != target: + raise DistutilsOptionError( + "--egg-path must be a relative path from the install" + " directory to " + target + ) + + # Make a distribution for the package's source + self.dist = pkg_resources.Distribution( + target, + pkg_resources.PathMetadata(target, os.path.abspath(ei.egg_info)), + project_name=ei.egg_name + ) + + self.setup_path = self._resolve_setup_path( + self.egg_base, + self.install_dir, + self.egg_path, + ) + + @staticmethod + def _resolve_setup_path(egg_base, install_dir, egg_path): + """ + Generate a path from egg_base back to '.' where the + setup script resides and ensure that path points to the + setup path from $install_dir/$egg_path. 
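+
+        For example (illustrative): an egg_base of '.' resolves to '.',
+        while an egg_base one directory deep, such as 'src', resolves to
+        '../'.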
+ """ + path_to_setup = egg_base.replace(os.sep, '/').rstrip('/') + if path_to_setup != os.curdir: + path_to_setup = '../' * (path_to_setup.count('/') + 1) + resolved = pkg_resources.normalize_path( + os.path.join(install_dir, egg_path, path_to_setup) + ) + if resolved != pkg_resources.normalize_path(os.curdir): + raise DistutilsOptionError( + "Can't get a consistent path to setup script from" + " installation directory", resolved, + pkg_resources.normalize_path(os.curdir)) + return path_to_setup + + def install_for_development(self): + if six.PY3 and getattr(self.distribution, 'use_2to3', False): + # If we run 2to3 we can not do this inplace: + + # Ensure metadata is up-to-date + self.reinitialize_command('build_py', inplace=0) + self.run_command('build_py') + bpy_cmd = self.get_finalized_command("build_py") + build_path = pkg_resources.normalize_path(bpy_cmd.build_lib) + + # Build extensions + self.reinitialize_command('egg_info', egg_base=build_path) + self.run_command('egg_info') + + self.reinitialize_command('build_ext', inplace=0) + self.run_command('build_ext') + + # Fixup egg-link and easy-install.pth + ei_cmd = self.get_finalized_command("egg_info") + self.egg_path = build_path + self.dist.location = build_path + # XXX + self.dist._provider = pkg_resources.PathMetadata( + build_path, ei_cmd.egg_info) + else: + # Without 2to3 inplace works fine: + self.run_command('egg_info') + + # Build extensions in-place + self.reinitialize_command('build_ext', inplace=1) + self.run_command('build_ext') + + self.install_site_py() # ensure that target dir is site-safe + if setuptools.bootstrap_install_from: + self.easy_install(setuptools.bootstrap_install_from) + setuptools.bootstrap_install_from = None + + self.install_namespaces() + + # create an .egg-link in the installation dir, pointing to our egg + log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) + if not self.dry_run: + with open(self.egg_link, "w") as f: + f.write(self.egg_path + "\n" + self.setup_path) + # postprocess the installed distro, fixing up .pth, installing scripts, + # and handling requirements + self.process_distribution(None, self.dist, not self.no_deps) + + def uninstall_link(self): + if os.path.exists(self.egg_link): + log.info("Removing %s (link to %s)", self.egg_link, self.egg_base) + egg_link_file = open(self.egg_link) + contents = [line.rstrip() for line in egg_link_file] + egg_link_file.close() + if contents not in ([self.egg_path], + [self.egg_path, self.setup_path]): + log.warn("Link points to %s: uninstall aborted", contents) + return + if not self.dry_run: + os.unlink(self.egg_link) + if not self.dry_run: + self.update_pth(self.dist) # remove any .pth link to us + if self.distribution.scripts: + # XXX should also check for entry point scripts! + log.warn("Note: you must uninstall or replace scripts manually!") + + def install_egg_scripts(self, dist): + if dist is not self.dist: + # Installing a dependency, so fall back to normal behavior + return easy_install.install_egg_scripts(self, dist) + + # create wrapper scripts in the script dir, pointing to dist.scripts + + # new-style... 
+ self.install_wrapper_scripts(dist) + + # ...and old-style + for script_name in self.distribution.scripts or []: + script_path = os.path.abspath(convert_path(script_name)) + script_name = os.path.basename(script_path) + with io.open(script_path) as strm: + script_text = strm.read() + self.install_script(dist, script_name, script_text, script_path) + + def install_wrapper_scripts(self, dist): + dist = VersionlessRequirement(dist) + return easy_install.install_wrapper_scripts(self, dist) + + +class VersionlessRequirement: + """ + Adapt a pkg_resources.Distribution to simply return the project + name as the 'requirement' so that scripts will work across + multiple versions. + + >>> from pkg_resources import Distribution + >>> dist = Distribution(project_name='foo', version='1.0') + >>> str(dist.as_requirement()) + 'foo==1.0' + >>> adapted_dist = VersionlessRequirement(dist) + >>> str(adapted_dist.as_requirement()) + 'foo' + """ + + def __init__(self, dist): + self.__dist = dist + + def __getattr__(self, name): + return getattr(self.__dist, name) + + def as_requirement(self): + return self.project_name diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/dist_info.py b/backend/test/lib/python3.8/site-packages/setuptools/command/dist_info.py new file mode 100644 index 0000000000000000000000000000000000000000..c45258fa03a3ddd6a73db4514365f8741d16ca86 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/dist_info.py @@ -0,0 +1,36 @@ +""" +Create a dist_info directory +As defined in the wheel specification +""" + +import os + +from distutils.core import Command +from distutils import log + + +class dist_info(Command): + + description = 'create a .dist-info directory' + + user_options = [ + ('egg-base=', 'e', "directory containing .egg-info directories" + " (default: top of the source tree)"), + ] + + def initialize_options(self): + self.egg_base = None + + def finalize_options(self): + pass + + def run(self): + egg_info = self.get_finalized_command('egg_info') + egg_info.egg_base = self.egg_base + egg_info.finalize_options() + egg_info.run() + dist_info_dir = egg_info.egg_info[:-len('.egg-info')] + '.dist-info' + log.info("creating '{}'".format(os.path.abspath(dist_info_dir))) + + bdist_wheel = self.get_finalized_command('bdist_wheel') + bdist_wheel.egg2dist(egg_info.egg_info, dist_info_dir) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/easy_install.py b/backend/test/lib/python3.8/site-packages/setuptools/command/easy_install.py new file mode 100644 index 0000000000000000000000000000000000000000..1f6839cb3b78fe8d63f709b6ff9abb15bf276b6e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/easy_install.py @@ -0,0 +1,2402 @@ +#!/usr/bin/env python +""" +Easy Install +------------ + +A tool for doing automatic download/extract/build of distutils-based Python +packages. For detailed documentation, see the accompanying EasyInstall.txt +file, or visit the `EasyInstall home page`__. 
+ +__ https://setuptools.readthedocs.io/en/latest/easy_install.html + +""" + +from glob import glob +from distutils.util import get_platform +from distutils.util import convert_path, subst_vars +from distutils.errors import ( + DistutilsArgError, DistutilsOptionError, + DistutilsError, DistutilsPlatformError, +) +from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS +from distutils import log, dir_util +from distutils.command.build_scripts import first_line_re +from distutils.spawn import find_executable +import sys +import os +import zipimport +import shutil +import tempfile +import zipfile +import re +import stat +import random +import textwrap +import warnings +import site +import struct +import contextlib +import subprocess +import shlex +import io + + +from sysconfig import get_config_vars, get_path + +from setuptools import SetuptoolsDeprecationWarning + +from setuptools.extern import six +from setuptools.extern.six.moves import configparser, map + +from setuptools import Command +from setuptools.sandbox import run_setup +from setuptools.py27compat import rmtree_safe +from setuptools.command import setopt +from setuptools.archive_util import unpack_archive +from setuptools.package_index import ( + PackageIndex, parse_requirement_arg, URL_SCHEME, +) +from setuptools.command import bdist_egg, egg_info +from setuptools.wheel import Wheel +from pkg_resources import ( + yield_lines, normalize_path, resource_string, ensure_directory, + get_distribution, find_distributions, Environment, Requirement, + Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound, + VersionConflict, DEVELOP_DIST, +) +import pkg_resources.py31compat + +__metaclass__ = type + +# Turn on PEP440Warnings +warnings.filterwarnings("default", category=pkg_resources.PEP440Warning) + +__all__ = [ + 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', + 'main', 'get_exe_prefixes', +] + + +def is_64bit(): + return struct.calcsize("P") == 8 + + +def samefile(p1, p2): + """ + Determine if two paths reference the same file. + + Augments os.path.samefile to work on Windows and + suppresses errors if the path doesn't exist. 
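+
+    Rough illustration (hypothetical paths; skipped under doctest):
+
+    >>> samefile('/tmp/a.txt', '/tmp/../tmp/a.txt')  # doctest: +SKIP
+    True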
+ """ + both_exist = os.path.exists(p1) and os.path.exists(p2) + use_samefile = hasattr(os.path, 'samefile') and both_exist + if use_samefile: + return os.path.samefile(p1, p2) + norm_p1 = os.path.normpath(os.path.normcase(p1)) + norm_p2 = os.path.normpath(os.path.normcase(p2)) + return norm_p1 == norm_p2 + + +if six.PY2: + + def _to_bytes(s): + return s + + def isascii(s): + try: + six.text_type(s, 'ascii') + return True + except UnicodeError: + return False +else: + + def _to_bytes(s): + return s.encode('utf8') + + def isascii(s): + try: + s.encode('ascii') + return True + except UnicodeError: + return False + + +_one_liner = lambda text: textwrap.dedent(text).strip().replace('\n', '; ') + + +class easy_install(Command): + """Manage a download/build/install process""" + description = "Find/get/install Python packages" + command_consumes_arguments = True + + user_options = [ + ('prefix=', None, "installation prefix"), + ("zip-ok", "z", "install package as a zipfile"), + ("multi-version", "m", "make apps have to require() a version"), + ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"), + ("install-dir=", "d", "install package to DIR"), + ("script-dir=", "s", "install scripts to DIR"), + ("exclude-scripts", "x", "Don't install scripts"), + ("always-copy", "a", "Copy all needed packages to install dir"), + ("index-url=", "i", "base URL of Python Package Index"), + ("find-links=", "f", "additional URL(s) to search for packages"), + ("build-directory=", "b", + "download/extract/build in DIR; keep the results"), + ('optimize=', 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + ('record=', None, + "filename in which to record list of installed files"), + ('always-unzip', 'Z', "don't install as a zipfile, no matter what"), + ('site-dirs=', 'S', "list of directories where .pth files work"), + ('editable', 'e', "Install specified packages in editable form"), + ('no-deps', 'N', "don't install dependencies"), + ('allow-hosts=', 'H', "pattern(s) that hostnames must match"), + ('local-snapshots-ok', 'l', + "allow building eggs from local checkouts"), + ('version', None, "print version information and exit"), + ('install-layout=', None, "installation layout to choose (known values: deb)"), + ('force-installation-into-system-dir', '0', "force installation into /usr"), + ('no-find-links', None, + "Don't load find-links defined in packages being installed") + ] + boolean_options = [ + 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy', + 'editable', + 'no-deps', 'local-snapshots-ok', 'version', 'force-installation-into-system-dir' + ] + + if site.ENABLE_USER_SITE: + help_msg = "install in user site-package '%s'" % site.USER_SITE + user_options.append(('user', None, help_msg)) + boolean_options.append('user') + + negative_opt = {'always-unzip': 'zip-ok'} + create_index = PackageIndex + + def initialize_options(self): + # the --user option seems to be an opt-in one, + # so the default should be False. 
+ self.user = 0 + self.zip_ok = self.local_snapshots_ok = None + self.install_dir = self.script_dir = self.exclude_scripts = None + self.index_url = None + self.find_links = None + self.build_directory = None + self.args = None + self.optimize = self.record = None + self.upgrade = self.always_copy = self.multi_version = None + self.editable = self.no_deps = self.allow_hosts = None + self.root = self.prefix = self.no_report = None + self.version = None + self.install_purelib = None # for pure module distributions + self.install_platlib = None # non-pure (dists w/ extensions) + self.install_headers = None # for C/C++ headers + self.install_lib = None # set to either purelib or platlib + self.install_scripts = None + self.install_data = None + self.install_base = None + self.install_platbase = None + if site.ENABLE_USER_SITE: + self.install_userbase = site.USER_BASE + self.install_usersite = site.USER_SITE + else: + self.install_userbase = None + self.install_usersite = None + self.no_find_links = None + + # Options not specifiable via command line + self.package_index = None + self.pth_file = self.always_copy_from = None + self.site_dirs = None + self.installed_projects = {} + self.sitepy_installed = False + # enable custom installation, known values: deb + self.install_layout = None + self.force_installation_into_system_dir = None + self.multiarch = None + + # Always read easy_install options, even if we are subclassed, or have + # an independent instance created. This ensures that defaults will + # always come from the standard configuration file(s)' "easy_install" + # section, even if this is a "develop" or "install" command, or some + # other embedding. + self._dry_run = None + self.verbose = self.distribution.verbose + self.distribution._set_command_options( + self, self.distribution.get_option_dict('easy_install') + ) + + def delete_blockers(self, blockers): + extant_blockers = ( + filename for filename in blockers + if os.path.exists(filename) or os.path.islink(filename) + ) + list(map(self._delete_path, extant_blockers)) + + def _delete_path(self, path): + log.info("Deleting %s", path) + if self.dry_run: + return + + is_tree = os.path.isdir(path) and not os.path.islink(path) + remover = rmtree if is_tree else os.unlink + remover(path) + + @staticmethod + def _render_version(): + """ + Render the Setuptools version and installation details, then exit. 
+        """
+        ver = '{}.{}'.format(*sys.version_info)
+        dist = get_distribution('setuptools')
+        tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
+        print(tmpl.format(**locals()))
+        raise SystemExit()
+
+    def finalize_options(self):
+        self.version and self._render_version()
+
+        py_version = sys.version.split()[0]
+        prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
+
+        self.config_vars = {
+            'dist_name': self.distribution.get_name(),
+            'dist_version': self.distribution.get_version(),
+            'dist_fullname': self.distribution.get_fullname(),
+            'py_version': py_version,
+            'py_version_short': py_version[0:3],
+            'py_version_nodot': py_version[0] + py_version[2],
+            'sys_prefix': prefix,
+            'prefix': prefix,
+            'sys_exec_prefix': exec_prefix,
+            'exec_prefix': exec_prefix,
+            # Only python 3.2+ has abiflags
+            'abiflags': getattr(sys, 'abiflags', ''),
+        }
+
+        if site.ENABLE_USER_SITE:
+            self.config_vars['userbase'] = self.install_userbase
+            self.config_vars['usersite'] = self.install_usersite
+
+        self._fix_install_dir_for_user_site()
+
+        self.expand_basedirs()
+        self.expand_dirs()
+
+        if self.install_layout:
+            if not self.install_layout.lower() in ['deb']:
+                raise DistutilsOptionError("unknown value for --install-layout")
+            self.install_layout = self.install_layout.lower()
+
+        import sysconfig
+        if sys.version_info[:2] >= (3, 3):
+            self.multiarch = sysconfig.get_config_var('MULTIARCH')
+
+        self._expand(
+            'install_dir', 'script_dir', 'build_directory',
+            'site_dirs',
+        )
+        # If a non-default installation directory was specified, default the
+        # script directory to match it.
+        if self.script_dir is None:
+            self.script_dir = self.install_dir
+
+        if self.no_find_links is None:
+            self.no_find_links = False
+
+        # Let install_dir get set by install_lib command, which in turn
+        # gets its info from the install command, and takes into account
+        # --prefix and --home and all that other crud.
+        self.set_undefined_options(
+            'install_lib', ('install_dir', 'install_dir')
+        )
+        # Likewise, set default script_dir from 'install_scripts.install_dir'
+        self.set_undefined_options(
+            'install_scripts', ('install_dir', 'script_dir')
+        )
+
+        if self.user and self.install_purelib:
+            self.install_dir = self.install_purelib
+            self.script_dir = self.install_scripts
+
+        if self.prefix == '/usr' and not self.force_installation_into_system_dir:
+            raise DistutilsOptionError("""installation into /usr
+
+Trying to install into the system-managed parts of the file system. Please
+consider installing to another location, or use the
+--force-installation-into-system-dir option to override this check.
+""")
+
+        # default --record from the install command
+        self.set_undefined_options('install', ('record', 'record'))
+        # Should this be moved to the if statement below?
It's not used + # elsewhere + normpath = map(normalize_path, sys.path) + self.all_site_dirs = get_site_dirs() + if self.site_dirs is not None: + site_dirs = [ + os.path.expanduser(s.strip()) for s in + self.site_dirs.split(',') + ] + for d in site_dirs: + if not os.path.isdir(d): + log.warn("%s (in --site-dirs) does not exist", d) + elif normalize_path(d) not in normpath: + raise DistutilsOptionError( + d + " (in --site-dirs) is not on sys.path" + ) + else: + self.all_site_dirs.append(normalize_path(d)) + if not self.editable: + self.check_site_dir() + self.index_url = self.index_url or "https://pypi.org/simple/" + self.shadow_path = self.all_site_dirs[:] + for path_item in self.install_dir, normalize_path(self.script_dir): + if path_item not in self.shadow_path: + self.shadow_path.insert(0, path_item) + + if self.allow_hosts is not None: + hosts = [s.strip() for s in self.allow_hosts.split(',')] + else: + hosts = ['*'] + if self.package_index is None: + self.package_index = self.create_index( + self.index_url, search_path=self.shadow_path, hosts=hosts, + ) + self.local_index = Environment(self.shadow_path + sys.path) + + if self.find_links is not None: + if isinstance(self.find_links, six.string_types): + self.find_links = self.find_links.split() + else: + self.find_links = [] + if self.local_snapshots_ok: + self.package_index.scan_egg_links(self.shadow_path + sys.path) + if not self.no_find_links: + self.package_index.add_find_links(self.find_links) + self.set_undefined_options('install_lib', ('optimize', 'optimize')) + if not isinstance(self.optimize, int): + try: + self.optimize = int(self.optimize) + if not (0 <= self.optimize <= 2): + raise ValueError + except ValueError: + raise DistutilsOptionError("--optimize must be 0, 1, or 2") + + if self.editable and not self.build_directory: + raise DistutilsArgError( + "Must specify a build directory (-b) when using --editable" + ) + if not self.args: + raise DistutilsArgError( + "No urls, filenames, or requirements specified (see --help)") + + self.outputs = [] + + def _fix_install_dir_for_user_site(self): + """ + Fix the install_dir if "--user" was used. + """ + if not self.user or not site.ENABLE_USER_SITE: + return + + self.create_home_path() + if self.install_userbase is None: + msg = "User base directory is not specified" + raise DistutilsPlatformError(msg) + self.install_base = self.install_platbase = self.install_userbase + scheme_name = os.name.replace('posix', 'unix') + '_user' + self.select_scheme(scheme_name) + + def _expand_attrs(self, attrs): + for attr in attrs: + val = getattr(self, attr) + if val is not None: + if os.name == 'posix' or os.name == 'nt': + val = os.path.expanduser(val) + val = subst_vars(val, self.config_vars) + setattr(self, attr, val) + + def expand_basedirs(self): + """Calls `os.path.expanduser` on install_base, install_platbase and + root.""" + self._expand_attrs(['install_base', 'install_platbase', 'root']) + + def expand_dirs(self): + """Calls `os.path.expanduser` on install dirs.""" + dirs = [ + 'install_purelib', + 'install_platlib', + 'install_lib', + 'install_headers', + 'install_scripts', + 'install_data', + ] + self._expand_attrs(dirs) + + def run(self, show_deprecation=True): + if show_deprecation: + self.announce( + "WARNING: The easy_install command is deprecated " + "and will be removed in a future version." 
+ , log.WARN, + ) + if self.verbose != self.distribution.verbose: + log.set_verbosity(self.verbose) + try: + for spec in self.args: + self.easy_install(spec, not self.no_deps) + if self.record: + outputs = list(sorted(self.outputs)) + if self.root: # strip any package prefix + root_len = len(self.root) + for counter in range(len(outputs)): + outputs[counter] = outputs[counter][root_len:] + from distutils import file_util + + self.execute( + file_util.write_file, (self.record, outputs), + "writing list of installed files to '%s'" % + self.record + ) + self.warn_deprecated_options() + finally: + log.set_verbosity(self.distribution.verbose) + + def pseudo_tempname(self): + """Return a pseudo-tempname base in the install directory. + This code is intentionally naive; if a malicious party can write to + the target directory you're already in deep doodoo. + """ + try: + pid = os.getpid() + except Exception: + pid = random.randint(0, sys.maxsize) + return os.path.join(self.install_dir, "test-easy-install-%s" % pid) + + def warn_deprecated_options(self): + pass + + def check_site_dir(self): + """Verify that self.install_dir is .pth-capable dir, if needed""" + + instdir = normalize_path(self.install_dir) + pth_file = os.path.join(instdir, 'easy-install.pth') + + # Is it a configured, PYTHONPATH, implicit, or explicit site dir? + is_site_dir = instdir in self.all_site_dirs + + if not is_site_dir and not self.multi_version: + # No? Then directly test whether it does .pth file processing + is_site_dir = self.check_pth_processing() + else: + # make sure we can write to target dir + testfile = self.pseudo_tempname() + '.write-test' + test_exists = os.path.exists(testfile) + try: + if test_exists: + os.unlink(testfile) + open(testfile, 'w').close() + os.unlink(testfile) + except (OSError, IOError): + self.cant_write_to_target() + + if not is_site_dir and not self.multi_version: + # Can't install non-multi to non-site dir + raise DistutilsError(self.no_default_version_msg()) + + if is_site_dir: + if self.pth_file is None: + self.pth_file = PthDistributions(pth_file, self.all_site_dirs) + else: + self.pth_file = None + + if instdir not in map(normalize_path, _pythonpath()): + # only PYTHONPATH dirs need a site.py, so pretend it's there + self.sitepy_installed = True + elif self.multi_version and not os.path.exists(pth_file): + self.sitepy_installed = True # don't need site.py in this case + self.pth_file = None # and don't create a .pth file + self.install_dir = instdir + + __cant_write_msg = textwrap.dedent(""" + can't create or remove files in install directory + + The following error occurred while trying to add or remove files in the + installation directory: + + %s + + The installation directory you specified (via --install-dir, --prefix, or + the distutils default setting) was: + + %s + """).lstrip() + + __not_exists_id = textwrap.dedent(""" + This directory does not currently exist. Please create it and try again, or + choose a different installation directory (using the -d or --install-dir + option). + """).lstrip() + + __access_msg = textwrap.dedent(""" + Perhaps your account does not have write access to this directory? If the + installation directory is a system-owned directory, you may need to sign in + as the administrator or "root" account. If you do not have administrative + access to this machine, you may wish to choose a different installation + directory, preferably one that is listed in your PYTHONPATH environment + variable. 
+ + For information on other options, you may wish to consult the + documentation at: + + https://setuptools.readthedocs.io/en/latest/easy_install.html + + Please make the appropriate changes for your system and try again. + """).lstrip() + + def cant_write_to_target(self): + msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,) + + if not os.path.exists(self.install_dir): + msg += '\n' + self.__not_exists_id + else: + msg += '\n' + self.__access_msg + raise DistutilsError(msg) + + def check_pth_processing(self): + """Empirically verify whether .pth files are supported in inst. dir""" + instdir = self.install_dir + log.info("Checking .pth file support in %s", instdir) + pth_file = self.pseudo_tempname() + ".pth" + ok_file = pth_file + '.ok' + ok_exists = os.path.exists(ok_file) + tmpl = _one_liner(""" + import os + f = open({ok_file!r}, 'w') + f.write('OK') + f.close() + """) + '\n' + try: + if ok_exists: + os.unlink(ok_file) + dirname = os.path.dirname(ok_file) + pkg_resources.py31compat.makedirs(dirname, exist_ok=True) + f = open(pth_file, 'w') + except (OSError, IOError): + self.cant_write_to_target() + else: + try: + f.write(tmpl.format(**locals())) + f.close() + f = None + executable = sys.executable + if os.name == 'nt': + dirname, basename = os.path.split(executable) + alt = os.path.join(dirname, 'pythonw.exe') + use_alt = ( + basename.lower() == 'python.exe' and + os.path.exists(alt) + ) + if use_alt: + # use pythonw.exe to avoid opening a console window + executable = alt + + from distutils.spawn import spawn + + spawn([executable, '-E', '-c', 'pass'], 0) + + if os.path.exists(ok_file): + log.info( + "TEST PASSED: %s appears to support .pth files", + instdir + ) + return True + finally: + if f: + f.close() + if os.path.exists(ok_file): + os.unlink(ok_file) + if os.path.exists(pth_file): + os.unlink(pth_file) + if not self.multi_version: + log.warn("TEST FAILED: %s does NOT support .pth files", instdir) + return False + + def install_egg_scripts(self, dist): + """Write all the scripts for `dist`, unless scripts are excluded""" + if not self.exclude_scripts and dist.metadata_isdir('scripts'): + for script_name in dist.metadata_listdir('scripts'): + if dist.metadata_isdir('scripts/' + script_name): + # The "script" is a directory, likely a Python 3 + # __pycache__ directory, so skip it. + continue + self.install_script( + dist, script_name, + dist.get_metadata('scripts/' + script_name) + ) + self.install_wrapper_scripts(dist) + + def add_output(self, path): + if os.path.isdir(path): + for base, dirs, files in os.walk(path): + for filename in files: + self.outputs.append(os.path.join(base, filename)) + else: + self.outputs.append(path) + + def not_editable(self, spec): + if self.editable: + raise DistutilsArgError( + "Invalid argument %r: you can't use filenames or URLs " + "with --editable (except via the --find-links option)." 
+ % (spec,) + ) + + def check_editable(self, spec): + if not self.editable: + return + + if os.path.exists(os.path.join(self.build_directory, spec.key)): + raise DistutilsArgError( + "%r already exists in %s; can't do a checkout there" % + (spec.key, self.build_directory) + ) + + @contextlib.contextmanager + def _tmpdir(self): + tmpdir = tempfile.mkdtemp(prefix=u"easy_install-") + try: + # cast to str as workaround for #709 and #710 and #712 + yield str(tmpdir) + finally: + os.path.exists(tmpdir) and rmtree(rmtree_safe(tmpdir)) + + def easy_install(self, spec, deps=False): + if not self.editable: + self.install_site_py() + + with self._tmpdir() as tmpdir: + if not isinstance(spec, Requirement): + if URL_SCHEME(spec): + # It's a url, download it to tmpdir and process + self.not_editable(spec) + dl = self.package_index.download(spec, tmpdir) + return self.install_item(None, dl, tmpdir, deps, True) + + elif os.path.exists(spec): + # Existing file or directory, just process it directly + self.not_editable(spec) + return self.install_item(None, spec, tmpdir, deps, True) + else: + spec = parse_requirement_arg(spec) + + self.check_editable(spec) + dist = self.package_index.fetch_distribution( + spec, tmpdir, self.upgrade, self.editable, + not self.always_copy, self.local_index + ) + if dist is None: + msg = "Could not find suitable distribution for %r" % spec + if self.always_copy: + msg += " (--always-copy skips system and development eggs)" + raise DistutilsError(msg) + elif dist.precedence == DEVELOP_DIST: + # .egg-info dists don't need installing, just process deps + self.process_distribution(spec, dist, deps, "Using") + return dist + else: + return self.install_item(spec, dist.location, tmpdir, deps) + + def install_item(self, spec, download, tmpdir, deps, install_needed=False): + + # Installation is also needed if file in tmpdir or is not an egg + install_needed = install_needed or self.always_copy + install_needed = install_needed or os.path.dirname(download) == tmpdir + install_needed = install_needed or not download.endswith('.egg') + install_needed = install_needed or ( + self.always_copy_from is not None and + os.path.dirname(normalize_path(download)) == + normalize_path(self.always_copy_from) + ) + + if spec and not install_needed: + # at this point, we know it's a local .egg, we just don't know if + # it's already installed. + for dist in self.local_index[spec.project_name]: + if dist.location == download: + break + else: + install_needed = True # it's not in the local index + + log.info("Processing %s", os.path.basename(download)) + + if install_needed: + dists = self.install_eggs(spec, download, tmpdir) + for dist in dists: + self.process_distribution(spec, dist, deps) + else: + dists = [self.egg_distribution(download)] + self.process_distribution(spec, dists[0], deps, "Using") + + if spec is not None: + for dist in dists: + if dist in spec: + return dist + + def select_scheme(self, name): + """Sets the install directories by applying the install schemes.""" + # it's the caller's problem if they supply a bad name! 
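+        # (An unknown name raises KeyError. The names mirror the distutils
+        # install schemes; e.g. _fix_install_dir_for_user_site() above
+        # builds one as os.name.replace('posix', 'unix') + '_user',
+        # yielding 'unix_user' or 'nt_user'.)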
+ scheme = INSTALL_SCHEMES[name] + for key in SCHEME_KEYS: + attrname = 'install_' + key + if getattr(self, attrname) is None: + setattr(self, attrname, scheme[key]) + + def process_distribution(self, requirement, dist, deps=True, *info): + self.update_pth(dist) + self.package_index.add(dist) + if dist in self.local_index[dist.key]: + self.local_index.remove(dist) + self.local_index.add(dist) + self.install_egg_scripts(dist) + self.installed_projects[dist.key] = dist + log.info(self.installation_report(requirement, dist, *info)) + if (dist.has_metadata('dependency_links.txt') and + not self.no_find_links): + self.package_index.add_find_links( + dist.get_metadata_lines('dependency_links.txt') + ) + if not deps and not self.always_copy: + return + elif requirement is not None and dist.key != requirement.key: + log.warn("Skipping dependencies for %s", dist) + return # XXX this is not the distribution we were looking for + elif requirement is None or dist not in requirement: + # if we wound up with a different version, resolve what we've got + distreq = dist.as_requirement() + requirement = Requirement(str(distreq)) + log.info("Processing dependencies for %s", requirement) + try: + distros = WorkingSet([]).resolve( + [requirement], self.local_index, self.easy_install + ) + except DistributionNotFound as e: + raise DistutilsError(str(e)) + except VersionConflict as e: + raise DistutilsError(e.report()) + if self.always_copy or self.always_copy_from: + # Force all the relevant distros to be copied or activated + for dist in distros: + if dist.key not in self.installed_projects: + self.easy_install(dist.as_requirement()) + log.info("Finished processing dependencies for %s", requirement) + + def should_unzip(self, dist): + if self.zip_ok is not None: + return not self.zip_ok + if dist.has_metadata('not-zip-safe'): + return True + if not dist.has_metadata('zip-safe'): + return True + return False + + def maybe_move(self, spec, dist_filename, setup_base): + dst = os.path.join(self.build_directory, spec.key) + if os.path.exists(dst): + msg = ( + "%r already exists in %s; build directory %s will not be kept" + ) + log.warn(msg, spec.key, self.build_directory, setup_base) + return setup_base + if os.path.isdir(dist_filename): + setup_base = dist_filename + else: + if os.path.dirname(dist_filename) == setup_base: + os.unlink(dist_filename) # get it out of the tmp dir + contents = os.listdir(setup_base) + if len(contents) == 1: + dist_filename = os.path.join(setup_base, contents[0]) + if os.path.isdir(dist_filename): + # if the only thing there is a directory, move it instead + setup_base = dist_filename + ensure_directory(dst) + shutil.move(setup_base, dst) + return dst + + def install_wrapper_scripts(self, dist): + if self.exclude_scripts: + return + for args in ScriptWriter.best().get_args(dist): + self.write_script(*args) + + def install_script(self, dist, script_name, script_text, dev_path=None): + """Generate a legacy script wrapper and install it""" + spec = str(dist.as_requirement()) + is_script = is_python_script(script_text, script_name) + + if is_script: + body = self._load_template(dev_path) % locals() + script_text = ScriptWriter.get_header(script_text) + body + self.write_script(script_name, _to_bytes(script_text), 'b') + + @staticmethod + def _load_template(dev_path): + """ + There are a couple of template scripts in the package. This + function loads one of them and prepares it for use. 
+ """ + # See https://github.com/pypa/setuptools/issues/134 for info + # on script file naming and downstream issues with SVR4 + name = 'script.tmpl' + if dev_path: + name = name.replace('.tmpl', ' (dev).tmpl') + + raw_bytes = resource_string('setuptools', name) + return raw_bytes.decode('utf-8') + + def write_script(self, script_name, contents, mode="t", blockers=()): + """Write an executable file to the scripts directory""" + self.delete_blockers( # clean up old .py/.pyw w/o a script + [os.path.join(self.script_dir, x) for x in blockers] + ) + log.info("Installing %s script to %s", script_name, self.script_dir) + target = os.path.join(self.script_dir, script_name) + self.add_output(target) + + if self.dry_run: + return + + mask = current_umask() + ensure_directory(target) + if os.path.exists(target): + os.unlink(target) + with open(target, "w" + mode) as f: + f.write(contents) + chmod(target, 0o777 - mask) + + def install_eggs(self, spec, dist_filename, tmpdir): + # .egg dirs or files are already built, so just return them + if dist_filename.lower().endswith('.egg'): + return [self.install_egg(dist_filename, tmpdir)] + elif dist_filename.lower().endswith('.exe'): + return [self.install_exe(dist_filename, tmpdir)] + elif dist_filename.lower().endswith('.whl'): + return [self.install_wheel(dist_filename, tmpdir)] + + # Anything else, try to extract and build + setup_base = tmpdir + if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'): + unpack_archive(dist_filename, tmpdir, self.unpack_progress) + elif os.path.isdir(dist_filename): + setup_base = os.path.abspath(dist_filename) + + if (setup_base.startswith(tmpdir) # something we downloaded + and self.build_directory and spec is not None): + setup_base = self.maybe_move(spec, dist_filename, setup_base) + + # Find the setup.py file + setup_script = os.path.join(setup_base, 'setup.py') + + if not os.path.exists(setup_script): + setups = glob(os.path.join(setup_base, '*', 'setup.py')) + if not setups: + raise DistutilsError( + "Couldn't find a setup script in %s" % + os.path.abspath(dist_filename) + ) + if len(setups) > 1: + raise DistutilsError( + "Multiple setup scripts in %s" % + os.path.abspath(dist_filename) + ) + setup_script = setups[0] + + # Now run it, and return the result + if self.editable: + log.info(self.report_editable(spec, setup_script)) + return [] + else: + return self.build_and_install(setup_script, setup_base) + + def egg_distribution(self, egg_path): + if os.path.isdir(egg_path): + metadata = PathMetadata(egg_path, os.path.join(egg_path, + 'EGG-INFO')) + else: + metadata = EggMetadata(zipimport.zipimporter(egg_path)) + return Distribution.from_filename(egg_path, metadata=metadata) + + def install_egg(self, egg_path, tmpdir): + destination = os.path.join( + self.install_dir, + os.path.basename(egg_path), + ) + destination = os.path.abspath(destination) + if not self.dry_run: + ensure_directory(destination) + + dist = self.egg_distribution(egg_path) + if not samefile(egg_path, destination): + if os.path.isdir(destination) and not os.path.islink(destination): + dir_util.remove_tree(destination, dry_run=self.dry_run) + elif os.path.exists(destination): + self.execute( + os.unlink, + (destination,), + "Removing " + destination, + ) + try: + new_dist_is_zipped = False + if os.path.isdir(egg_path): + if egg_path.startswith(tmpdir): + f, m = shutil.move, "Moving" + else: + f, m = shutil.copytree, "Copying" + elif self.should_unzip(dist): + self.mkpath(destination) + f, m = self.unpack_and_compile, "Extracting" + 
else: + new_dist_is_zipped = True + if egg_path.startswith(tmpdir): + f, m = shutil.move, "Moving" + else: + f, m = shutil.copy2, "Copying" + self.execute( + f, + (egg_path, destination), + (m + " %s to %s") % ( + os.path.basename(egg_path), + os.path.dirname(destination) + ), + ) + update_dist_caches( + destination, + fix_zipimporter_caches=new_dist_is_zipped, + ) + except Exception: + update_dist_caches(destination, fix_zipimporter_caches=False) + raise + + self.add_output(destination) + return self.egg_distribution(destination) + + def install_exe(self, dist_filename, tmpdir): + # See if it's valid, get data + cfg = extract_wininst_cfg(dist_filename) + if cfg is None: + raise DistutilsError( + "%s is not a valid distutils Windows .exe" % dist_filename + ) + # Create a dummy distribution object until we build the real distro + dist = Distribution( + None, + project_name=cfg.get('metadata', 'name'), + version=cfg.get('metadata', 'version'), platform=get_platform(), + ) + + # Convert the .exe to an unpacked egg + egg_path = os.path.join(tmpdir, dist.egg_name() + '.egg') + dist.location = egg_path + egg_tmp = egg_path + '.tmp' + _egg_info = os.path.join(egg_tmp, 'EGG-INFO') + pkg_inf = os.path.join(_egg_info, 'PKG-INFO') + ensure_directory(pkg_inf) # make sure EGG-INFO dir exists + dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX + self.exe_to_egg(dist_filename, egg_tmp) + + # Write EGG-INFO/PKG-INFO + if not os.path.exists(pkg_inf): + f = open(pkg_inf, 'w') + f.write('Metadata-Version: 1.0\n') + for k, v in cfg.items('metadata'): + if k != 'target_version': + f.write('%s: %s\n' % (k.replace('_', '-').title(), v)) + f.close() + script_dir = os.path.join(_egg_info, 'scripts') + # delete entry-point scripts to avoid duping + self.delete_blockers([ + os.path.join(script_dir, args[0]) + for args in ScriptWriter.get_args(dist) + ]) + # Build .egg file from tmpdir + bdist_egg.make_zipfile( + egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run, + ) + # install the .egg + return self.install_egg(egg_path, tmpdir) + + def exe_to_egg(self, dist_filename, egg_tmp): + """Extract a bdist_wininst to the directories an egg would use""" + # Check for .pth file and set up prefix translations + prefixes = get_exe_prefixes(dist_filename) + to_compile = [] + native_libs = [] + top_level = {} + + def process(src, dst): + s = src.lower() + for old, new in prefixes: + if s.startswith(old): + src = new + src[len(old):] + parts = src.split('/') + dst = os.path.join(egg_tmp, *parts) + dl = dst.lower() + if dl.endswith('.pyd') or dl.endswith('.dll'): + parts[-1] = bdist_egg.strip_module(parts[-1]) + top_level[os.path.splitext(parts[0])[0]] = 1 + native_libs.append(src) + elif dl.endswith('.py') and old != 'SCRIPTS/': + top_level[os.path.splitext(parts[0])[0]] = 1 + to_compile.append(dst) + return dst + if not src.endswith('.pth'): + log.warn("WARNING: can't process %s", src) + return None + + # extract, tracking .pyd/.dll->native_libs and .py -> to_compile + unpack_archive(dist_filename, egg_tmp, process) + stubs = [] + for res in native_libs: + if res.lower().endswith('.pyd'): # create stubs for .pyd's + parts = res.split('/') + resource = parts[-1] + parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py' + pyfile = os.path.join(egg_tmp, *parts) + to_compile.append(pyfile) + stubs.append(pyfile) + bdist_egg.write_stub(resource, pyfile) + self.byte_compile(to_compile) # compile .py's + bdist_egg.write_safety_flag( + os.path.join(egg_tmp, 'EGG-INFO'), + bdist_egg.analyze_egg(egg_tmp, stubs)) # write 
zip-safety flag + + for name in 'top_level', 'native_libs': + if locals()[name]: + txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt') + if not os.path.exists(txt): + f = open(txt, 'w') + f.write('\n'.join(locals()[name]) + '\n') + f.close() + + def install_wheel(self, wheel_path, tmpdir): + wheel = Wheel(wheel_path) + assert wheel.is_compatible() + destination = os.path.join(self.install_dir, wheel.egg_name()) + destination = os.path.abspath(destination) + if not self.dry_run: + ensure_directory(destination) + if os.path.isdir(destination) and not os.path.islink(destination): + dir_util.remove_tree(destination, dry_run=self.dry_run) + elif os.path.exists(destination): + self.execute( + os.unlink, + (destination,), + "Removing " + destination, + ) + try: + self.execute( + wheel.install_as_egg, + (destination,), + ("Installing %s to %s") % ( + os.path.basename(wheel_path), + os.path.dirname(destination) + ), + ) + finally: + update_dist_caches(destination, fix_zipimporter_caches=False) + self.add_output(destination) + return self.egg_distribution(destination) + + __mv_warning = textwrap.dedent(""" + Because this distribution was installed --multi-version, before you can + import modules from this package in an application, you will need to + 'import pkg_resources' and then use a 'require()' call similar to one of + these examples, in order to select the desired version: + + pkg_resources.require("%(name)s") # latest installed version + pkg_resources.require("%(name)s==%(version)s") # this exact version + pkg_resources.require("%(name)s>=%(version)s") # this version or higher + """).lstrip() + + __id_warning = textwrap.dedent(""" + Note also that the installation directory must be on sys.path at runtime for + this to work. (e.g. by being the application's script directory, by being on + PYTHONPATH, or by being added to sys.path by your code.) + """) + + def installation_report(self, req, dist, what="Installed"): + """Helpful installation message for display to package users""" + msg = "\n%(what)s %(eggloc)s%(extras)s" + if self.multi_version and not self.no_report: + msg += '\n' + self.__mv_warning + if self.install_dir not in map(normalize_path, sys.path): + msg += '\n' + self.__id_warning + + eggloc = dist.location + name = dist.project_name + version = dist.version + extras = '' # TODO: self.report_extras(req, dist) + return msg % locals() + + __editable_msg = textwrap.dedent(""" + Extracted editable version of %(spec)s to %(dirname)s + + If it uses setuptools in its setup script, you can activate it in + "development" mode by going to that directory and running:: + + %(python)s setup.py develop + + See the setuptools documentation for the "develop" command for more info. 
+ """).lstrip() + + def report_editable(self, spec, setup_script): + dirname = os.path.dirname(setup_script) + python = sys.executable + return '\n' + self.__editable_msg % locals() + + def run_setup(self, setup_script, setup_base, args): + sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) + sys.modules.setdefault('distutils.command.egg_info', egg_info) + + args = list(args) + if self.verbose > 2: + v = 'v' * (self.verbose - 1) + args.insert(0, '-' + v) + elif self.verbose < 2: + args.insert(0, '-q') + if self.dry_run: + args.insert(0, '-n') + log.info( + "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args) + ) + try: + run_setup(setup_script, args) + except SystemExit as v: + raise DistutilsError("Setup script exited with %s" % (v.args[0],)) + + def build_and_install(self, setup_script, setup_base): + args = ['bdist_egg', '--dist-dir'] + + dist_dir = tempfile.mkdtemp( + prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script) + ) + try: + self._set_fetcher_options(os.path.dirname(setup_script)) + args.append(dist_dir) + + self.run_setup(setup_script, setup_base, args) + all_eggs = Environment([dist_dir]) + eggs = [] + for key in all_eggs: + for dist in all_eggs[key]: + eggs.append(self.install_egg(dist.location, setup_base)) + if not eggs and not self.dry_run: + log.warn("No eggs found in %s (setup script problem?)", + dist_dir) + return eggs + finally: + rmtree(dist_dir) + log.set_verbosity(self.verbose) # restore our log verbosity + + def _set_fetcher_options(self, base): + """ + When easy_install is about to run bdist_egg on a source dist, that + source dist might have 'setup_requires' directives, requiring + additional fetching. Ensure the fetcher options given to easy_install + are available to that command as well. + """ + # find the fetch options from easy_install and write them out + # to the setup.cfg file. + ei_opts = self.distribution.get_option_dict('easy_install').copy() + fetch_directives = ( + 'find_links', 'site_dirs', 'index_url', 'optimize', 'allow_hosts', + ) + fetch_options = {} + for key, val in ei_opts.items(): + if key not in fetch_directives: + continue + fetch_options[key.replace('_', '-')] = val[1] + # create a settings dictionary suitable for `edit_config` + settings = dict(easy_install=fetch_options) + cfg_filename = os.path.join(base, 'setup.cfg') + setopt.edit_config(cfg_filename, settings) + + def update_pth(self, dist): + if self.pth_file is None: + return + + for d in self.pth_file[dist.key]: # drop old entries + if self.multi_version or d.location != dist.location: + log.info("Removing %s from easy-install.pth file", d) + self.pth_file.remove(d) + if d.location in self.shadow_path: + self.shadow_path.remove(d.location) + + if not self.multi_version: + if dist.location in self.pth_file.paths: + log.info( + "%s is already the active version in easy-install.pth", + dist, + ) + else: + log.info("Adding %s to easy-install.pth file", dist) + self.pth_file.add(dist) # add new entry + if dist.location not in self.shadow_path: + self.shadow_path.append(dist.location) + + if not self.dry_run: + + self.pth_file.save() + + if dist.key == 'setuptools': + # Ensure that setuptools itself never becomes unavailable! + # XXX should this check for latest version? 
+ filename = os.path.join(self.install_dir, 'setuptools.pth') + if os.path.islink(filename): + os.unlink(filename) + f = open(filename, 'wt') + f.write(self.pth_file.make_relative(dist.location) + '\n') + f.close() + + def unpack_progress(self, src, dst): + # Progress filter for unpacking + log.debug("Unpacking %s to %s", src, dst) + return dst # only unpack-and-compile skips files for dry run + + def unpack_and_compile(self, egg_path, destination): + to_compile = [] + to_chmod = [] + + def pf(src, dst): + if dst.endswith('.py') and not src.startswith('EGG-INFO/'): + to_compile.append(dst) + elif dst.endswith('.dll') or dst.endswith('.so'): + to_chmod.append(dst) + self.unpack_progress(src, dst) + return not self.dry_run and dst or None + + unpack_archive(egg_path, destination, pf) + self.byte_compile(to_compile) + if not self.dry_run: + for f in to_chmod: + mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755 + chmod(f, mode) + + def byte_compile(self, to_compile): + if sys.dont_write_bytecode: + return + + from distutils.util import byte_compile + + try: + # try to make the byte compile messages quieter + log.set_verbosity(self.verbose - 1) + + byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) + if self.optimize: + byte_compile( + to_compile, optimize=self.optimize, force=1, + dry_run=self.dry_run, + ) + finally: + log.set_verbosity(self.verbose) # restore original verbosity + + __no_default_msg = textwrap.dedent(""" + bad install directory or PYTHONPATH + + You are attempting to install a package to a directory that is not + on PYTHONPATH and which Python does not read ".pth" files from. The + installation directory you specified (via --install-dir, --prefix, or + the distutils default setting) was: + + %s + + and your PYTHONPATH environment variable currently contains: + + %r + + Here are some of your options for correcting the problem: + + * You can choose a different installation directory, i.e., one that is + on PYTHONPATH or supports .pth files + + * You can add the installation directory to the PYTHONPATH environment + variable. (It must then also be on PYTHONPATH whenever you run + Python and want to use the package(s) you are installing.) + + * You can set up the installation directory to support ".pth" files by + using one of the approaches described here: + + https://setuptools.readthedocs.io/en/latest/easy_install.html#custom-installation-locations + + + Please make the appropriate changes for your system and try again.""").lstrip() + + def no_default_version_msg(self): + template = self.__no_default_msg + return template % (self.install_dir, os.environ.get('PYTHONPATH', '')) + + def install_site_py(self): + """Make sure there's a site.py in the target dir, if needed""" + + if self.sitepy_installed: + return # already did it, or don't need to + + sitepy = os.path.join(self.install_dir, "site.py") + source = resource_string("setuptools", "site-patch.py") + source = source.decode('utf-8') + current = "" + + if os.path.exists(sitepy): + log.debug("Checking existing site.py in %s", self.install_dir) + with io.open(sitepy) as strm: + current = strm.read() + + if not current.startswith('def __boot():'): + raise DistutilsError( + "%s is not a setuptools-generated site.py; please" + " remove it." 
% sitepy + ) + + if current != source: + log.info("Creating %s", sitepy) + if not self.dry_run: + ensure_directory(sitepy) + with io.open(sitepy, 'w', encoding='utf-8') as strm: + strm.write(source) + self.byte_compile([sitepy]) + + self.sitepy_installed = True + + def create_home_path(self): + """Create directories under ~.""" + if not self.user: + return + home = convert_path(os.path.expanduser("~")) + for name, path in six.iteritems(self.config_vars): + if path.startswith(home) and not os.path.isdir(path): + self.debug_print("os.makedirs('%s', 0o700)" % path) + os.makedirs(path, 0o700) + + if sys.version[:3] in ('2.3', '2.4', '2.5') or 'real_prefix' in sys.__dict__: + sitedir_name = 'site-packages' + else: + sitedir_name = 'dist-packages' + + INSTALL_SCHEMES = dict( + posix=dict( + install_dir='$base/lib/python$py_version_short/site-packages', + script_dir='$base/bin', + ), + unix_local = dict( + install_dir = '$base/local/lib/python$py_version_short/%s' % sitedir_name, + script_dir = '$base/local/bin', + ), + posix_local = dict( + install_dir = '$base/local/lib/python$py_version_short/%s' % sitedir_name, + script_dir = '$base/local/bin', + ), + deb_system = dict( + install_dir = '$base/lib/python3/%s' % sitedir_name, + script_dir = '$base/bin', + ), + ) + + DEFAULT_SCHEME = dict( + install_dir='$base/Lib/site-packages', + script_dir='$base/Scripts', + ) + + def _expand(self, *attrs): + config_vars = self.get_finalized_command('install').config_vars + + if self.prefix or self.install_layout: + if self.install_layout and self.install_layout in ['deb']: + scheme_name = "deb_system" + self.prefix = '/usr' + elif self.prefix or 'real_prefix' in sys.__dict__: + scheme_name = os.name + else: + scheme_name = "posix_local" + # Set default install_dir/scripts from --prefix + config_vars = config_vars.copy() + config_vars['base'] = self.prefix + scheme = self.INSTALL_SCHEMES.get(scheme_name,self.DEFAULT_SCHEME) + for attr, val in scheme.items(): + if getattr(self, attr, None) is None: + setattr(self, attr, val) + + from distutils.util import subst_vars + + for attr in attrs: + val = getattr(self, attr) + if val is not None: + val = subst_vars(val, config_vars) + if os.name == 'posix': + val = os.path.expanduser(val) + setattr(self, attr, val) + + +def _pythonpath(): + items = os.environ.get('PYTHONPATH', '').split(os.pathsep) + return filter(None, items) + + +def get_site_dirs(): + """ + Return a list of 'site' dirs + """ + + sitedirs = [] + + # start with PYTHONPATH + sitedirs.extend(_pythonpath()) + + prefixes = [sys.prefix] + if sys.exec_prefix != sys.prefix: + prefixes.append(sys.exec_prefix) + for prefix in prefixes: + if prefix: + if sys.platform in ('os2emx', 'riscos'): + sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) + elif os.sep == '/': + sitedirs.extend([ + os.path.join( + prefix, + "local/lib", + "python" + sys.version[:3], + "dist-packages", + ), + os.path.join( + prefix, + "lib", + "python{}.{}".format(*sys.version_info), + "dist-packages", + ), + os.path.join(prefix, "lib", "site-python"), + ]) + else: + sitedirs.extend([ + prefix, + os.path.join(prefix, "lib", "site-packages"), + ]) + if sys.platform == 'darwin': + # for framework builds *only* we add the standard Apple + # locations. 
Currently only per-user, but /Library and + # /Network/Library could be added too + if 'Python.framework' in prefix: + home = os.environ.get('HOME') + if home: + home_sp = os.path.join( + home, + 'Library', + 'Python', + '{}.{}'.format(*sys.version_info), + 'site-packages', + ) + sitedirs.append(home_sp) + lib_paths = get_path('purelib'), get_path('platlib') + for site_lib in lib_paths: + if site_lib not in sitedirs: + sitedirs.append(site_lib) + + if site.ENABLE_USER_SITE: + sitedirs.append(site.USER_SITE) + + try: + sitedirs.extend(site.getsitepackages()) + except AttributeError: + pass + + sitedirs = list(map(normalize_path, sitedirs)) + + return sitedirs + + +def expand_paths(inputs): + """Yield sys.path directories that might contain "old-style" packages""" + + seen = {} + + for dirname in inputs: + dirname = normalize_path(dirname) + if dirname in seen: + continue + + seen[dirname] = 1 + if not os.path.isdir(dirname): + continue + + files = os.listdir(dirname) + yield dirname, files + + for name in files: + if not name.endswith('.pth'): + # We only care about the .pth files + continue + if name in ('easy-install.pth', 'setuptools.pth'): + # Ignore .pth files that we control + continue + + # Read the .pth file + f = open(os.path.join(dirname, name)) + lines = list(yield_lines(f)) + f.close() + + # Yield existing non-dupe, non-import directory lines from it + for line in lines: + if not line.startswith("import"): + line = normalize_path(line.rstrip()) + if line not in seen: + seen[line] = 1 + if not os.path.isdir(line): + continue + yield line, os.listdir(line) + + +def extract_wininst_cfg(dist_filename): + """Extract configuration data from a bdist_wininst .exe + + Returns a configparser.RawConfigParser, or None + """ + f = open(dist_filename, 'rb') + try: + endrec = zipfile._EndRecData(f) + if endrec is None: + return None + + prepended = (endrec[9] - endrec[5]) - endrec[6] + if prepended < 12: # no wininst data here + return None + f.seek(prepended - 12) + + tag, cfglen, bmlen = struct.unpack("<iii", f.read(12)) + if tag not in (0x1234567A, 0x1234567B): + return None # not a valid tag + + f.seek(prepended - (12 + cfglen)) + init = {'version': '', 'target_version': ''} + cfg = configparser.RawConfigParser(init) + try: + part = f.read(cfglen) + # Read up to the first null byte. + config = part.split(b'\0', 1)[0] + # Now the config is in bytes, but for RawConfigParser, it should + # be text, so decode it. 
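+ # Editor's note (descriptive comment): per the seeks above, the wininst
+ # metadata sits at the end of the data prepended to the zip archive:
+ # cfglen bytes of INI-style config text followed by a 12-byte '<iii'
+ # record of (tag, cfglen, bmlen). The config bytes may be NUL-padded,
+ # which is why the code below splits on b'\0' before decoding.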
+ config = config.decode(sys.getfilesystemencoding()) + cfg.readfp(six.StringIO(config)) + except configparser.Error: + return None + if not cfg.has_section('metadata') or not cfg.has_section('Setup'): + return None + return cfg + + finally: + f.close() + + +def get_exe_prefixes(exe_filename): + """Get exe->egg path translations for a given .exe file""" + + prefixes = [ + ('PURELIB/', ''), + ('PLATLIB/pywin32_system32', ''), + ('PLATLIB/', ''), + ('SCRIPTS/', 'EGG-INFO/scripts/'), + ('DATA/lib/site-packages', ''), + ] + z = zipfile.ZipFile(exe_filename) + try: + for info in z.infolist(): + name = info.filename + parts = name.split('/') + if len(parts) == 3 and parts[2] == 'PKG-INFO': + if parts[1].endswith('.egg-info'): + prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/')) + break + if len(parts) != 2 or not name.endswith('.pth'): + continue + if name.endswith('-nspkg.pth'): + continue + if parts[0].upper() in ('PURELIB', 'PLATLIB'): + contents = z.read(name) + if six.PY3: + contents = contents.decode() + for pth in yield_lines(contents): + pth = pth.strip().replace('\\', '/') + if not pth.startswith('import'): + prefixes.append((('%s/%s/' % (parts[0], pth)), '')) + finally: + z.close() + prefixes = [(x.lower(), y) for x, y in prefixes] + prefixes.sort() + prefixes.reverse() + return prefixes + + +class PthDistributions(Environment): + """A .pth file with Distribution paths in it""" + + dirty = False + + def __init__(self, filename, sitedirs=()): + self.filename = filename + self.sitedirs = list(map(normalize_path, sitedirs)) + self.basedir = normalize_path(os.path.dirname(self.filename)) + self._load() + Environment.__init__(self, [], None, None) + for path in yield_lines(self.paths): + list(map(self.add, find_distributions(path, True))) + + def _load(self): + self.paths = [] + saw_import = False + seen = dict.fromkeys(self.sitedirs) + if os.path.isfile(self.filename): + f = open(self.filename, 'rt') + for line in f: + if line.startswith('import'): + saw_import = True + continue + path = line.rstrip() + self.paths.append(path) + if not path.strip() or path.strip().startswith('#'): + continue + # skip non-existent paths, in case somebody deleted a package + # manually, and duplicate paths as well + path = self.paths[-1] = normalize_path( + os.path.join(self.basedir, path) + ) + if not os.path.exists(path) or path in seen: + self.paths.pop() # skip it + self.dirty = True # we cleaned up, so we're dirty now :) + continue + seen[path] = 1 + f.close() + + if self.paths and not saw_import: + self.dirty = True # ensure anything we touch has import wrappers + while self.paths and not self.paths[-1].strip(): + self.paths.pop() + + def save(self): + """Write changed .pth file back to disk""" + if not self.dirty: + return + + rel_paths = list(map(self.make_relative, self.paths)) + if rel_paths: + log.debug("Saving %s", self.filename) + lines = self._wrap_lines(rel_paths) + data = '\n'.join(lines) + '\n' + + if os.path.islink(self.filename): + os.unlink(self.filename) + with open(self.filename, 'wt') as f: + f.write(data) + + elif os.path.exists(self.filename): + log.debug("Deleting empty %s", self.filename) + os.unlink(self.filename) + + self.dirty = False + + @staticmethod + def _wrap_lines(lines): + return lines + + def add(self, dist): + """Add `dist` to the distribution map""" + new_path = ( + dist.location not in self.paths and ( + dist.location not in self.sitedirs or + # account for '.' 
being in PYTHONPATH + dist.location == os.getcwd() + ) + ) + if new_path: + self.paths.append(dist.location) + self.dirty = True + Environment.add(self, dist) + + def remove(self, dist): + """Remove `dist` from the distribution map""" + while dist.location in self.paths: + self.paths.remove(dist.location) + self.dirty = True + Environment.remove(self, dist) + + def make_relative(self, path): + npath, last = os.path.split(normalize_path(path)) + baselen = len(self.basedir) + parts = [last] + sep = os.altsep == '/' and '/' or os.sep + while len(npath) >= baselen: + if npath == self.basedir: + parts.append(os.curdir) + parts.reverse() + return sep.join(parts) + npath, last = os.path.split(npath) + parts.append(last) + else: + return path + + +class RewritePthDistributions(PthDistributions): + @classmethod + def _wrap_lines(cls, lines): + yield cls.prelude + for line in lines: + yield line + yield cls.postlude + + prelude = _one_liner(""" + import sys + sys.__plen = len(sys.path) + """) + postlude = _one_liner(""" + import sys + new = sys.path[sys.__plen:] + del sys.path[sys.__plen:] + p = getattr(sys, '__egginsert', 0) + sys.path[p:p] = new + sys.__egginsert = p + len(new) + """) + + +if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite': + PthDistributions = RewritePthDistributions + + +def _first_line_re(): + """ + Return a regular expression based on first_line_re suitable for matching + strings. + """ + if isinstance(first_line_re.pattern, str): + return first_line_re + + # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern. + return re.compile(first_line_re.pattern.decode()) + + +def auto_chmod(func, arg, exc): + if func in [os.unlink, os.remove] and os.name == 'nt': + chmod(arg, stat.S_IWRITE) + return func(arg) + et, ev, _ = sys.exc_info() + six.reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg)))) + + +def update_dist_caches(dist_path, fix_zipimporter_caches): + """ + Fix any globally cached `dist_path` related data + + `dist_path` should be a path of a newly installed egg distribution (zipped + or unzipped). + + sys.path_importer_cache contains finder objects that have been cached when + importing data from the original distribution. Any such finders need to be + cleared since the replacement distribution might be packaged differently, + e.g. a zipped egg distribution might get replaced with an unzipped egg + folder or vice versa. Having the old finders cached may then cause Python + to attempt loading modules from the replacement distribution using an + incorrect loader. + + zipimport.zipimporter objects are Python loaders charged with importing + data packaged inside zip archives. If stale loaders referencing the + original distribution, are left behind, they can fail to load modules from + the replacement distribution. E.g. if an old zipimport.zipimporter instance + is used to load data from a new zipped egg archive, it may cause the + operation to attempt to locate the requested data in the wrong location - + one indicated by the original distribution's zip archive directory + information. Such an operation may then fail outright, e.g. report having + read a 'bad local file header', or even worse, it may fail silently & + return invalid data. + + zipimport._zip_directory_cache contains cached zip archive directory + information for all existing zipimport.zipimporter instances and all such + instances connected to the same archive share the same cached directory + information. 
+ + If asked, and the underlying Python implementation allows it, we can fix + all existing zipimport.zipimporter instances instead of having to track + them down and remove them one by one, by updating their shared cached zip + archive directory information. This, of course, assumes that the + replacement distribution is packaged as a zipped egg. + + If not asked to fix existing zipimport.zipimporter instances, we still do + our best to clear any remaining zipimport.zipimporter related cached data + that might somehow later get used when attempting to load data from the new + distribution and thus cause such load operations to fail. Note that when + tracking down such remaining stale data, we can not catch every conceivable + usage from here, and we clear only those that we know of and have found to + cause problems if left alive. Any remaining caches should be updated by + whomever is in charge of maintaining them, i.e. they should be ready to + handle us replacing their zip archives with new distributions at runtime. + + """ + # There are several other known sources of stale zipimport.zipimporter + # instances that we do not clear here, but might if ever given a reason to + # do so: + # * Global setuptools pkg_resources.working_set (a.k.a. 'master working + # set') may contain distributions which may in turn contain their + # zipimport.zipimporter loaders. + # * Several zipimport.zipimporter loaders held by local variables further + # up the function call stack when running the setuptools installation. + # * Already loaded modules may have their __loader__ attribute set to the + # exact loader instance used when importing them. Python 3.4 docs state + # that this information is intended mostly for introspection and so is + # not expected to cause us problems. + normalized_path = normalize_path(dist_path) + _uncache(normalized_path, sys.path_importer_cache) + if fix_zipimporter_caches: + _replace_zip_directory_cache_data(normalized_path) + else: + # Here, even though we do not want to fix existing and now stale + # zipimporter cache information, we still want to remove it. Related to + # Python's zip archive directory information cache, we clear each of + # its stale entries in two phases: + # 1. Clear the entry so attempting to access zip archive information + # via any existing stale zipimport.zipimporter instances fails. + # 2. Remove the entry from the cache so any newly constructed + # zipimport.zipimporter instances do not end up using old stale + # zip archive directory information. + # This whole stale data removal step does not seem strictly necessary, + # but has been left in because it was done before we started replacing + # the zip archive directory information cache content if possible, and + # there are no relevant unit tests that we can depend on to tell us if + # this is really needed. + _remove_and_clear_zip_directory_cache_data(normalized_path) + + +def _collect_zipimporter_cache_entries(normalized_path, cache): + """ + Return zipimporter cache entry keys related to a given normalized path. + + Alternative path spellings (e.g. those using different character case or + those using alternative path separators) related to the same path are + included. Any sub-path entries are included as well, i.e. those + corresponding to zip archives embedded in other zip archives. 
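+
+ For example (editor's illustration, hypothetical paths): with
+ normalized_path '/eggs/foo.egg', entries for '/eggs/foo.egg' itself and
+ '/eggs/foo.egg/inner.zip' match, while '/eggs/foo.eggs' does not,
+ because the character following the prefix must be os.sep or empty.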
+ + """ + result = [] + prefix_len = len(normalized_path) + for p in cache: + np = normalize_path(p) + if (np.startswith(normalized_path) and + np[prefix_len:prefix_len + 1] in (os.sep, '')): + result.append(p) + return result + + +def _update_zipimporter_cache(normalized_path, cache, updater=None): + """ + Update zipimporter cache data for a given normalized path. + + Any sub-path entries are processed as well, i.e. those corresponding to zip + archives embedded in other zip archives. + + Given updater is a callable taking a cache entry key and the original entry + (after already removing the entry from the cache), and expected to update + the entry and possibly return a new one to be inserted in its place. + Returning None indicates that the entry should not be replaced with a new + one. If no updater is given, the cache entries are simply removed without + any additional processing, the same as if the updater simply returned None. + + """ + for p in _collect_zipimporter_cache_entries(normalized_path, cache): + # N.B. pypy's custom zipimport._zip_directory_cache implementation does + # not support the complete dict interface: + # * Does not support item assignment, thus not allowing this function + # to be used only for removing existing cache entries. + # * Does not support the dict.pop() method, forcing us to use the + # get/del patterns instead. For more detailed information see the + # following links: + # https://github.com/pypa/setuptools/issues/202#issuecomment-202913420 + # http://bit.ly/2h9itJX + old_entry = cache[p] + del cache[p] + new_entry = updater and updater(p, old_entry) + if new_entry is not None: + cache[p] = new_entry + + +def _uncache(normalized_path, cache): + _update_zipimporter_cache(normalized_path, cache) + + +def _remove_and_clear_zip_directory_cache_data(normalized_path): + def clear_and_remove_cached_zip_archive_directory_data(path, old_entry): + old_entry.clear() + + _update_zipimporter_cache( + normalized_path, zipimport._zip_directory_cache, + updater=clear_and_remove_cached_zip_archive_directory_data) + + +# PyPy Python implementation does not allow directly writing to the +# zipimport._zip_directory_cache and so prevents us from attempting to correct +# its content. The best we can do there is clear the problematic cache content +# and have PyPy repopulate it as needed. The downside is that if there are any +# stale zipimport.zipimporter instances laying around, attempting to use them +# will fail due to not having its zip archive directory information available +# instead of being automatically corrected to use the new correct zip archive +# directory information. +if '__pypy__' in sys.builtin_module_names: + _replace_zip_directory_cache_data = \ + _remove_and_clear_zip_directory_cache_data +else: + + def _replace_zip_directory_cache_data(normalized_path): + def replace_cached_zip_archive_directory_data(path, old_entry): + # N.B. In theory, we could load the zip directory information just + # once for all updated path spellings, and then copy it locally and + # update its contained path strings to contain the correct + # spelling, but that seems like a way too invasive move (this cache + # structure is not officially documented anywhere and could in + # theory change with new Python releases) for no significant + # benefit. 
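+ # Editor's note (descriptive comment): the lines below implement the
+ # in-place refresh described above: clear the stale shared dict, create
+ # a throwaway zipimport.zipimporter(path) purely for its side effect of
+ # repopulating zipimport._zip_directory_cache, then copy the fresh data
+ # back into the original dict object so that every existing zipimporter
+ # sharing it sees the new directory information.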
+ old_entry.clear() + zipimport.zipimporter(path) + old_entry.update(zipimport._zip_directory_cache[path]) + return old_entry + + _update_zipimporter_cache( + normalized_path, zipimport._zip_directory_cache, + updater=replace_cached_zip_archive_directory_data) + + +def is_python(text, filename='<string>'): + "Is this string a valid Python script?" + try: + compile(text, filename, 'exec') + except (SyntaxError, TypeError): + return False + else: + return True + + +def is_sh(executable): + """Determine if the specified executable is a .sh (contains a #! line)""" + try: + with io.open(executable, encoding='latin-1') as fp: + magic = fp.read(2) + except (OSError, IOError): + return executable + return magic == '#!' + + +def nt_quote_arg(arg): + """Quote a command line argument according to Windows parsing rules""" + return subprocess.list2cmdline([arg]) + + +def is_python_script(script_text, filename): + """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc. + """ + if filename.endswith('.py') or filename.endswith('.pyw'): + return True # extension says it's Python + if is_python(script_text, filename): + return True # it's syntactically valid Python + if script_text.startswith('#!'): + # It begins with a '#!' line, so check if 'python' is in it somewhere + return 'python' in script_text.splitlines()[0].lower() + + return False # Not any Python I can recognize + + +try: + from os import chmod as _chmod +except ImportError: + # Jython compatibility + def _chmod(*args): + pass + + +def chmod(path, mode): + log.debug("changing mode of %s to %o", path, mode) + try: + _chmod(path, mode) + except os.error as e: + log.debug("chmod failed: %s", e) + + +class CommandSpec(list): + """ + A command spec for a #! header, specified as a list of arguments akin to + those passed to Popen. + """ + + options = [] + split_args = dict() + + @classmethod + def best(cls): + """ + Choose the best CommandSpec class based on environmental conditions. + """ + return cls + + @classmethod + def _sys_executable(cls): + _default = os.path.normpath(sys.executable) + return os.environ.get('__PYVENV_LAUNCHER__', _default) + + @classmethod + def from_param(cls, param): + """ + Construct a CommandSpec from a parameter to build_scripts, which may + be None. + """ + if isinstance(param, cls): + return param + if isinstance(param, list): + return cls(param) + if param is None: + return cls.from_environment() + # otherwise, assume it's a string. + return cls.from_string(param) + + @classmethod + def from_environment(cls): + return cls([cls._sys_executable()]) + + @classmethod + def from_string(cls, string): + """ + Construct a command spec from a simple string representing a command + line parseable by shlex.split. + """ + items = shlex.split(string, **cls.split_args) + return cls(items) + + def install_options(self, script_text): + self.options = shlex.split(self._extract_options(script_text)) + cmdline = subprocess.list2cmdline(self) + if not isascii(cmdline): + self.options[:0] = ['-x'] + + @staticmethod + def _extract_options(orig_script): + """ + Extract any options from the first line of the script. 
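+
+ For example (editor's illustration): a script whose first line is
+ '#!python -x' yields the options string '-x', while a script without a
+ '#!' line yields ''.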
+ """ + first = (orig_script + '\n').splitlines()[0] + match = _first_line_re().match(first) + options = match.group(1) or '' if match else '' + return options.strip() + + def as_header(self): + return self._render(self + list(self.options)) + + @staticmethod + def _strip_quotes(item): + _QUOTES = '"\'' + for q in _QUOTES: + if item.startswith(q) and item.endswith(q): + return item[1:-1] + return item + + @staticmethod + def _render(items): + cmdline = subprocess.list2cmdline( + CommandSpec._strip_quotes(item.strip()) for item in items) + return '#!' + cmdline + '\n' + + +# For pbr compat; will be removed in a future version. +sys_executable = CommandSpec._sys_executable() + + +class WindowsCommandSpec(CommandSpec): + split_args = dict(posix=False) + + +class ScriptWriter: + """ + Encapsulates behavior around writing entry point scripts for console and + gui apps. + """ + + template = textwrap.dedent(r""" + # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r + __requires__ = %(spec)r + import re + import sys + from pkg_resources import load_entry_point + + if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) + sys.exit( + load_entry_point(%(spec)r, %(group)r, %(name)r)() + ) + """).lstrip() + + command_spec_class = CommandSpec + + @classmethod + def get_script_args(cls, dist, executable=None, wininst=False): + # for backward compatibility + warnings.warn("Use get_args", EasyInstallDeprecationWarning) + writer = (WindowsScriptWriter if wininst else ScriptWriter).best() + header = cls.get_script_header("", executable, wininst) + return writer.get_args(dist, header) + + @classmethod + def get_script_header(cls, script_text, executable=None, wininst=False): + # for backward compatibility + warnings.warn("Use get_header", EasyInstallDeprecationWarning, stacklevel=2) + if wininst: + executable = "python.exe" + return cls.get_header(script_text, executable) + + @classmethod + def get_args(cls, dist, header=None): + """ + Yield write_script() argument tuples for a distribution's + console_scripts and gui_scripts entry points. + """ + if header is None: + header = cls.get_header() + spec = str(dist.as_requirement()) + for type_ in 'console', 'gui': + group = type_ + '_scripts' + for name, ep in dist.get_entry_map(group).items(): + cls._ensure_safe_name(name) + script_text = cls.template % locals() + args = cls._get_script_args(type_, name, header, script_text) + for res in args: + yield res + + @staticmethod + def _ensure_safe_name(name): + """ + Prevent paths in *_scripts entry point names. + """ + has_path_sep = re.search(r'[\\/]', name) + if has_path_sep: + raise ValueError("Path separators not allowed in script names") + + @classmethod + def get_writer(cls, force_windows): + # for backward compatibility + warnings.warn("Use best", EasyInstallDeprecationWarning) + return WindowsScriptWriter.best() if force_windows else cls.best() + + @classmethod + def best(cls): + """ + Select the best ScriptWriter for this environment. + """ + if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'): + return WindowsScriptWriter.best() + else: + return cls + + @classmethod + def _get_script_args(cls, type_, name, header, script_text): + # Simply write the stub with no extension. + yield (name, header + script_text) + + @classmethod + def get_header(cls, script_text="", executable=None): + """Create a #! 
line, getting options (if any) from script_text"""
+ cmd = cls.command_spec_class.best().from_param(executable)
+ cmd.install_options(script_text)
+ return cmd.as_header()
+
+
+class WindowsScriptWriter(ScriptWriter):
+ command_spec_class = WindowsCommandSpec
+
+ @classmethod
+ def get_writer(cls):
+ # for backward compatibility
+ warnings.warn("Use best", EasyInstallDeprecationWarning)
+ return cls.best()
+
+ @classmethod
+ def best(cls):
+ """
+ Select the best ScriptWriter suitable for Windows
+ """
+ writer_lookup = dict(
+ executable=WindowsExecutableLauncherWriter,
+ natural=cls,
+ )
+ # for compatibility, use the executable launcher by default
+ launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
+ return writer_lookup[launcher]
+
+ @classmethod
+ def _get_script_args(cls, type_, name, header, script_text):
+ "For Windows, add a .py extension"
+ ext = dict(console='.pya', gui='.pyw')[type_]
+ if ext not in os.environ['PATHEXT'].lower().split(';'):
+ msg = (
+ "{ext} not listed in PATHEXT; scripts will not be "
+ "recognized as executables."
+ ).format(**locals())
+ warnings.warn(msg, UserWarning)
+ old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
+ old.remove(ext)
+ header = cls._adjust_header(type_, header)
+ blockers = [name + x for x in old]
+ yield name + ext, header + script_text, 't', blockers
+
+ @classmethod
+ def _adjust_header(cls, type_, orig_header):
+ """
+ Make sure 'pythonw' is used for gui and 'python' is used for
+ console (regardless of what sys.executable is).
+ """
+ pattern = 'pythonw.exe'
+ repl = 'python.exe'
+ if type_ == 'gui':
+ pattern, repl = repl, pattern
+ pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
+ new_header = pattern_ob.sub(string=orig_header, repl=repl)
+ return new_header if cls._use_header(new_header) else orig_header
+
+ @staticmethod
+ def _use_header(new_header):
+ """
+ Should _adjust_header use the replaced header?
+
+ On non-Windows systems, always use. On
+ Windows systems, only use the replaced header if it resolves
+ to an executable on the system.
+ """
+ clean_header = new_header[2:-1].strip('"')
+ return sys.platform != 'win32' or find_executable(clean_header)
+
+
+class WindowsExecutableLauncherWriter(WindowsScriptWriter):
+ @classmethod
+ def _get_script_args(cls, type_, name, header, script_text):
+ """
+ For Windows, add a .py extension and an .exe launcher
+ """
+ if type_ == 'gui':
+ launcher_type = 'gui'
+ ext = '-script.pyw'
+ old = ['.pyw']
+ else:
+ launcher_type = 'cli'
+ ext = '-script.py'
+ old = ['.py', '.pyc', '.pyo']
+ hdr = cls._adjust_header(type_, header)
+ blockers = [name + x for x in old]
+ yield (name + ext, hdr + script_text, 't', blockers)
+ yield (
+ name + '.exe', get_win_launcher(launcher_type),
+ 'b' # write in binary mode
+ )
+ if not is_64bit():
+ # install a manifest for the launcher to prevent Windows
+ # from detecting it as an installer (which it will for
+ # launchers like easy_install.exe). Consider only
+ # adding a manifest for launchers detected as installers.
+ # See Distribute #143 for details.
+ m_name = name + '.exe.manifest'
+ yield (m_name, load_launcher_manifest(name), 't')
+
+
+# for backward-compatibility
+get_script_args = ScriptWriter.get_script_args
+get_script_header = ScriptWriter.get_script_header
+
+
+def get_win_launcher(type):
+ """
+ Load the Windows launcher (executable) suitable for launching a script.
+
+ `type` should be either 'cli' or 'gui'
+
+ Returns the executable as a byte string.
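+
+ Editor's note: given the name mangling below, the launcher is loaded
+ from a setuptools package resource named 'cli-32.exe', 'cli-64.exe',
+ 'gui-32.exe' or 'gui-64.exe', depending on `type` and the bitness of
+ the running interpreter.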
+ """ + launcher_fn = '%s.exe' % type + if is_64bit(): + launcher_fn = launcher_fn.replace(".", "-64.") + else: + launcher_fn = launcher_fn.replace(".", "-32.") + return resource_string('setuptools', launcher_fn) + + +def load_launcher_manifest(name): + manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml') + if six.PY2: + return manifest % vars() + else: + return manifest.decode('utf-8') % vars() + + +def rmtree(path, ignore_errors=False, onerror=auto_chmod): + return shutil.rmtree(path, ignore_errors, onerror) + + +def current_umask(): + tmp = os.umask(0o022) + os.umask(tmp) + return tmp + + +def bootstrap(): + # This function is called when setuptools*.egg is run using /bin/sh + import setuptools + + argv0 = os.path.dirname(setuptools.__path__[0]) + sys.argv[0] = argv0 + sys.argv.append(argv0) + main() + + +def main(argv=None, **kw): + from setuptools import setup + from setuptools.dist import Distribution + + class DistributionWithoutHelpCommands(Distribution): + common_usage = "" + + def _show_help(self, *args, **kw): + with _patch_usage(): + Distribution._show_help(self, *args, **kw) + + if argv is None: + argv = sys.argv[1:] + + with _patch_usage(): + setup( + script_args=['-q', 'easy_install', '-v'] + argv, + script_name=sys.argv[0] or 'easy_install', + distclass=DistributionWithoutHelpCommands, + **kw + ) + + +@contextlib.contextmanager +def _patch_usage(): + import distutils.core + USAGE = textwrap.dedent(""" + usage: %(script)s [options] requirement_or_url ... + or: %(script)s --help + """).lstrip() + + def gen_usage(script_name): + return USAGE % dict( + script=os.path.basename(script_name), + ) + + saved = distutils.core.gen_usage + distutils.core.gen_usage = gen_usage + try: + yield + finally: + distutils.core.gen_usage = saved + +class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning): + """Class for warning about deprecations in EasyInstall in SetupTools. Not ignored by default, unlike DeprecationWarning.""" + diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/egg_info.py b/backend/test/lib/python3.8/site-packages/setuptools/command/egg_info.py new file mode 100644 index 0000000000000000000000000000000000000000..b767ef31d3155dd0292f748f8749c405fd1d3258 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/egg_info.py @@ -0,0 +1,717 @@ +"""setuptools.command.egg_info + +Create a distribution's .egg-info directory and contents""" + +from distutils.filelist import FileList as _FileList +from distutils.errors import DistutilsInternalError +from distutils.util import convert_path +from distutils import log +import distutils.errors +import distutils.filelist +import os +import re +import sys +import io +import warnings +import time +import collections + +from setuptools.extern import six +from setuptools.extern.six.moves import map + +from setuptools import Command +from setuptools.command.sdist import sdist +from setuptools.command.sdist import walk_revctrl +from setuptools.command.setopt import edit_config +from setuptools.command import bdist_egg +from pkg_resources import ( + parse_requirements, safe_name, parse_version, + safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) +import setuptools.unicode_utils as unicode_utils +from setuptools.glob import glob + +from setuptools.extern import packaging +from setuptools import SetuptoolsDeprecationWarning + +def translate_pattern(glob): + """ + Translate a file path glob like '*.txt' in to a regular expression. 
+ This differs from fnmatch.translate which allows wildcards to match + directory separators. It also knows about '**/' which matches any number of + directories. + """ + pat = '' + + # This will split on '/' within [character classes]. This is deliberate. + chunks = glob.split(os.path.sep) + + sep = re.escape(os.sep) + valid_char = '[^%s]' % (sep,) + + for c, chunk in enumerate(chunks): + last_chunk = c == len(chunks) - 1 + + # Chunks that are a literal ** are globstars. They match anything. + if chunk == '**': + if last_chunk: + # Match anything if this is the last component + pat += '.*' + else: + # Match '(name/)*' + pat += '(?:%s+%s)*' % (valid_char, sep) + continue # Break here as the whole path component has been handled + + # Find any special characters in the remainder + i = 0 + chunk_len = len(chunk) + while i < chunk_len: + char = chunk[i] + if char == '*': + # Match any number of name characters + pat += valid_char + '*' + elif char == '?': + # Match a name character + pat += valid_char + elif char == '[': + # Character class + inner_i = i + 1 + # Skip initial !/] chars + if inner_i < chunk_len and chunk[inner_i] == '!': + inner_i = inner_i + 1 + if inner_i < chunk_len and chunk[inner_i] == ']': + inner_i = inner_i + 1 + + # Loop till the closing ] is found + while inner_i < chunk_len and chunk[inner_i] != ']': + inner_i = inner_i + 1 + + if inner_i >= chunk_len: + # Got to the end of the string without finding a closing ] + # Do not treat this as a matching group, but as a literal [ + pat += re.escape(char) + else: + # Grab the insides of the [brackets] + inner = chunk[i + 1:inner_i] + char_class = '' + + # Class negation + if inner[0] == '!': + char_class = '^' + inner = inner[1:] + + char_class += re.escape(inner) + pat += '[%s]' % (char_class,) + + # Skip to the end ] + i = inner_i + else: + pat += re.escape(char) + i += 1 + + # Join each chunk with the dir separator + if not last_chunk: + pat += sep + + pat += r'\Z' + return re.compile(pat, flags=re.MULTILINE|re.DOTALL) + + +class InfoCommon: + tag_build = None + tag_date = None + + @property + def name(self): + return safe_name(self.distribution.get_name()) + + def tagged_version(self): + version = self.distribution.get_version() + # egg_info may be called more than once for a distribution, + # in which case the version string already contains all tags. + if self.vtags and version.endswith(self.vtags): + return safe_version(version) + return safe_version(version + self.vtags) + + def tags(self): + version = '' + if self.tag_build: + version += self.tag_build + if self.tag_date: + version += time.strftime("-%Y%m%d") + return version + vtags = property(tags) + + +class egg_info(InfoCommon, Command): + description = "create a distribution's .egg-info directory" + + user_options = [ + ('egg-base=', 'e', "directory containing .egg-info directories" + " (default: top of the source tree)"), + ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"), + ('tag-build=', 'b', "Specify explicit tag to add to version number"), + ('no-date', 'D', "Don't include date stamp [default]"), + ] + + boolean_options = ['tag-date'] + negative_opt = { + 'no-date': 'tag-date', + } + + def initialize_options(self): + self.egg_base = None + self.egg_name = None + self.egg_info = None + self.egg_version = None + self.broken_egg_info = False + + #################################### + # allow the 'tag_svn_revision' to be detected and + # set, supporting sdists built on older Setuptools. 
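+ # Editor's note (descriptive comment): the property/setter pair below is
+ # deliberately inert. It lets configuration written by older setuptools
+ # (e.g. a setup.cfg [egg_info] section containing tag_svn_revision) be
+ # read and assigned without raising AttributeError, while having no
+ # effect on the version tags.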
+ @property + def tag_svn_revision(self): + pass + + @tag_svn_revision.setter + def tag_svn_revision(self, value): + pass + #################################### + + def save_version_info(self, filename): + """ + Materialize the value of date into the + build tag. Install build keys in a deterministic order + to avoid arbitrary reordering on subsequent builds. + """ + egg_info = collections.OrderedDict() + # follow the order these keys would have been added + # when PYTHONHASHSEED=0 + egg_info['tag_build'] = self.tags() + egg_info['tag_date'] = 0 + edit_config(filename, dict(egg_info=egg_info)) + + def finalize_options(self): + # Note: we need to capture the current value returned + # by `self.tagged_version()`, so we can later update + # `self.distribution.metadata.version` without + # repercussions. + self.egg_name = self.name + self.egg_version = self.tagged_version() + parsed_version = parse_version(self.egg_version) + + try: + is_version = isinstance(parsed_version, packaging.version.Version) + spec = ( + "%s==%s" if is_version else "%s===%s" + ) + list( + parse_requirements(spec % (self.egg_name, self.egg_version)) + ) + except ValueError: + raise distutils.errors.DistutilsOptionError( + "Invalid distribution name or version syntax: %s-%s" % + (self.egg_name, self.egg_version) + ) + + if self.egg_base is None: + dirs = self.distribution.package_dir + self.egg_base = (dirs or {}).get('', os.curdir) + + self.ensure_dirname('egg_base') + self.egg_info = to_filename(self.egg_name) + '.egg-info' + if self.egg_base != os.curdir: + self.egg_info = os.path.join(self.egg_base, self.egg_info) + if '-' in self.egg_name: + self.check_broken_egg_info() + + # Set package version for the benefit of dumber commands + # (e.g. sdist, bdist_wininst, etc.) + # + self.distribution.metadata.version = self.egg_version + + # If we bootstrapped around the lack of a PKG-INFO, as might be the + # case in a fresh checkout, make sure that any special tags get added + # to the version info + # + pd = self.distribution._patched_dist + if pd is not None and pd.key == self.egg_name.lower(): + pd._version = self.egg_version + pd._parsed_version = parse_version(self.egg_version) + self.distribution._patched_dist = None + + def write_or_delete_file(self, what, filename, data, force=False): + """Write `data` to `filename` or delete if empty + + If `data` is non-empty, this routine is the same as ``write_file()``. + If `data` is empty but not ``None``, this is the same as calling + ``delete_file(filename)`. If `data` is ``None``, then this is a no-op + unless `filename` exists, in which case a warning is issued about the + orphaned file (if `force` is false), or deleted (if `force` is true). + """ + if data: + self.write_file(what, filename, data) + elif os.path.exists(filename): + if data is None and not force: + log.warn( + "%s not set in setup(), but %s exists", what, filename + ) + return + else: + self.delete_file(filename) + + def write_file(self, what, filename, data): + """Write `data` to `filename` (if not a dry run) after announcing it + + `what` is used in a log message to identify what is being written + to the file. 
+ """ + log.info("writing %s to %s", what, filename) + if six.PY3: + data = data.encode("utf-8") + if not self.dry_run: + f = open(filename, 'wb') + f.write(data) + f.close() + + def delete_file(self, filename): + """Delete `filename` (if not a dry run) after announcing it""" + log.info("deleting %s", filename) + if not self.dry_run: + os.unlink(filename) + + def run(self): + self.mkpath(self.egg_info) + os.utime(self.egg_info, None) + installer = self.distribution.fetch_build_egg + for ep in iter_entry_points('egg_info.writers'): + ep.require(installer=installer) + writer = ep.resolve() + writer(self, ep.name, os.path.join(self.egg_info, ep.name)) + + # Get rid of native_libs.txt if it was put there by older bdist_egg + nl = os.path.join(self.egg_info, "native_libs.txt") + if os.path.exists(nl): + self.delete_file(nl) + + self.find_sources() + + def find_sources(self): + """Generate SOURCES.txt manifest file""" + manifest_filename = os.path.join(self.egg_info, "SOURCES.txt") + mm = manifest_maker(self.distribution) + mm.manifest = manifest_filename + mm.run() + self.filelist = mm.filelist + + def check_broken_egg_info(self): + bei = self.egg_name + '.egg-info' + if self.egg_base != os.curdir: + bei = os.path.join(self.egg_base, bei) + if os.path.exists(bei): + log.warn( + "-" * 78 + '\n' + "Note: Your current .egg-info directory has a '-' in its name;" + '\nthis will not work correctly with "setup.py develop".\n\n' + 'Please rename %s to %s to correct this problem.\n' + '-' * 78, + bei, self.egg_info + ) + self.broken_egg_info = self.egg_info + self.egg_info = bei # make it work for now + + +class FileList(_FileList): + # Implementations of the various MANIFEST.in commands + + def process_template_line(self, line): + # Parse the line: split it up, make sure the right number of words + # is there, and return the relevant words. 'action' is always + # defined: it's the first word of the line. Which of the other + # three are defined depends on the action; it'll be either + # patterns, (dir and patterns), or (dir_pattern). + (action, patterns, dir, dir_pattern) = self._parse_template_line(line) + + # OK, now we know that the action is valid and we have the + # right number of words on the line for that action -- so we + # can proceed with minimal error-checking. 
+ if action == 'include': + self.debug_print("include " + ' '.join(patterns)) + for pattern in patterns: + if not self.include(pattern): + log.warn("warning: no files found matching '%s'", pattern) + + elif action == 'exclude': + self.debug_print("exclude " + ' '.join(patterns)) + for pattern in patterns: + if not self.exclude(pattern): + log.warn(("warning: no previously-included files " + "found matching '%s'"), pattern) + + elif action == 'global-include': + self.debug_print("global-include " + ' '.join(patterns)) + for pattern in patterns: + if not self.global_include(pattern): + log.warn(("warning: no files found matching '%s' " + "anywhere in distribution"), pattern) + + elif action == 'global-exclude': + self.debug_print("global-exclude " + ' '.join(patterns)) + for pattern in patterns: + if not self.global_exclude(pattern): + log.warn(("warning: no previously-included files matching " + "'%s' found anywhere in distribution"), + pattern) + + elif action == 'recursive-include': + self.debug_print("recursive-include %s %s" % + (dir, ' '.join(patterns))) + for pattern in patterns: + if not self.recursive_include(dir, pattern): + log.warn(("warning: no files found matching '%s' " + "under directory '%s'"), + pattern, dir) + + elif action == 'recursive-exclude': + self.debug_print("recursive-exclude %s %s" % + (dir, ' '.join(patterns))) + for pattern in patterns: + if not self.recursive_exclude(dir, pattern): + log.warn(("warning: no previously-included files matching " + "'%s' found under directory '%s'"), + pattern, dir) + + elif action == 'graft': + self.debug_print("graft " + dir_pattern) + if not self.graft(dir_pattern): + log.warn("warning: no directories found matching '%s'", + dir_pattern) + + elif action == 'prune': + self.debug_print("prune " + dir_pattern) + if not self.prune(dir_pattern): + log.warn(("no previously-included directories found " + "matching '%s'"), dir_pattern) + + else: + raise DistutilsInternalError( + "this cannot happen: invalid action '%s'" % action) + + def _remove_files(self, predicate): + """ + Remove all files from the file list that match the predicate. + Return True if any matching files were removed + """ + found = False + for i in range(len(self.files) - 1, -1, -1): + if predicate(self.files[i]): + self.debug_print(" removing " + self.files[i]) + del self.files[i] + found = True + return found + + def include(self, pattern): + """Include files that match 'pattern'.""" + found = [f for f in glob(pattern) if not os.path.isdir(f)] + self.extend(found) + return bool(found) + + def exclude(self, pattern): + """Exclude files that match 'pattern'.""" + match = translate_pattern(pattern) + return self._remove_files(match.match) + + def recursive_include(self, dir, pattern): + """ + Include all files anywhere in 'dir/' that match the pattern. + """ + full_pattern = os.path.join(dir, '**', pattern) + found = [f for f in glob(full_pattern, recursive=True) + if not os.path.isdir(f)] + self.extend(found) + return bool(found) + + def recursive_exclude(self, dir, pattern): + """ + Exclude any file anywhere in 'dir/' that match the pattern. 
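+
+ For example (editor's illustration): ``recursive-exclude dir *.tmp``
+ removes both 'dir/a.tmp' and 'dir/sub/b.tmp', since the translated
+ pattern 'dir/**/*.tmp' lets '**' match zero or more directories.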
+ """ + match = translate_pattern(os.path.join(dir, '**', pattern)) + return self._remove_files(match.match) + + def graft(self, dir): + """Include all files from 'dir/'.""" + found = [ + item + for match_dir in glob(dir) + for item in distutils.filelist.findall(match_dir) + ] + self.extend(found) + return bool(found) + + def prune(self, dir): + """Filter out files from 'dir/'.""" + match = translate_pattern(os.path.join(dir, '**')) + return self._remove_files(match.match) + + def global_include(self, pattern): + """ + Include all files anywhere in the current directory that match the + pattern. This is very inefficient on large file trees. + """ + if self.allfiles is None: + self.findall() + match = translate_pattern(os.path.join('**', pattern)) + found = [f for f in self.allfiles if match.match(f)] + self.extend(found) + return bool(found) + + def global_exclude(self, pattern): + """ + Exclude all files anywhere that match the pattern. + """ + match = translate_pattern(os.path.join('**', pattern)) + return self._remove_files(match.match) + + def append(self, item): + if item.endswith('\r'): # Fix older sdists built on Windows + item = item[:-1] + path = convert_path(item) + + if self._safe_path(path): + self.files.append(path) + + def extend(self, paths): + self.files.extend(filter(self._safe_path, paths)) + + def _repair(self): + """ + Replace self.files with only safe paths + + Because some owners of FileList manipulate the underlying + ``files`` attribute directly, this method must be called to + repair those paths. + """ + self.files = list(filter(self._safe_path, self.files)) + + def _safe_path(self, path): + enc_warn = "'%s' not %s encodable -- skipping" + + # To avoid accidental trans-codings errors, first to unicode + u_path = unicode_utils.filesys_decode(path) + if u_path is None: + log.warn("'%s' in unexpected encoding -- skipping" % path) + return False + + # Must ensure utf-8 encodability + utf8_path = unicode_utils.try_encode(u_path, "utf-8") + if utf8_path is None: + log.warn(enc_warn, path, 'utf-8') + return False + + try: + # accept is either way checks out + if os.path.exists(u_path) or os.path.exists(utf8_path): + return True + # this will catch any encode errors decoding u_path + except UnicodeEncodeError: + log.warn(enc_warn, path, sys.getfilesystemencoding()) + + +class manifest_maker(sdist): + template = "MANIFEST.in" + + def initialize_options(self): + self.use_defaults = 1 + self.prune = 1 + self.manifest_only = 1 + self.force_manifest = 1 + + def finalize_options(self): + pass + + def run(self): + self.filelist = FileList() + if not os.path.exists(self.manifest): + self.write_manifest() # it must exist so it'll get in the list + self.add_defaults() + if os.path.exists(self.template): + self.read_template() + self.prune_file_list() + self.filelist.sort() + self.filelist.remove_duplicates() + self.write_manifest() + + def _manifest_normalize(self, path): + path = unicode_utils.filesys_decode(path) + return path.replace(os.sep, '/') + + def write_manifest(self): + """ + Write the file list in 'self.filelist' to the manifest file + named by 'self.manifest'. 
+ """ + self.filelist._repair() + + # Now _repairs should encodability, but not unicode + files = [self._manifest_normalize(f) for f in self.filelist.files] + msg = "writing manifest file '%s'" % self.manifest + self.execute(write_file, (self.manifest, files), msg) + + def warn(self, msg): + if not self._should_suppress_warning(msg): + sdist.warn(self, msg) + + @staticmethod + def _should_suppress_warning(msg): + """ + suppress missing-file warnings from sdist + """ + return re.match(r"standard file .*not found", msg) + + def add_defaults(self): + sdist.add_defaults(self) + self.check_license() + self.filelist.append(self.template) + self.filelist.append(self.manifest) + rcfiles = list(walk_revctrl()) + if rcfiles: + self.filelist.extend(rcfiles) + elif os.path.exists(self.manifest): + self.read_manifest() + + if os.path.exists("setup.py"): + # setup.py should be included by default, even if it's not + # the script called to create the sdist + self.filelist.append("setup.py") + + ei_cmd = self.get_finalized_command('egg_info') + self.filelist.graft(ei_cmd.egg_info) + + def prune_file_list(self): + build = self.get_finalized_command('build') + base_dir = self.distribution.get_fullname() + self.filelist.prune(build.build_base) + self.filelist.prune(base_dir) + sep = re.escape(os.sep) + self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep, + is_regex=1) + + +def write_file(filename, contents): + """Create a file with the specified name and write 'contents' (a + sequence of strings without line terminators) to it. + """ + contents = "\n".join(contents) + + # assuming the contents has been vetted for utf-8 encoding + contents = contents.encode("utf-8") + + with open(filename, "wb") as f: # always write POSIX-style manifest + f.write(contents) + + +def write_pkg_info(cmd, basename, filename): + log.info("writing %s", filename) + if not cmd.dry_run: + metadata = cmd.distribution.metadata + metadata.version, oldver = cmd.egg_version, metadata.version + metadata.name, oldname = cmd.egg_name, metadata.name + + try: + # write unescaped data to PKG-INFO, so older pkg_resources + # can still parse it + metadata.write_pkg_info(cmd.egg_info) + finally: + metadata.name, metadata.version = oldname, oldver + + safe = getattr(cmd.distribution, 'zip_safe', None) + + bdist_egg.write_safety_flag(cmd.egg_info, safe) + + +def warn_depends_obsolete(cmd, basename, filename): + if os.path.exists(filename): + log.warn( + "WARNING: 'depends.txt' is not used by setuptools 0.6!\n" + "Use the install_requires/extras_require setup() args instead." 
+        )
+
+
+def _write_requirements(stream, reqs):
+    lines = yield_lines(reqs or ())
+    append_cr = lambda line: line + '\n'
+    lines = map(append_cr, sorted(lines))
+    stream.writelines(lines)
+
+
+def write_requirements(cmd, basename, filename):
+    dist = cmd.distribution
+    data = six.StringIO()
+    _write_requirements(data, dist.install_requires)
+    extras_require = dist.extras_require or {}
+    for extra in sorted(extras_require):
+        data.write('\n[{extra}]\n'.format(**vars()))
+        _write_requirements(data, extras_require[extra])
+    cmd.write_or_delete_file("requirements", filename, data.getvalue())
+
+
+def write_setup_requirements(cmd, basename, filename):
+    data = io.StringIO()
+    _write_requirements(data, cmd.distribution.setup_requires)
+    cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
+
+
+def write_toplevel_names(cmd, basename, filename):
+    pkgs = dict.fromkeys(
+        [
+            k.split('.', 1)[0]
+            for k in cmd.distribution.iter_distribution_names()
+        ]
+    )
+    cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
+
+
+def overwrite_arg(cmd, basename, filename):
+    write_arg(cmd, basename, filename, True)
+
+
+def write_arg(cmd, basename, filename, force=False):
+    argname = os.path.splitext(basename)[0]
+    value = getattr(cmd.distribution, argname, None)
+    if value is not None:
+        value = '\n'.join(value) + '\n'
+    cmd.write_or_delete_file(argname, filename, value, force)
+
+
+def write_entries(cmd, basename, filename):
+    ep = cmd.distribution.entry_points
+
+    if isinstance(ep, six.string_types) or ep is None:
+        data = ep
+    else:
+        data = []
+        for section, contents in sorted(ep.items()):
+            if not isinstance(contents, six.string_types):
+                contents = EntryPoint.parse_group(section, contents)
+                contents = '\n'.join(sorted(map(str, contents.values())))
+            data.append('[%s]\n%s\n\n' % (section, contents))
+        data = ''.join(data)
+
+    cmd.write_or_delete_file('entry points', filename, data, True)
+
+
+def get_pkg_info_revision():
+    """
+    Get a -r### off of PKG-INFO Version in case this is an sdist of
+    a subversion revision.
+    """
+    warnings.warn("get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning)
+    if os.path.exists('PKG-INFO'):
+        with io.open('PKG-INFO') as f:
+            for line in f:
+                match = re.match(r"Version:.*-r(\d+)\s*$", line)
+                if match:
+                    return int(match.group(1))
+    return 0
+
+
+class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
+    """Deprecation warning for EggInfo in setuptools. Unlike DeprecationWarning, it is not ignored by default."""
diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/install.py b/backend/test/lib/python3.8/site-packages/setuptools/command/install.py
new file mode 100644
index 0000000000000000000000000000000000000000..72b9a3e424707633c7e31a347170f358cfa3f87a
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/setuptools/command/install.py
@@ -0,0 +1,125 @@
+from distutils.errors import DistutilsArgError
+import inspect
+import glob
+import warnings
+import platform
+import distutils.command.install as orig
+
+import setuptools
+
+# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
+# now.
See https://github.com/pypa/setuptools/issues/199/ +_install = orig.install + + +class install(orig.install): + """Use easy_install to install the package, w/dependencies""" + + user_options = orig.install.user_options + [ + ('old-and-unmanageable', None, "Try not to use this!"), + ('single-version-externally-managed', None, + "used by system package builders to create 'flat' eggs"), + ] + boolean_options = orig.install.boolean_options + [ + 'old-and-unmanageable', 'single-version-externally-managed', + ] + new_commands = [ + ('install_egg_info', lambda self: True), + ('install_scripts', lambda self: True), + ] + _nc = dict(new_commands) + + def initialize_options(self): + orig.install.initialize_options(self) + self.old_and_unmanageable = None + self.single_version_externally_managed = None + + def finalize_options(self): + orig.install.finalize_options(self) + if self.root: + self.single_version_externally_managed = True + elif self.single_version_externally_managed: + if not self.root and not self.record: + raise DistutilsArgError( + "You must specify --record or --root when building system" + " packages" + ) + + def handle_extra_path(self): + if self.root or self.single_version_externally_managed: + # explicit backward-compatibility mode, allow extra_path to work + return orig.install.handle_extra_path(self) + + # Ignore extra_path when installing an egg (or being run by another + # command without --root or --single-version-externally-managed + self.path_file = None + self.extra_dirs = '' + + def run(self): + # Explicit request for old-style install? Just do it + if self.old_and_unmanageable or self.single_version_externally_managed: + return orig.install.run(self) + + if not self._called_from_setup(inspect.currentframe()): + # Run in backward-compatibility mode to support bdist_* commands. + orig.install.run(self) + else: + self.do_egg_install() + + @staticmethod + def _called_from_setup(run_frame): + """ + Attempt to detect whether run() was called from setup() or by another + command. If called by setup(), the parent caller will be the + 'run_command' method in 'distutils.dist', and *its* caller will be + the 'run_commands' method. If called any other way, the + immediate caller *might* be 'run_command', but it won't have been + called by 'run_commands'. Return True in that case or if a call stack + is unavailable. Return False otherwise. + """ + if run_frame is None: + msg = "Call stack not available. bdist_* commands may fail." + warnings.warn(msg) + if platform.python_implementation() == 'IronPython': + msg = "For best results, pass -X:Frames to enable call stack." + warnings.warn(msg) + return True + res = inspect.getouterframes(run_frame)[2] + caller, = res[:1] + info = inspect.getframeinfo(caller) + caller_module = caller.f_globals.get('__name__', '') + return ( + caller_module == 'distutils.dist' + and info.function == 'run_commands' + ) + + def do_egg_install(self): + + easy_install = self.distribution.get_command_class('easy_install') + + cmd = easy_install( + self.distribution, args="x", root=self.root, record=self.record, + ) + cmd.ensure_finalized() # finalize before bdist_egg munges install cmd + cmd.always_copy_from = '.' 
# make sure local-dir eggs get installed
+
+        # pick up setup-dir .egg files only: no .egg-info
+        cmd.package_index.scan(glob.glob('*.egg'))
+
+        self.run_command('bdist_egg')
+        args = [self.distribution.get_command_obj('bdist_egg').egg_output]
+
+        if setuptools.bootstrap_install_from:
+            # Bootstrap self-installation of setuptools
+            args.insert(0, setuptools.bootstrap_install_from)
+
+        cmd.args = args
+        cmd.run(show_deprecation=False)
+        setuptools.bootstrap_install_from = None
+
+
+# XXX Python 3.1 doesn't see _nc if this is inside the class
+install.sub_commands = (
+    [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc]
+    + install.new_commands
+)
diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/install_egg_info.py b/backend/test/lib/python3.8/site-packages/setuptools/command/install_egg_info.py
new file mode 100644
index 0000000000000000000000000000000000000000..5f405bcad743bac704e90c5489713a5cd4404497
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/setuptools/command/install_egg_info.py
@@ -0,0 +1,82 @@
+from distutils import log, dir_util
+from distutils.errors import DistutilsOptionError
+import os, sys
+
+from setuptools import Command
+from setuptools import namespaces
+from setuptools.archive_util import unpack_archive
+import pkg_resources
+
+
+class install_egg_info(namespaces.Installer, Command):
+    """Install an .egg-info directory for the package"""
+
+    description = "Install an .egg-info directory for the package"
+
+    user_options = [
+        ('install-dir=', 'd', "directory to install to"),
+    ]
+
+    def initialize_options(self):
+        self.install_dir = None
+        self.install_layout = None
+        self.prefix_option = None
+
+    def finalize_options(self):
+        self.set_undefined_options('install_lib',
+                                   ('install_dir', 'install_dir'))
+        self.set_undefined_options('install', ('install_layout', 'install_layout'))
+        if sys.hexversion > 0x2060000:
+            self.set_undefined_options('install', ('prefix_option', 'prefix_option'))
+        ei_cmd = self.get_finalized_command("egg_info")
+        basename = pkg_resources.Distribution(
+            None, None, ei_cmd.egg_name, ei_cmd.egg_version
+        ).egg_name() + '.egg-info'
+
+        if self.install_layout:
+            if self.install_layout.lower() not in ['deb']:
+                raise DistutilsOptionError("unknown value for --install-layout")
+            self.install_layout = self.install_layout.lower()
+            basename = basename.replace('-py%s' % pkg_resources.PY_MAJOR, '')
+        elif self.prefix_option or 'real_prefix' in sys.__dict__:
+            # don't modify for virtualenv
+            pass
+        else:
+            basename = basename.replace('-py%s' % pkg_resources.PY_MAJOR, '')
+
+        self.source = ei_cmd.egg_info
+        self.target = os.path.join(self.install_dir, basename)
+        self.outputs = []
+
+    def run(self):
+        self.run_command('egg_info')
+        if os.path.isdir(self.target) and not os.path.islink(self.target):
+            dir_util.remove_tree(self.target, dry_run=self.dry_run)
+        elif os.path.exists(self.target):
+            self.execute(os.unlink, (self.target,), "Removing " + self.target)
+        if not self.dry_run:
+            pkg_resources.ensure_directory(self.target)
+        self.execute(
+            self.copytree, (), "Copying %s to %s" % (self.source, self.target)
+        )
+        self.install_namespaces()
+
+    def get_outputs(self):
+        return self.outputs
+
+    def copytree(self):
+        # Copy the .egg-info tree to site-packages
+        def skimmer(src, dst):
+            # filter out source-control directories; note that 'src' is always
+            # a '/'-separated path, regardless of platform. 'dst' is a
+            # platform-specific path.
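+            # e.g. a src of '.svn/entries' or 'pkg/CVS/Root' would be
+            # filtered out by the check below (illustrative paths)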
+ for skip in '.svn/', 'CVS/': + if src.startswith(skip) or '/' + skip in src: + return None + if self.install_layout and self.install_layout in ['deb'] and src.startswith('SOURCES.txt'): + log.info("Skipping SOURCES.txt") + return None + self.outputs.append(dst) + log.debug("Copying %s to %s", src, dst) + return dst + + unpack_archive(self.source, self.target, skimmer) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/install_lib.py b/backend/test/lib/python3.8/site-packages/setuptools/command/install_lib.py new file mode 100644 index 0000000000000000000000000000000000000000..bf81519d98e8221707f45c1a3901b8d836095d30 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/install_lib.py @@ -0,0 +1,147 @@ +import os +import sys +from itertools import product, starmap +import distutils.command.install_lib as orig + + +class install_lib(orig.install_lib): + """Don't add compiled flags to filenames of non-Python files""" + + def initialize_options(self): + orig.install_lib.initialize_options(self) + self.multiarch = None + self.install_layout = None + + def finalize_options(self): + orig.install_lib.finalize_options(self) + self.set_undefined_options('install',('install_layout','install_layout')) + if self.install_layout == 'deb' and sys.version_info[:2] >= (3, 3): + import sysconfig + self.multiarch = sysconfig.get_config_var('MULTIARCH') + + def run(self): + self.build() + outfiles = self.install() + if outfiles is not None: + # always compile, in case we have any extension stubs to deal with + self.byte_compile(outfiles) + + def get_exclusions(self): + """ + Return a collections.Sized collections.Container of paths to be + excluded for single_version_externally_managed installations. + """ + all_packages = ( + pkg + for ns_pkg in self._get_SVEM_NSPs() + for pkg in self._all_packages(ns_pkg) + ) + + excl_specs = product(all_packages, self._gen_exclusion_paths()) + return set(starmap(self._exclude_pkg_path, excl_specs)) + + def _exclude_pkg_path(self, pkg, exclusion_path): + """ + Given a package name and exclusion path within that package, + compute the full exclusion path. + """ + parts = pkg.split('.') + [exclusion_path] + return os.path.join(self.install_dir, *parts) + + @staticmethod + def _all_packages(pkg_name): + """ + >>> list(install_lib._all_packages('foo.bar.baz')) + ['foo.bar.baz', 'foo.bar', 'foo'] + """ + while pkg_name: + yield pkg_name + pkg_name, sep, child = pkg_name.rpartition('.') + + def _get_SVEM_NSPs(self): + """ + Get namespace packages (list) but only for + single_version_externally_managed installations and empty otherwise. + """ + # TODO: is it necessary to short-circuit here? i.e. what's the cost + # if get_finalized_command is called even when namespace_packages is + # False? + if not self.distribution.namespace_packages: + return [] + + install_cmd = self.get_finalized_command('install') + svem = install_cmd.single_version_externally_managed + + return self.distribution.namespace_packages if svem else [] + + @staticmethod + def _gen_exclusion_paths(): + """ + Generate file paths to be excluded for namespace packages (bytecode + cache files). + """ + # always exclude the package module itself + yield '__init__.py' + + yield '__init__.pyc' + yield '__init__.pyo' + + if not hasattr(sys, 'implementation'): + return + + base = os.path.join('__pycache__', '__init__.' 
+ sys.implementation.cache_tag) + yield base + '.pyc' + yield base + '.pyo' + yield base + '.opt-1.pyc' + yield base + '.opt-2.pyc' + + def copy_tree( + self, infile, outfile, + preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 + ): + assert preserve_mode and preserve_times and not preserve_symlinks + exclude = self.get_exclusions() + + if not exclude: + import distutils.dir_util + distutils.dir_util._multiarch = self.multiarch + return orig.install_lib.copy_tree(self, infile, outfile) + + # Exclude namespace package __init__.py* files from the output + + from setuptools.archive_util import unpack_directory + from distutils import log + + outfiles = [] + + if self.multiarch: + import sysconfig + ext_suffix = sysconfig.get_config_var ('EXT_SUFFIX') + if ext_suffix.endswith(self.multiarch + ext_suffix[-3:]): + new_suffix = None + else: + new_suffix = "%s-%s%s" % (ext_suffix[:-3], self.multiarch, ext_suffix[-3:]) + + def pf(src, dst): + if dst in exclude: + log.warn("Skipping installation of %s (namespace package)", + dst) + return False + + if self.multiarch and new_suffix and dst.endswith(ext_suffix) and not dst.endswith(new_suffix): + dst = dst.replace(ext_suffix, new_suffix) + log.info("renaming extension to %s", os.path.basename(dst)) + + log.info("copying %s -> %s", src, os.path.dirname(dst)) + outfiles.append(dst) + return dst + + unpack_directory(infile, outfile, pf) + return outfiles + + def get_outputs(self): + outputs = orig.install_lib.get_outputs(self) + exclude = self.get_exclusions() + if exclude: + return [f for f in outputs if f not in exclude] + return outputs diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/install_scripts.py b/backend/test/lib/python3.8/site-packages/setuptools/command/install_scripts.py new file mode 100644 index 0000000000000000000000000000000000000000..16234273a2d36b0b3d821a7a97bf8f03cf3f2948 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/install_scripts.py @@ -0,0 +1,65 @@ +from distutils import log +import distutils.command.install_scripts as orig +import os +import sys + +from pkg_resources import Distribution, PathMetadata, ensure_directory + + +class install_scripts(orig.install_scripts): + """Do normal script install, plus any egg_info wrapper scripts""" + + def initialize_options(self): + orig.install_scripts.initialize_options(self) + self.no_ep = False + + def run(self): + import setuptools.command.easy_install as ei + + self.run_command("egg_info") + if self.distribution.scripts: + orig.install_scripts.run(self) # run first to set up self.outfiles + else: + self.outfiles = [] + if self.no_ep: + # don't install entry point scripts into .egg file! + return + + ei_cmd = self.get_finalized_command("egg_info") + dist = Distribution( + ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), + ei_cmd.egg_name, ei_cmd.egg_version, + ) + bs_cmd = self.get_finalized_command('build_scripts') + exec_param = getattr(bs_cmd, 'executable', None) + bw_cmd = self.get_finalized_command("bdist_wininst") + is_wininst = getattr(bw_cmd, '_is_running', False) + writer = ei.ScriptWriter + if is_wininst: + exec_param = "python.exe" + writer = ei.WindowsScriptWriter + if exec_param == sys.executable: + # In case the path to the Python executable contains a space, wrap + # it so it's not split up. 
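+            # e.g. a path like 'C:\Program Files\Python38\python.exe' must
+            # survive as a single argument (illustrative path)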
+ exec_param = [exec_param] + # resolve the writer to the environment + writer = writer.best() + cmd = writer.command_spec_class.best().from_param(exec_param) + for args in writer.get_args(dist, cmd.as_header()): + self.write_script(*args) + + def write_script(self, script_name, contents, mode="t", *ignored): + """Write an executable file to the scripts directory""" + from setuptools.command.easy_install import chmod, current_umask + + log.info("Installing %s script to %s", script_name, self.install_dir) + target = os.path.join(self.install_dir, script_name) + self.outfiles.append(target) + + mask = current_umask() + if not self.dry_run: + ensure_directory(target) + f = open(target, "w" + mode) + f.write(contents) + f.close() + chmod(target, 0o777 - mask) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/launcher manifest.xml b/backend/test/lib/python3.8/site-packages/setuptools/command/launcher manifest.xml new file mode 100644 index 0000000000000000000000000000000000000000..5972a96d8ded85cc14147ffc1400ec67c3b5a578 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/launcher manifest.xml @@ -0,0 +1,15 @@ +<?xml version="1.0" encoding="UTF-8" standalone="yes"?> +<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0"> + <assemblyIdentity version="1.0.0.0" + processorArchitecture="X86" + name="%(name)s" + type="win32"/> + <!-- Identify the application security requirements. --> + <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3"> + <security> + <requestedPrivileges> + <requestedExecutionLevel level="asInvoker" uiAccess="false"/> + </requestedPrivileges> + </security> + </trustInfo> +</assembly> diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/py36compat.py b/backend/test/lib/python3.8/site-packages/setuptools/command/py36compat.py new file mode 100644 index 0000000000000000000000000000000000000000..61063e7542586c05c3af21d31cd917ebd1118272 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/py36compat.py @@ -0,0 +1,136 @@ +import os +from glob import glob +from distutils.util import convert_path +from distutils.command import sdist + +from setuptools.extern.six.moves import filter + + +class sdist_add_defaults: + """ + Mix-in providing forward-compatibility for functionality as found in + distutils on Python 3.7. + + Do not edit the code in this class except to update functionality + as implemented in distutils. Instead, override in the subclass. + """ + + def add_defaults(self): + """Add all the default files to self.filelist: + - README or README.txt + - setup.py + - test/test*.py + - all pure Python modules mentioned in setup script + - all files pointed by package_data (build_py) + - all files defined in data_files. + - all files defined as scripts. + - all C sources listed as part of extensions or C libraries + in the setup script (doesn't catch C headers!) + Warns if (README or README.txt) or setup.py are missing; everything + else is optional. 
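+
+        Each category above is handled by a dedicated _add_defaults_*
+        helper below, so subclasses can override them individually.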
+ """ + self._add_defaults_standards() + self._add_defaults_optional() + self._add_defaults_python() + self._add_defaults_data_files() + self._add_defaults_ext() + self._add_defaults_c_libs() + self._add_defaults_scripts() + + @staticmethod + def _cs_path_exists(fspath): + """ + Case-sensitive path existence check + + >>> sdist_add_defaults._cs_path_exists(__file__) + True + >>> sdist_add_defaults._cs_path_exists(__file__.upper()) + False + """ + if not os.path.exists(fspath): + return False + # make absolute so we always have a directory + abspath = os.path.abspath(fspath) + directory, filename = os.path.split(abspath) + return filename in os.listdir(directory) + + def _add_defaults_standards(self): + standards = [self.READMES, self.distribution.script_name] + for fn in standards: + if isinstance(fn, tuple): + alts = fn + got_it = False + for fn in alts: + if self._cs_path_exists(fn): + got_it = True + self.filelist.append(fn) + break + + if not got_it: + self.warn("standard file not found: should have one of " + + ', '.join(alts)) + else: + if self._cs_path_exists(fn): + self.filelist.append(fn) + else: + self.warn("standard file '%s' not found" % fn) + + def _add_defaults_optional(self): + optional = ['test/test*.py', 'setup.cfg'] + for pattern in optional: + files = filter(os.path.isfile, glob(pattern)) + self.filelist.extend(files) + + def _add_defaults_python(self): + # build_py is used to get: + # - python modules + # - files defined in package_data + build_py = self.get_finalized_command('build_py') + + # getting python files + if self.distribution.has_pure_modules(): + self.filelist.extend(build_py.get_source_files()) + + # getting package_data files + # (computed in build_py.data_files by build_py.finalize_options) + for pkg, src_dir, build_dir, filenames in build_py.data_files: + for filename in filenames: + self.filelist.append(os.path.join(src_dir, filename)) + + def _add_defaults_data_files(self): + # getting distribution.data_files + if self.distribution.has_data_files(): + for item in self.distribution.data_files: + if isinstance(item, str): + # plain file + item = convert_path(item) + if os.path.isfile(item): + self.filelist.append(item) + else: + # a (dirname, filenames) tuple + dirname, filenames = item + for f in filenames: + f = convert_path(f) + if os.path.isfile(f): + self.filelist.append(f) + + def _add_defaults_ext(self): + if self.distribution.has_ext_modules(): + build_ext = self.get_finalized_command('build_ext') + self.filelist.extend(build_ext.get_source_files()) + + def _add_defaults_c_libs(self): + if self.distribution.has_c_libraries(): + build_clib = self.get_finalized_command('build_clib') + self.filelist.extend(build_clib.get_source_files()) + + def _add_defaults_scripts(self): + if self.distribution.has_scripts(): + build_scripts = self.get_finalized_command('build_scripts') + self.filelist.extend(build_scripts.get_source_files()) + + +if hasattr(sdist.sdist, '_add_defaults_standards'): + # disable the functionality already available upstream + class sdist_add_defaults: + pass diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/register.py b/backend/test/lib/python3.8/site-packages/setuptools/command/register.py new file mode 100644 index 0000000000000000000000000000000000000000..b8266b9a60f8c363ba35f7b73befd7c9c7cb4abc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/register.py @@ -0,0 +1,18 @@ +from distutils import log +import distutils.command.register as orig + +from setuptools.errors import 
RemovedCommandError + + +class register(orig.register): + """Formerly used to register packages on PyPI.""" + + def run(self): + msg = ( + "The register command has been removed, use twine to upload " + + "instead (https://pypi.org/p/twine)" + ) + + self.announce("ERROR: " + msg, log.ERROR) + + raise RemovedCommandError(msg) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/rotate.py b/backend/test/lib/python3.8/site-packages/setuptools/command/rotate.py new file mode 100644 index 0000000000000000000000000000000000000000..b89353f529b3d08e768dea69a9dc8b5e7403003d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/rotate.py @@ -0,0 +1,66 @@ +from distutils.util import convert_path +from distutils import log +from distutils.errors import DistutilsOptionError +import os +import shutil + +from setuptools.extern import six + +from setuptools import Command + + +class rotate(Command): + """Delete older distributions""" + + description = "delete older distributions, keeping N newest files" + user_options = [ + ('match=', 'm', "patterns to match (required)"), + ('dist-dir=', 'd', "directory where the distributions are"), + ('keep=', 'k', "number of matching distributions to keep"), + ] + + boolean_options = [] + + def initialize_options(self): + self.match = None + self.dist_dir = None + self.keep = None + + def finalize_options(self): + if self.match is None: + raise DistutilsOptionError( + "Must specify one or more (comma-separated) match patterns " + "(e.g. '.zip' or '.egg')" + ) + if self.keep is None: + raise DistutilsOptionError("Must specify number of files to keep") + try: + self.keep = int(self.keep) + except ValueError: + raise DistutilsOptionError("--keep must be an integer") + if isinstance(self.match, six.string_types): + self.match = [ + convert_path(p.strip()) for p in self.match.split(',') + ] + self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) + + def run(self): + self.run_command("egg_info") + from glob import glob + + for pattern in self.match: + pattern = self.distribution.get_name() + '*' + pattern + files = glob(os.path.join(self.dist_dir, pattern)) + files = [(os.path.getmtime(f), f) for f in files] + files.sort() + files.reverse() + + log.info("%d file(s) matching %s", len(files), pattern) + files = files[self.keep:] + for (t, f) in files: + log.info("Deleting %s", f) + if not self.dry_run: + if os.path.isdir(f): + shutil.rmtree(f) + else: + os.unlink(f) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/saveopts.py b/backend/test/lib/python3.8/site-packages/setuptools/command/saveopts.py new file mode 100644 index 0000000000000000000000000000000000000000..611cec552867a6d50b7edd700c86c7396d906ea2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/saveopts.py @@ -0,0 +1,22 @@ +from setuptools.command.setopt import edit_config, option_base + + +class saveopts(option_base): + """Save command-line options to a file""" + + description = "save supplied options to setup.cfg or other config file" + + def run(self): + dist = self.distribution + settings = {} + + for cmd in dist.command_options: + + if cmd == 'saveopts': + continue # don't save our own options! 
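+            # illustratively, running `setup.py saveopts build_ext --inplace`
+            # would persist something like {'build_ext': {'inplace': '1'}}
+            # into the target config file (hypothetical values)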
+ + for opt, (src, val) in dist.get_option_dict(cmd).items(): + if src == "command line": + settings.setdefault(cmd, {})[opt] = val + + edit_config(self.filename, settings, self.dry_run) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/sdist.py b/backend/test/lib/python3.8/site-packages/setuptools/command/sdist.py new file mode 100644 index 0000000000000000000000000000000000000000..a851453f9aa9506d307e1aa7e802fdee9e943eae --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/sdist.py @@ -0,0 +1,252 @@ +from distutils import log +import distutils.command.sdist as orig +import os +import sys +import io +import contextlib + +from setuptools.extern import six, ordered_set + +from .py36compat import sdist_add_defaults + +import pkg_resources + +_default_revctrl = list + + +def walk_revctrl(dirname=''): + """Find all files under revision control""" + for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): + for item in ep.load()(dirname): + yield item + + +class sdist(sdist_add_defaults, orig.sdist): + """Smart sdist that finds anything supported by revision control""" + + user_options = [ + ('formats=', None, + "formats for source distribution (comma-separated list)"), + ('keep-temp', 'k', + "keep the distribution tree around after creating " + + "archive file(s)"), + ('dist-dir=', 'd', + "directory to put the source distribution archive(s) in " + "[default: dist]"), + ] + + negative_opt = {} + + README_EXTENSIONS = ['', '.rst', '.txt', '.md'] + READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS) + + def run(self): + self.run_command('egg_info') + ei_cmd = self.get_finalized_command('egg_info') + self.filelist = ei_cmd.filelist + self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt')) + self.check_readme() + + # Run sub commands + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + self.make_distribution() + + dist_files = getattr(self.distribution, 'dist_files', []) + for file in self.archive_files: + data = ('sdist', '', file) + if data not in dist_files: + dist_files.append(data) + + def initialize_options(self): + orig.sdist.initialize_options(self) + + self._default_to_gztar() + + def _default_to_gztar(self): + # only needed on Python prior to 3.6. + if sys.version_info >= (3, 6, 0, 'beta', 1): + return + self.formats = ['gztar'] + + def make_distribution(self): + """ + Workaround for #516 + """ + with self._remove_os_link(): + orig.sdist.make_distribution(self) + + @staticmethod + @contextlib.contextmanager + def _remove_os_link(): + """ + In a context, remove and restore os.link if it exists + """ + + class NoValue: + pass + + orig_val = getattr(os, 'link', NoValue) + try: + del os.link + except Exception: + pass + try: + yield + finally: + if orig_val is not NoValue: + setattr(os, 'link', orig_val) + + def __read_template_hack(self): + # This grody hack closes the template file (MANIFEST.in) if an + # exception occurs during read_template. + # Doing so prevents an error when easy_install attempts to delete the + # file. + try: + orig.sdist.read_template(self) + except Exception: + _, _, tb = sys.exc_info() + tb.tb_next.tb_frame.f_locals['template'].close() + raise + + # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle + # has been fixed, so only override the method if we're using an earlier + # Python. 
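+    # e.g. sys.version_info (2, 7, 1) is affected, while (2, 7, 2) and
+    # (3, 6, 9) are not (illustrative versions)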
+ has_leaky_handle = ( + sys.version_info < (2, 7, 2) + or (3, 0) <= sys.version_info < (3, 1, 4) + or (3, 2) <= sys.version_info < (3, 2, 1) + ) + if has_leaky_handle: + read_template = __read_template_hack + + def _add_defaults_optional(self): + if six.PY2: + sdist_add_defaults._add_defaults_optional(self) + else: + super()._add_defaults_optional() + if os.path.isfile('pyproject.toml'): + self.filelist.append('pyproject.toml') + + def _add_defaults_python(self): + """getting python files""" + if self.distribution.has_pure_modules(): + build_py = self.get_finalized_command('build_py') + self.filelist.extend(build_py.get_source_files()) + self._add_data_files(self._safe_data_files(build_py)) + + def _safe_data_files(self, build_py): + """ + Extracting data_files from build_py is known to cause + infinite recursion errors when `include_package_data` + is enabled, so suppress it in that case. + """ + if self.distribution.include_package_data: + return () + return build_py.data_files + + def _add_data_files(self, data_files): + """ + Add data files as found in build_py.data_files. + """ + self.filelist.extend( + os.path.join(src_dir, name) + for _, src_dir, _, filenames in data_files + for name in filenames + ) + + def _add_defaults_data_files(self): + try: + if six.PY2: + sdist_add_defaults._add_defaults_data_files(self) + else: + super()._add_defaults_data_files() + except TypeError: + log.warn("data_files contains unexpected objects") + + def check_readme(self): + for f in self.READMES: + if os.path.exists(f): + return + else: + self.warn( + "standard file not found: should have one of " + + ', '.join(self.READMES) + ) + + def make_release_tree(self, base_dir, files): + orig.sdist.make_release_tree(self, base_dir, files) + + # Save any egg_info command line options used to create this sdist + dest = os.path.join(base_dir, 'setup.cfg') + if hasattr(os, 'link') and os.path.exists(dest): + # unlink and re-copy, since it might be hard-linked, and + # we don't want to change the source version + os.unlink(dest) + self.copy_file('setup.cfg', dest) + + self.get_finalized_command('egg_info').save_version_info(dest) + + def _manifest_is_not_generated(self): + # check for special comment used in 2.7.1 and higher + if not os.path.isfile(self.manifest): + return False + + with io.open(self.manifest, 'rb') as fp: + first_line = fp.readline() + return (first_line != + '# file GENERATED by distutils, do NOT edit\n'.encode()) + + def read_manifest(self): + """Read the manifest file (named by 'self.manifest') and use it to + fill in 'self.filelist', the list of files to include in the source + distribution. + """ + log.info("reading manifest file '%s'", self.manifest) + manifest = open(self.manifest, 'rb') + for line in manifest: + # The manifest must contain UTF-8. See #303. + if six.PY3: + try: + line = line.decode('UTF-8') + except UnicodeDecodeError: + log.warn("%r not UTF-8 decodable -- skipping" % line) + continue + # ignore comments and blank lines + line = line.strip() + if line.startswith('#') or not line: + continue + self.filelist.append(line) + manifest.close() + + def check_license(self): + """Checks if license_file' or 'license_files' is configured and adds any + valid paths to 'self.filelist'. 
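+        Configured paths that do not exist are warned about and dropped
+        from the result.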
+ """ + + files = ordered_set.OrderedSet() + + opts = self.distribution.get_option_dict('metadata') + + # ignore the source of the value + _, license_file = opts.get('license_file', (None, None)) + + if license_file is None: + log.debug("'license_file' option was not specified") + else: + files.add(license_file) + + try: + files.update(self.distribution.metadata.license_files) + except TypeError: + log.warn("warning: 'license_files' option is malformed") + + for f in files: + if not os.path.exists(f): + log.warn( + "warning: Failed to find the configured license file '%s'", + f) + files.remove(f) + + self.filelist.extend(files) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/setopt.py b/backend/test/lib/python3.8/site-packages/setuptools/command/setopt.py new file mode 100644 index 0000000000000000000000000000000000000000..7e57cc02627fc3c3bb49613731a51c72452f96ba --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/setopt.py @@ -0,0 +1,149 @@ +from distutils.util import convert_path +from distutils import log +from distutils.errors import DistutilsOptionError +import distutils +import os + +from setuptools.extern.six.moves import configparser + +from setuptools import Command + +__all__ = ['config_file', 'edit_config', 'option_base', 'setopt'] + + +def config_file(kind="local"): + """Get the filename of the distutils, local, global, or per-user config + + `kind` must be one of "local", "global", or "user" + """ + if kind == 'local': + return 'setup.cfg' + if kind == 'global': + return os.path.join( + os.path.dirname(distutils.__file__), 'distutils.cfg' + ) + if kind == 'user': + dot = os.name == 'posix' and '.' or '' + return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot)) + raise ValueError( + "config_file() type must be 'local', 'global', or 'user'", kind + ) + + +def edit_config(filename, settings, dry_run=False): + """Edit a configuration file to include `settings` + + `settings` is a dictionary of dictionaries or ``None`` values, keyed by + command/section name. A ``None`` value means to delete the entire section, + while a dictionary lists settings to be changed or deleted in that section. + A setting of ``None`` means to delete that setting. 
+ """ + log.debug("Reading configuration from %s", filename) + opts = configparser.RawConfigParser() + opts.read([filename]) + for section, options in settings.items(): + if options is None: + log.info("Deleting section [%s] from %s", section, filename) + opts.remove_section(section) + else: + if not opts.has_section(section): + log.debug("Adding new section [%s] to %s", section, filename) + opts.add_section(section) + for option, value in options.items(): + if value is None: + log.debug( + "Deleting %s.%s from %s", + section, option, filename + ) + opts.remove_option(section, option) + if not opts.options(section): + log.info("Deleting empty [%s] section from %s", + section, filename) + opts.remove_section(section) + else: + log.debug( + "Setting %s.%s to %r in %s", + section, option, value, filename + ) + opts.set(section, option, value) + + log.info("Writing %s", filename) + if not dry_run: + with open(filename, 'w') as f: + opts.write(f) + + +class option_base(Command): + """Abstract base class for commands that mess with config files""" + + user_options = [ + ('global-config', 'g', + "save options to the site-wide distutils.cfg file"), + ('user-config', 'u', + "save options to the current user's pydistutils.cfg file"), + ('filename=', 'f', + "configuration file to use (default=setup.cfg)"), + ] + + boolean_options = [ + 'global-config', 'user-config', + ] + + def initialize_options(self): + self.global_config = None + self.user_config = None + self.filename = None + + def finalize_options(self): + filenames = [] + if self.global_config: + filenames.append(config_file('global')) + if self.user_config: + filenames.append(config_file('user')) + if self.filename is not None: + filenames.append(self.filename) + if not filenames: + filenames.append(config_file('local')) + if len(filenames) > 1: + raise DistutilsOptionError( + "Must specify only one configuration file option", + filenames + ) + self.filename, = filenames + + +class setopt(option_base): + """Save command-line options to a file""" + + description = "set an option in setup.cfg or another config file" + + user_options = [ + ('command=', 'c', 'command to set an option for'), + ('option=', 'o', 'option to set'), + ('set-value=', 's', 'value of the option'), + ('remove', 'r', 'remove (unset) the value'), + ] + option_base.user_options + + boolean_options = option_base.boolean_options + ['remove'] + + def initialize_options(self): + option_base.initialize_options(self) + self.command = None + self.option = None + self.set_value = None + self.remove = None + + def finalize_options(self): + option_base.finalize_options(self) + if self.command is None or self.option is None: + raise DistutilsOptionError("Must specify --command *and* --option") + if self.set_value is None and not self.remove: + raise DistutilsOptionError("Must specify --set-value or --remove") + + def run(self): + edit_config( + self.filename, { + self.command: {self.option.replace('-', '_'): self.set_value} + }, + self.dry_run + ) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/test.py b/backend/test/lib/python3.8/site-packages/setuptools/command/test.py new file mode 100644 index 0000000000000000000000000000000000000000..c148b38d10c7691c2045520e5aedb60293dd714d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/test.py @@ -0,0 +1,279 @@ +import os +import operator +import sys +import contextlib +import itertools +import unittest +from distutils.errors import DistutilsError, DistutilsOptionError +from distutils 
import log +from unittest import TestLoader + +from setuptools.extern import six +from setuptools.extern.six.moves import map, filter + +from pkg_resources import (resource_listdir, resource_exists, normalize_path, + working_set, _namespace_packages, evaluate_marker, + add_activation_listener, require, EntryPoint) +from setuptools import Command +from .build_py import _unique_everseen + +__metaclass__ = type + + +class ScanningLoader(TestLoader): + + def __init__(self): + TestLoader.__init__(self) + self._visited = set() + + def loadTestsFromModule(self, module, pattern=None): + """Return a suite of all tests cases contained in the given module + + If the module is a package, load tests from all the modules in it. + If the module has an ``additional_tests`` function, call it and add + the return value to the tests. + """ + if module in self._visited: + return None + self._visited.add(module) + + tests = [] + tests.append(TestLoader.loadTestsFromModule(self, module)) + + if hasattr(module, "additional_tests"): + tests.append(module.additional_tests()) + + if hasattr(module, '__path__'): + for file in resource_listdir(module.__name__, ''): + if file.endswith('.py') and file != '__init__.py': + submodule = module.__name__ + '.' + file[:-3] + else: + if resource_exists(module.__name__, file + '/__init__.py'): + submodule = module.__name__ + '.' + file + else: + continue + tests.append(self.loadTestsFromName(submodule)) + + if len(tests) != 1: + return self.suiteClass(tests) + else: + return tests[0] # don't create a nested suite for only one return + + +# adapted from jaraco.classes.properties:NonDataProperty +class NonDataProperty: + def __init__(self, fget): + self.fget = fget + + def __get__(self, obj, objtype=None): + if obj is None: + return self + return self.fget(obj) + + +class test(Command): + """Command to run unit tests after in-place build""" + + description = "run unit tests after in-place build (deprecated)" + + user_options = [ + ('test-module=', 'm', "Run 'test_suite' in specified module"), + ('test-suite=', 's', + "Run single test, case or suite (e.g. 'module.test_suite')"), + ('test-runner=', 'r', "Test runner to use"), + ] + + def initialize_options(self): + self.test_suite = None + self.test_module = None + self.test_loader = None + self.test_runner = None + + def finalize_options(self): + + if self.test_suite and self.test_module: + msg = "You may specify a module or a suite, but not both" + raise DistutilsOptionError(msg) + + if self.test_suite is None: + if self.test_module is None: + self.test_suite = self.distribution.test_suite + else: + self.test_suite = self.test_module + ".test_suite" + + if self.test_loader is None: + self.test_loader = getattr(self.distribution, 'test_loader', None) + if self.test_loader is None: + self.test_loader = "setuptools.command.test:ScanningLoader" + if self.test_runner is None: + self.test_runner = getattr(self.distribution, 'test_runner', None) + + @NonDataProperty + def test_args(self): + return list(self._test_args()) + + def _test_args(self): + if not self.test_suite and sys.version_info >= (2, 7): + yield 'discover' + if self.verbose: + yield '--verbose' + if self.test_suite: + yield self.test_suite + + def with_project_on_sys_path(self, func): + """ + Backward compatibility for project_on_sys_path context. 
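+        Semantically equivalent to ``with self.project_on_sys_path(): func()``.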
+ """ + with self.project_on_sys_path(): + func() + + @contextlib.contextmanager + def project_on_sys_path(self, include_dists=[]): + with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False) + + if with_2to3: + # If we run 2to3 we can not do this inplace: + + # Ensure metadata is up-to-date + self.reinitialize_command('build_py', inplace=0) + self.run_command('build_py') + bpy_cmd = self.get_finalized_command("build_py") + build_path = normalize_path(bpy_cmd.build_lib) + + # Build extensions + self.reinitialize_command('egg_info', egg_base=build_path) + self.run_command('egg_info') + + self.reinitialize_command('build_ext', inplace=0) + self.run_command('build_ext') + else: + # Without 2to3 inplace works fine: + self.run_command('egg_info') + + # Build extensions in-place + self.reinitialize_command('build_ext', inplace=1) + self.run_command('build_ext') + + ei_cmd = self.get_finalized_command("egg_info") + + old_path = sys.path[:] + old_modules = sys.modules.copy() + + try: + project_path = normalize_path(ei_cmd.egg_base) + sys.path.insert(0, project_path) + working_set.__init__() + add_activation_listener(lambda dist: dist.activate()) + require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) + with self.paths_on_pythonpath([project_path]): + yield + finally: + sys.path[:] = old_path + sys.modules.clear() + sys.modules.update(old_modules) + working_set.__init__() + + @staticmethod + @contextlib.contextmanager + def paths_on_pythonpath(paths): + """ + Add the indicated paths to the head of the PYTHONPATH environment + variable so that subprocesses will also see the packages at + these paths. + + Do this in a context that restores the value on exit. + """ + nothing = object() + orig_pythonpath = os.environ.get('PYTHONPATH', nothing) + current_pythonpath = os.environ.get('PYTHONPATH', '') + try: + prefix = os.pathsep.join(_unique_everseen(paths)) + to_join = filter(None, [prefix, current_pythonpath]) + new_path = os.pathsep.join(to_join) + if new_path: + os.environ['PYTHONPATH'] = new_path + yield + finally: + if orig_pythonpath is nothing: + os.environ.pop('PYTHONPATH', None) + else: + os.environ['PYTHONPATH'] = orig_pythonpath + + @staticmethod + def install_dists(dist): + """ + Install the requirements indicated by self.distribution and + return an iterable of the dists that were built. + """ + ir_d = dist.fetch_build_eggs(dist.install_requires) + tr_d = dist.fetch_build_eggs(dist.tests_require or []) + er_d = dist.fetch_build_eggs( + v for k, v in dist.extras_require.items() + if k.startswith(':') and evaluate_marker(k[1:]) + ) + return itertools.chain(ir_d, tr_d, er_d) + + def run(self): + self.announce( + "WARNING: Testing via this command is deprecated and will be " + "removed in a future version. Users looking for a generic test " + "entry point independent of test runner are encouraged to use " + "tox.", + log.WARN, + ) + + installed_dists = self.install_dists(self.distribution) + + cmd = ' '.join(self._argv) + if self.dry_run: + self.announce('skipping "%s" (dry run)' % cmd) + return + + self.announce('running "%s"' % cmd) + + paths = map(operator.attrgetter('location'), installed_dists) + with self.paths_on_pythonpath(paths): + with self.project_on_sys_path(): + self.run_tests() + + def run_tests(self): + # Purge modules under test from sys.modules. The test loader will + # re-import them from the build location. Required when 2to3 is used + # with namespace packages. 
+ if six.PY3 and getattr(self.distribution, 'use_2to3', False): + module = self.test_suite.split('.')[0] + if module in _namespace_packages: + del_modules = [] + if module in sys.modules: + del_modules.append(module) + module += '.' + for name in sys.modules: + if name.startswith(module): + del_modules.append(name) + list(map(sys.modules.__delitem__, del_modules)) + + test = unittest.main( + None, None, self._argv, + testLoader=self._resolve_as_ep(self.test_loader), + testRunner=self._resolve_as_ep(self.test_runner), + exit=False, + ) + if not test.result.wasSuccessful(): + msg = 'Test failed: %s' % test.result + self.announce(msg, log.ERROR) + raise DistutilsError(msg) + + @property + def _argv(self): + return ['unittest'] + self.test_args + + @staticmethod + def _resolve_as_ep(val): + """ + Load the indicated attribute value, called, as a as if it were + specified as an entry point. + """ + if val is None: + return + parsed = EntryPoint.parse("x=" + val) + return parsed.resolve()() diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/upload.py b/backend/test/lib/python3.8/site-packages/setuptools/command/upload.py new file mode 100644 index 0000000000000000000000000000000000000000..ec7f81e22772511d668e5ab92f625db33259e803 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/upload.py @@ -0,0 +1,17 @@ +from distutils import log +from distutils.command import upload as orig + +from setuptools.errors import RemovedCommandError + + +class upload(orig.upload): + """Formerly used to upload packages to PyPI.""" + + def run(self): + msg = ( + "The upload command has been removed, use twine to upload " + + "instead (https://pypi.org/p/twine)" + ) + + self.announce("ERROR: " + msg, log.ERROR) + raise RemovedCommandError(msg) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/command/upload_docs.py b/backend/test/lib/python3.8/site-packages/setuptools/command/upload_docs.py new file mode 100644 index 0000000000000000000000000000000000000000..07aa564af451ce41d818d72f8ee93cb46887cecf --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/command/upload_docs.py @@ -0,0 +1,206 @@ +# -*- coding: utf-8 -*- +"""upload_docs + +Implements a Distutils 'upload_docs' subcommand (upload documentation to +PyPI's pythonhosted.org). +""" + +from base64 import standard_b64encode +from distutils import log +from distutils.errors import DistutilsOptionError +import os +import socket +import zipfile +import tempfile +import shutil +import itertools +import functools + +from setuptools.extern import six +from setuptools.extern.six.moves import http_client, urllib + +from pkg_resources import iter_entry_points +from .upload import upload + + +def _encode(s): + errors = 'surrogateescape' if six.PY3 else 'strict' + return s.encode('utf-8', errors) + + +class upload_docs(upload): + # override the default repository as upload_docs isn't + # supported by Warehouse (and won't be). 
+ DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/' + + description = 'Upload documentation to PyPI' + + user_options = [ + ('repository=', 'r', + "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY), + ('show-response', None, + 'display full response text from server'), + ('upload-dir=', None, 'directory to upload'), + ] + boolean_options = upload.boolean_options + + def has_sphinx(self): + if self.upload_dir is None: + for ep in iter_entry_points('distutils.commands', 'build_sphinx'): + return True + + sub_commands = [('build_sphinx', has_sphinx)] + + def initialize_options(self): + upload.initialize_options(self) + self.upload_dir = None + self.target_dir = None + + def finalize_options(self): + upload.finalize_options(self) + if self.upload_dir is None: + if self.has_sphinx(): + build_sphinx = self.get_finalized_command('build_sphinx') + self.target_dir = build_sphinx.builder_target_dir + else: + build = self.get_finalized_command('build') + self.target_dir = os.path.join(build.build_base, 'docs') + else: + self.ensure_dirname('upload_dir') + self.target_dir = self.upload_dir + if 'pypi.python.org' in self.repository: + log.warn("Upload_docs command is deprecated. Use RTD instead.") + self.announce('Using upload directory %s' % self.target_dir) + + def create_zipfile(self, filename): + zip_file = zipfile.ZipFile(filename, "w") + try: + self.mkpath(self.target_dir) # just in case + for root, dirs, files in os.walk(self.target_dir): + if root == self.target_dir and not files: + tmpl = "no files found in upload directory '%s'" + raise DistutilsOptionError(tmpl % self.target_dir) + for name in files: + full = os.path.join(root, name) + relative = root[len(self.target_dir):].lstrip(os.path.sep) + dest = os.path.join(relative, name) + zip_file.write(full, dest) + finally: + zip_file.close() + + def run(self): + # Run sub commands + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + tmp_dir = tempfile.mkdtemp() + name = self.distribution.metadata.get_name() + zip_file = os.path.join(tmp_dir, "%s.zip" % name) + try: + self.create_zipfile(zip_file) + self.upload_file(zip_file) + finally: + shutil.rmtree(tmp_dir) + + @staticmethod + def _build_part(item, sep_boundary): + key, values = item + title = '\nContent-Disposition: form-data; name="%s"' % key + # handle multiple entries for the same name + if not isinstance(values, list): + values = [values] + for value in values: + if isinstance(value, tuple): + title += '; filename="%s"' % value[0] + value = value[1] + else: + value = _encode(value) + yield sep_boundary + yield _encode(title) + yield b"\n\n" + yield value + if value and value[-1:] == b'\r': + yield b'\n' # write an extra newline (lurve Macs) + + @classmethod + def _build_multipart(cls, data): + """ + Build up the MIME payload for the POST data + """ + boundary = b'--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' + sep_boundary = b'\n--' + boundary + end_boundary = sep_boundary + b'--' + end_items = end_boundary, b"\n", + builder = functools.partial( + cls._build_part, + sep_boundary=sep_boundary, + ) + part_groups = map(builder, data.items()) + parts = itertools.chain.from_iterable(part_groups) + body_items = itertools.chain(parts, end_items) + content_type = 'multipart/form-data; boundary=%s' % boundary.decode('ascii') + return b''.join(body_items), content_type + + def upload_file(self, filename): + with open(filename, 'rb') as f: + content = f.read() + meta = self.distribution.metadata + data = { + ':action': 'doc_upload', + 'name': 
meta.get_name(), + 'content': (os.path.basename(filename), content), + } + # set up the authentication + credentials = _encode(self.username + ':' + self.password) + credentials = standard_b64encode(credentials) + if six.PY3: + credentials = credentials.decode('ascii') + auth = "Basic " + credentials + + body, ct = self._build_multipart(data) + + msg = "Submitting documentation to %s" % (self.repository) + self.announce(msg, log.INFO) + + # build the Request + # We can't use urllib2 since we need to send the Basic + # auth right with the first request + schema, netloc, url, params, query, fragments = \ + urllib.parse.urlparse(self.repository) + assert not params and not query and not fragments + if schema == 'http': + conn = http_client.HTTPConnection(netloc) + elif schema == 'https': + conn = http_client.HTTPSConnection(netloc) + else: + raise AssertionError("unsupported schema " + schema) + + data = '' + try: + conn.connect() + conn.putrequest("POST", url) + content_type = ct + conn.putheader('Content-type', content_type) + conn.putheader('Content-length', str(len(body))) + conn.putheader('Authorization', auth) + conn.endheaders() + conn.send(body) + except socket.error as e: + self.announce(str(e), log.ERROR) + return + + r = conn.getresponse() + if r.status == 200: + msg = 'Server response (%s): %s' % (r.status, r.reason) + self.announce(msg, log.INFO) + elif r.status == 301: + location = r.getheader('Location') + if location is None: + location = 'https://pythonhosted.org/%s/' % meta.get_name() + msg = 'Upload successful. Visit %s' % location + self.announce(msg, log.INFO) + else: + msg = 'Upload failed (%s): %s' % (r.status, r.reason) + self.announce(msg, log.ERROR) + if self.show_response: + print('-' * 75, r.read(), '-' * 75) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/config.py b/backend/test/lib/python3.8/site-packages/setuptools/config.py new file mode 100644 index 0000000000000000000000000000000000000000..9b9a0c45e756b44ddea7660228934d0a37fcd97c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/config.py @@ -0,0 +1,659 @@ +from __future__ import absolute_import, unicode_literals +import io +import os +import sys + +import warnings +import functools +from collections import defaultdict +from functools import partial +from functools import wraps +from importlib import import_module + +from distutils.errors import DistutilsOptionError, DistutilsFileError +from setuptools.extern.packaging.version import LegacyVersion, parse +from setuptools.extern.packaging.specifiers import SpecifierSet +from setuptools.extern.six import string_types, PY3 + + +__metaclass__ = type + + +def read_configuration( + filepath, find_others=False, ignore_option_errors=False): + """Read given configuration file and returns options from it as a dict. + + :param str|unicode filepath: Path to configuration file + to get options from. + + :param bool find_others: Whether to search for other configuration files + which could be on in various places. + + :param bool ignore_option_errors: Whether to silently ignore + options, values of which could not be resolved (e.g. due to exceptions + in directives such as file:, attr:, etc.). + If False exceptions are propagated as expected. + + :rtype: dict + """ + from setuptools.dist import Distribution, _Distribution + + filepath = os.path.abspath(filepath) + + if not os.path.isfile(filepath): + raise DistutilsFileError( + 'Configuration file %s does not exist.' 
% filepath) + + current_directory = os.getcwd() + os.chdir(os.path.dirname(filepath)) + + try: + dist = Distribution() + + filenames = dist.find_config_files() if find_others else [] + if filepath not in filenames: + filenames.append(filepath) + + _Distribution.parse_config_files(dist, filenames=filenames) + + handlers = parse_configuration( + dist, dist.command_options, + ignore_option_errors=ignore_option_errors) + + finally: + os.chdir(current_directory) + + return configuration_to_dict(handlers) + + +def _get_option(target_obj, key): + """ + Given a target object and option key, get that option from + the target object, either through a get_{key} method or + from an attribute directly. + """ + getter_name = 'get_{key}'.format(**locals()) + by_attribute = functools.partial(getattr, target_obj, key) + getter = getattr(target_obj, getter_name, by_attribute) + return getter() + + +def configuration_to_dict(handlers): + """Returns configuration data gathered by given handlers as a dict. + + :param list[ConfigHandler] handlers: Handlers list, + usually from parse_configuration() + + :rtype: dict + """ + config_dict = defaultdict(dict) + + for handler in handlers: + for option in handler.set_options: + value = _get_option(handler.target_obj, option) + config_dict[handler.section_prefix][option] = value + + return config_dict + + +def parse_configuration( + distribution, command_options, ignore_option_errors=False): + """Performs additional parsing of configuration options + for a distribution. + + Returns a list of used option handlers. + + :param Distribution distribution: + :param dict command_options: + :param bool ignore_option_errors: Whether to silently ignore + options, values of which could not be resolved (e.g. due to exceptions + in directives such as file:, attr:, etc.). + If False exceptions are propagated as expected. + :rtype: list + """ + options = ConfigOptionsHandler( + distribution, command_options, ignore_option_errors) + options.parse() + + meta = ConfigMetadataHandler( + distribution.metadata, command_options, ignore_option_errors, + distribution.package_dir) + meta.parse() + + return meta, options + + +class ConfigHandler: + """Handles metadata supplied in configuration files.""" + + section_prefix = None + """Prefix for config sections handled by this handler. + Must be provided by class heirs. + + """ + + aliases = {} + """Options aliases. + For compatibility with various packages. E.g.: d2to1 and pbr. + Note: `-` in keys is replaced with `_` by config parser. + + """ + + def __init__(self, target_obj, options, ignore_option_errors=False): + sections = {} + + section_prefix = self.section_prefix + for section_name, section_options in options.items(): + if not section_name.startswith(section_prefix): + continue + + section_name = section_name.replace(section_prefix, '').strip('.') + sections[section_name] = section_options + + self.ignore_option_errors = ignore_option_errors + self.target_obj = target_obj + self.sections = sections + self.set_options = [] + + @property + def parsers(self): + """Metadata item name to parser function mapping.""" + raise NotImplementedError( + '%s must provide .parsers property' % self.__class__.__name__) + + def __setitem__(self, option_name, value): + unknown = tuple() + target_obj = self.target_obj + + # Translate alias into real name. 
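+        # e.g. an aliases mapping like {'summary': 'description'} would
+        # redirect 'summary' to the 'description' attribute (hypothetical
+        # alias; see the `aliases` class attribute above)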
+ option_name = self.aliases.get(option_name, option_name) + + current_value = getattr(target_obj, option_name, unknown) + + if current_value is unknown: + raise KeyError(option_name) + + if current_value: + # Already inhabited. Skipping. + return + + skip_option = False + parser = self.parsers.get(option_name) + if parser: + try: + value = parser(value) + + except Exception: + skip_option = True + if not self.ignore_option_errors: + raise + + if skip_option: + return + + setter = getattr(target_obj, 'set_%s' % option_name, None) + if setter is None: + setattr(target_obj, option_name, value) + else: + setter(value) + + self.set_options.append(option_name) + + @classmethod + def _parse_list(cls, value, separator=','): + """Represents value as a list. + + Value is split either by separator (defaults to comma) or by lines. + + :param value: + :param separator: List items separator character. + :rtype: list + """ + if isinstance(value, list): # _get_parser_compound case + return value + + if '\n' in value: + value = value.splitlines() + else: + value = value.split(separator) + + return [chunk.strip() for chunk in value if chunk.strip()] + + @classmethod + def _parse_dict(cls, value): + """Represents value as a dict. + + :param value: + :rtype: dict + """ + separator = '=' + result = {} + for line in cls._parse_list(value): + key, sep, val = line.partition(separator) + if sep != separator: + raise DistutilsOptionError( + 'Unable to parse option value to dict: %s' % value) + result[key.strip()] = val.strip() + + return result + + @classmethod + def _parse_bool(cls, value): + """Represents value as boolean. + + :param value: + :rtype: bool + """ + value = value.lower() + return value in ('1', 'true', 'yes') + + @classmethod + def _exclude_files_parser(cls, key): + """Returns a parser function to make sure field inputs + are not files. + + Parses a value after getting the key so error messages are + more informative. + + :param key: + :rtype: callable + """ + def parser(value): + exclude_directive = 'file:' + if value.startswith(exclude_directive): + raise ValueError( + 'Only strings are accepted for the {0} field, ' + 'files are not accepted'.format(key)) + return value + return parser + + @classmethod + def _parse_file(cls, value): + """Represents value as a string, allowing including text + from nearest files using `file:` directive. + + Directive is sandboxed and won't reach anything outside + directory with setup.py. + + Examples: + file: README.rst, CHANGELOG.md, src/file.txt + + :param str value: + :rtype: str + """ + include_directive = 'file:' + + if not isinstance(value, string_types): + return value + + if not value.startswith(include_directive): + return value + + spec = value[len(include_directive):] + filepaths = (os.path.abspath(path.strip()) for path in spec.split(',')) + return '\n'.join( + cls._read_file(path) + for path in filepaths + if (cls._assert_local(path) or True) + and os.path.isfile(path) + ) + + @staticmethod + def _assert_local(filepath): + if not filepath.startswith(os.getcwd()): + raise DistutilsOptionError( + '`file:` directive can not access %s' % filepath) + + @staticmethod + def _read_file(filepath): + with io.open(filepath, encoding='utf-8') as f: + return f.read() + + @classmethod + def _parse_attr(cls, value, package_dir=None): + """Represents value as a module attribute. 
+ + Examples: + attr: package.attr + attr: package.module.attr + + :param str value: + :rtype: str + """ + attr_directive = 'attr:' + if not value.startswith(attr_directive): + return value + + attrs_path = value.replace(attr_directive, '').strip().split('.') + attr_name = attrs_path.pop() + + module_name = '.'.join(attrs_path) + module_name = module_name or '__init__' + + parent_path = os.getcwd() + if package_dir: + if attrs_path[0] in package_dir: + # A custom path was specified for the module we want to import + custom_path = package_dir[attrs_path[0]] + parts = custom_path.rsplit('/', 1) + if len(parts) > 1: + parent_path = os.path.join(os.getcwd(), parts[0]) + module_name = parts[1] + else: + module_name = custom_path + elif '' in package_dir: + # A custom parent directory was specified for all root modules + parent_path = os.path.join(os.getcwd(), package_dir['']) + sys.path.insert(0, parent_path) + try: + module = import_module(module_name) + value = getattr(module, attr_name) + + finally: + sys.path = sys.path[1:] + + return value + + @classmethod + def _get_parser_compound(cls, *parse_methods): + """Returns parser function to represents value as a list. + + Parses a value applying given methods one after another. + + :param parse_methods: + :rtype: callable + """ + def parse(value): + parsed = value + + for method in parse_methods: + parsed = method(parsed) + + return parsed + + return parse + + @classmethod + def _parse_section_to_dict(cls, section_options, values_parser=None): + """Parses section options into a dictionary. + + Optionally applies a given parser to values. + + :param dict section_options: + :param callable values_parser: + :rtype: dict + """ + value = {} + values_parser = values_parser or (lambda val: val) + for key, (_, val) in section_options.items(): + value[key] = values_parser(val) + return value + + def parse_section(self, section_options): + """Parses configuration file section. + + :param dict section_options: + """ + for (name, (_, value)) in section_options.items(): + try: + self[name] = value + + except KeyError: + pass # Keep silent for a new option may appear anytime. + + def parse(self): + """Parses configuration file items from one + or more related sections. + + """ + for section_name, section_options in self.sections.items(): + + method_postfix = '' + if section_name: # [section.option] variant + method_postfix = '_%s' % section_name + + section_parser_method = getattr( + self, + # Dots in section names are translated into dunderscores. 
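# --- Illustrative sketch (not part of the vendored file): the classmethod
# --- parsers defined above can be exercised directly, even though
# --- ConfigHandler itself is abstract.  All inputs are invented.
from setuptools.config import ConfigHandler

print(ConfigHandler._parse_list('a, b, c'))       # ['a', 'b', 'c']
print(ConfigHandler._parse_list('a\nb\nc'))       # newline splitting wins over the separator
print(ConfigHandler._parse_dict('x = 1\ny = 2'))  # {'x': '1', 'y': '2'}
print(ConfigHandler._parse_bool('yes'))           # True
print(ConfigHandler._parse_bool('0'))             # False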
+ ('parse_section%s' % method_postfix).replace('.', '__'), + None) + + if section_parser_method is None: + raise DistutilsOptionError( + 'Unsupported distribution option section: [%s.%s]' % ( + self.section_prefix, section_name)) + + section_parser_method(section_options) + + def _deprecated_config_handler(self, func, msg, warning_class): + """ this function will wrap around parameters that are deprecated + + :param msg: deprecation message + :param warning_class: class of warning exception to be raised + :param func: function to be wrapped around + """ + @wraps(func) + def config_handler(*args, **kwargs): + warnings.warn(msg, warning_class) + return func(*args, **kwargs) + + return config_handler + + +class ConfigMetadataHandler(ConfigHandler): + + section_prefix = 'metadata' + + aliases = { + 'home_page': 'url', + 'summary': 'description', + 'classifier': 'classifiers', + 'platform': 'platforms', + } + + strict_mode = False + """We need to keep it loose, to be partially compatible with + `pbr` and `d2to1` packages which also uses `metadata` section. + + """ + + def __init__(self, target_obj, options, ignore_option_errors=False, + package_dir=None): + super(ConfigMetadataHandler, self).__init__(target_obj, options, + ignore_option_errors) + self.package_dir = package_dir + + @property + def parsers(self): + """Metadata item name to parser function mapping.""" + parse_list = self._parse_list + parse_file = self._parse_file + parse_dict = self._parse_dict + exclude_files_parser = self._exclude_files_parser + + return { + 'platforms': parse_list, + 'keywords': parse_list, + 'provides': parse_list, + 'requires': self._deprecated_config_handler( + parse_list, + "The requires parameter is deprecated, please use " + "install_requires for runtime dependencies.", + DeprecationWarning), + 'obsoletes': parse_list, + 'classifiers': self._get_parser_compound(parse_file, parse_list), + 'license': exclude_files_parser('license'), + 'license_files': parse_list, + 'description': parse_file, + 'long_description': parse_file, + 'version': self._parse_version, + 'project_urls': parse_dict, + } + + def _parse_version(self, value): + """Parses `version` option value. 
+ + :param value: + :rtype: str + + """ + version = self._parse_file(value) + + if version != value: + version = version.strip() + # Be strict about versions loaded from file because it's easy to + # accidentally include newlines and other unintended content + if isinstance(parse(version), LegacyVersion): + tmpl = ( + 'Version loaded from {value} does not ' + 'comply with PEP 440: {version}' + ) + raise DistutilsOptionError(tmpl.format(**locals())) + + return version + + version = self._parse_attr(value, self.package_dir) + + if callable(version): + version = version() + + if not isinstance(version, string_types): + if hasattr(version, '__iter__'): + version = '.'.join(map(str, version)) + else: + version = '%s' % version + + return version + + +class ConfigOptionsHandler(ConfigHandler): + + section_prefix = 'options' + + @property + def parsers(self): + """Metadata item name to parser function mapping.""" + parse_list = self._parse_list + parse_list_semicolon = partial(self._parse_list, separator=';') + parse_bool = self._parse_bool + parse_dict = self._parse_dict + + return { + 'zip_safe': parse_bool, + 'use_2to3': parse_bool, + 'include_package_data': parse_bool, + 'package_dir': parse_dict, + 'use_2to3_fixers': parse_list, + 'use_2to3_exclude_fixers': parse_list, + 'convert_2to3_doctests': parse_list, + 'scripts': parse_list, + 'eager_resources': parse_list, + 'dependency_links': parse_list, + 'namespace_packages': parse_list, + 'install_requires': parse_list_semicolon, + 'setup_requires': parse_list_semicolon, + 'tests_require': parse_list_semicolon, + 'packages': self._parse_packages, + 'entry_points': self._parse_file, + 'py_modules': parse_list, + 'python_requires': SpecifierSet, + } + + def _parse_packages(self, value): + """Parses `packages` option value. + + :param value: + :rtype: list + """ + find_directives = ['find:', 'find_namespace:'] + trimmed_value = value.strip() + + if trimmed_value not in find_directives: + return self._parse_list(value) + + findns = trimmed_value == find_directives[1] + if findns and not PY3: + raise DistutilsOptionError( + 'find_namespace: directive is unsupported on Python < 3.3') + + # Read function arguments from a dedicated section. + find_kwargs = self.parse_section_packages__find( + self.sections.get('packages.find', {})) + + if findns: + from setuptools import find_namespace_packages as find_packages + else: + from setuptools import find_packages + + return find_packages(**find_kwargs) + + def parse_section_packages__find(self, section_options): + """Parses `packages.find` configuration file section. + + To be used in conjunction with _parse_packages(). + + :param dict section_options: + """ + section_data = self._parse_section_to_dict( + section_options, self._parse_list) + + valid_keys = ['where', 'include', 'exclude'] + + find_kwargs = dict( + [(k, v) for k, v in section_data.items() if k in valid_keys and v]) + + where = find_kwargs.get('where') + if where is not None: + find_kwargs['where'] = where[0] # cast list to single val + + return find_kwargs + + def parse_section_entry_points(self, section_options): + """Parses `entry_points` configuration file section. 
+
+        :param dict section_options:
+        """
+        parsed = self._parse_section_to_dict(section_options, self._parse_list)
+        self['entry_points'] = parsed
+
+    def _parse_package_data(self, section_options):
+        parsed = self._parse_section_to_dict(section_options, self._parse_list)
+
+        root = parsed.get('*')
+        if root:
+            parsed[''] = root
+            del parsed['*']
+
+        return parsed
+
+    def parse_section_package_data(self, section_options):
+        """Parses `package_data` configuration file section.
+
+        :param dict section_options:
+        """
+        self['package_data'] = self._parse_package_data(section_options)
+
+    def parse_section_exclude_package_data(self, section_options):
+        """Parses `exclude_package_data` configuration file section.
+
+        :param dict section_options:
+        """
+        self['exclude_package_data'] = self._parse_package_data(
+            section_options)
+
+    def parse_section_extras_require(self, section_options):
+        """Parses `extras_require` configuration file section.
+
+        :param dict section_options:
+        """
+        parse_list = partial(self._parse_list, separator=';')
+        self['extras_require'] = self._parse_section_to_dict(
+            section_options, parse_list)
+
+    def parse_section_data_files(self, section_options):
+        """Parses `data_files` configuration file section.
+
+        :param dict section_options:
+        """
+        parsed = self._parse_section_to_dict(section_options, self._parse_list)
+        self['data_files'] = [(k, v) for k, v in parsed.items()]
diff --git a/backend/test/lib/python3.8/site-packages/setuptools/dep_util.py b/backend/test/lib/python3.8/site-packages/setuptools/dep_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..2931c13ec35aa60b742ac4c46ceabd4ed32a5511
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/setuptools/dep_util.py
@@ -0,0 +1,23 @@
+from distutils.dep_util import newer_group
+
+# yes, this was almost entirely copy-pasted from
+# 'newer_pairwise()'; this is just another convenience
+# function.
+def newer_pairwise_group(sources_groups, targets):
+    """Walk both arguments in parallel, testing if each source group is newer
+    than its corresponding target.  Returns a pair of lists (sources_groups,
+    targets) containing only the pairs whose source group is newer, according
+    to the semantics of 'newer_group()'.
+    """
+    if len(sources_groups) != len(targets):
+        raise ValueError("'sources_groups' and 'targets' must be the same length")
+
+    # build a pair of lists (sources_groups, targets) where source is newer
+    n_sources = []
+    n_targets = []
+    for i in range(len(sources_groups)):
+        if newer_group(sources_groups[i], targets[i]):
+            n_sources.append(sources_groups[i])
+            n_targets.append(targets[i])
+
+    return n_sources, n_targets
diff --git a/backend/test/lib/python3.8/site-packages/setuptools/depends.py b/backend/test/lib/python3.8/site-packages/setuptools/depends.py
new file mode 100644
index 0000000000000000000000000000000000000000..a37675cbd9bc9583fd01cc158198e2f4deda321b
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/setuptools/depends.py
@@ -0,0 +1,176 @@
+import sys
+import marshal
+import contextlib
+from distutils.version import StrictVersion
+
+from .py33compat import Bytecode
+
+from .py27compat import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
+from . 
import py27compat + + +__all__ = [ + 'Require', 'find_module', 'get_module_constant', 'extract_constant' +] + + +class Require: + """A prerequisite to building or installing a distribution""" + + def __init__( + self, name, requested_version, module, homepage='', + attribute=None, format=None): + + if format is None and requested_version is not None: + format = StrictVersion + + if format is not None: + requested_version = format(requested_version) + if attribute is None: + attribute = '__version__' + + self.__dict__.update(locals()) + del self.self + + def full_name(self): + """Return full package/distribution name, w/version""" + if self.requested_version is not None: + return '%s-%s' % (self.name, self.requested_version) + return self.name + + def version_ok(self, version): + """Is 'version' sufficiently up-to-date?""" + return self.attribute is None or self.format is None or \ + str(version) != "unknown" and version >= self.requested_version + + def get_version(self, paths=None, default="unknown"): + """Get version number of installed module, 'None', or 'default' + + Search 'paths' for module. If not found, return 'None'. If found, + return the extracted version attribute, or 'default' if no version + attribute was specified, or the value cannot be determined without + importing the module. The version is formatted according to the + requirement's version format (if any), unless it is 'None' or the + supplied 'default'. + """ + + if self.attribute is None: + try: + f, p, i = find_module(self.module, paths) + if f: + f.close() + return default + except ImportError: + return None + + v = get_module_constant(self.module, self.attribute, default, paths) + + if v is not None and v is not default and self.format is not None: + return self.format(v) + + return v + + def is_present(self, paths=None): + """Return true if dependency is present on 'paths'""" + return self.get_version(paths) is not None + + def is_current(self, paths=None): + """Return true if dependency is present and up-to-date on 'paths'""" + version = self.get_version(paths) + if version is None: + return False + return self.version_ok(version) + + +def maybe_close(f): + @contextlib.contextmanager + def empty(): + yield + return + if not f: + return empty() + + return contextlib.closing(f) + + +def get_module_constant(module, symbol, default=-1, paths=None): + """Find 'module' by searching 'paths', and extract 'symbol' + + Return 'None' if 'module' does not exist on 'paths', or it does not define + 'symbol'. If the module defines 'symbol' as a constant, return the + constant. Otherwise, return 'default'.""" + + try: + f, path, (suffix, mode, kind) = info = find_module(module, paths) + except ImportError: + # Module doesn't exist + return None + + with maybe_close(f): + if kind == PY_COMPILED: + f.read(8) # skip magic & date + code = marshal.load(f) + elif kind == PY_FROZEN: + code = py27compat.get_frozen_object(module, paths) + elif kind == PY_SOURCE: + code = compile(f.read(), path, 'exec') + else: + # Not something we can parse; we'll have to import it. :( + imported = py27compat.get_module(module, paths, info) + return getattr(imported, symbol, None) + + return extract_constant(code, symbol, default) + + +def extract_constant(code, symbol, default=-1): + """Extract the constant value of 'symbol' from 'code' + + If the name 'symbol' is bound to a constant value by the Python code + object 'code', return that value. If 'symbol' is bound to an expression, + return 'default'. Otherwise, return 'None'. 
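# --- Illustrative sketch (not part of the vendored file): extract_constant()
# --- inspects a plain code object, so no import of the examined module is
# --- required.  The module source below is invented, and the expected
# --- results assume the Python 3.8 opcode numbering this copy targets.
from setuptools.depends import extract_constant

demo = compile(
    "__version__ = '1.2.3'\nderived = __version__ * 2\n", '<demo>', 'exec')
print(extract_constant(demo, '__version__'))  # '1.2.3' (first constant bound)
print(extract_constant(demo, 'missing'))      # None: name absent from co_names
print(extract_constant(demo, 'derived'))      # -1: bound to an expression, so the default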
+ + Return value is based on the first assignment to 'symbol'. 'symbol' must + be a global, or at least a non-"fast" local in the code block. That is, + only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol' + must be present in 'code.co_names'. + """ + if symbol not in code.co_names: + # name's not there, can't possibly be an assignment + return None + + name_idx = list(code.co_names).index(symbol) + + STORE_NAME = 90 + STORE_GLOBAL = 97 + LOAD_CONST = 100 + + const = default + + for byte_code in Bytecode(code): + op = byte_code.opcode + arg = byte_code.arg + + if op == LOAD_CONST: + const = code.co_consts[arg] + elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL): + return const + else: + const = default + + +def _update_globals(): + """ + Patch the globals to remove the objects not available on some platforms. + + XXX it'd be better to test assertions about bytecode instead. + """ + + if not sys.platform.startswith('java') and sys.platform != 'cli': + return + incompatible = 'extract_constant', 'get_module_constant' + for name in incompatible: + del globals()[name] + __all__.remove(name) + + +_update_globals() diff --git a/backend/test/lib/python3.8/site-packages/setuptools/dist.py b/backend/test/lib/python3.8/site-packages/setuptools/dist.py new file mode 100644 index 0000000000000000000000000000000000000000..f22429e8e191683da2cc83c7cc5eba205a541988 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/dist.py @@ -0,0 +1,1274 @@ +# -*- coding: utf-8 -*- +__all__ = ['Distribution'] + +import io +import sys +import re +import os +import warnings +import numbers +import distutils.log +import distutils.core +import distutils.cmd +import distutils.dist +from distutils.util import strtobool +from distutils.debug import DEBUG +from distutils.fancy_getopt import translate_longopt +import itertools + +from collections import defaultdict +from email import message_from_file + +from distutils.errors import ( + DistutilsOptionError, DistutilsPlatformError, DistutilsSetupError, +) +from distutils.util import rfc822_escape +from distutils.version import StrictVersion + +from setuptools.extern import six +from setuptools.extern import packaging +from setuptools.extern import ordered_set +from setuptools.extern.six.moves import map, filter, filterfalse + +from . 
import SetuptoolsDeprecationWarning + +from setuptools.depends import Require +from setuptools import windows_support +from setuptools.monkey import get_unpatched +from setuptools.config import parse_configuration +import pkg_resources + +__import__('setuptools.extern.packaging.specifiers') +__import__('setuptools.extern.packaging.version') + + +def _get_unpatched(cls): + warnings.warn("Do not call this function", DistDeprecationWarning) + return get_unpatched(cls) + + +def get_metadata_version(self): + mv = getattr(self, 'metadata_version', None) + + if mv is None: + if self.long_description_content_type or self.provides_extras: + mv = StrictVersion('2.1') + elif (self.maintainer is not None or + self.maintainer_email is not None or + getattr(self, 'python_requires', None) is not None or + self.project_urls): + mv = StrictVersion('1.2') + elif (self.provides or self.requires or self.obsoletes or + self.classifiers or self.download_url): + mv = StrictVersion('1.1') + else: + mv = StrictVersion('1.0') + + self.metadata_version = mv + + return mv + + +def read_pkg_file(self, file): + """Reads the metadata values from a file object.""" + msg = message_from_file(file) + + def _read_field(name): + value = msg[name] + if value == 'UNKNOWN': + return None + return value + + def _read_list(name): + values = msg.get_all(name, None) + if values == []: + return None + return values + + self.metadata_version = StrictVersion(msg['metadata-version']) + self.name = _read_field('name') + self.version = _read_field('version') + self.description = _read_field('summary') + # we are filling author only. + self.author = _read_field('author') + self.maintainer = None + self.author_email = _read_field('author-email') + self.maintainer_email = None + self.url = _read_field('home-page') + self.license = _read_field('license') + + if 'download-url' in msg: + self.download_url = _read_field('download-url') + else: + self.download_url = None + + self.long_description = _read_field('description') + self.description = _read_field('summary') + + if 'keywords' in msg: + self.keywords = _read_field('keywords').split(',') + + self.platforms = _read_list('platform') + self.classifiers = _read_list('classifier') + + # PEP 314 - these fields only exist in 1.1 + if self.metadata_version == StrictVersion('1.1'): + self.requires = _read_list('requires') + self.provides = _read_list('provides') + self.obsoletes = _read_list('obsoletes') + else: + self.requires = None + self.provides = None + self.obsoletes = None + + +# Based on Python 3.5 version +def write_pkg_file(self, file): + """Write the PKG-INFO format data to a file object. 
+ """ + version = self.get_metadata_version() + + if six.PY2: + def write_field(key, value): + file.write("%s: %s\n" % (key, self._encode_field(value))) + else: + def write_field(key, value): + file.write("%s: %s\n" % (key, value)) + + write_field('Metadata-Version', str(version)) + write_field('Name', self.get_name()) + write_field('Version', self.get_version()) + write_field('Summary', self.get_description()) + write_field('Home-page', self.get_url()) + + if version < StrictVersion('1.2'): + write_field('Author', self.get_contact()) + write_field('Author-email', self.get_contact_email()) + else: + optional_fields = ( + ('Author', 'author'), + ('Author-email', 'author_email'), + ('Maintainer', 'maintainer'), + ('Maintainer-email', 'maintainer_email'), + ) + + for field, attr in optional_fields: + attr_val = getattr(self, attr) + + if attr_val is not None: + write_field(field, attr_val) + + write_field('License', self.get_license()) + if self.download_url: + write_field('Download-URL', self.download_url) + for project_url in self.project_urls.items(): + write_field('Project-URL', '%s, %s' % project_url) + + long_desc = rfc822_escape(self.get_long_description()) + write_field('Description', long_desc) + + keywords = ','.join(self.get_keywords()) + if keywords: + write_field('Keywords', keywords) + + if version >= StrictVersion('1.2'): + for platform in self.get_platforms(): + write_field('Platform', platform) + else: + self._write_list(file, 'Platform', self.get_platforms()) + + self._write_list(file, 'Classifier', self.get_classifiers()) + + # PEP 314 + self._write_list(file, 'Requires', self.get_requires()) + self._write_list(file, 'Provides', self.get_provides()) + self._write_list(file, 'Obsoletes', self.get_obsoletes()) + + # Setuptools specific for PEP 345 + if hasattr(self, 'python_requires'): + write_field('Requires-Python', self.python_requires) + + # PEP 566 + if self.long_description_content_type: + write_field( + 'Description-Content-Type', + self.long_description_content_type + ) + if self.provides_extras: + for extra in sorted(self.provides_extras): + write_field('Provides-Extra', extra) + + +sequence = tuple, list + + +def check_importable(dist, attr, value): + try: + ep = pkg_resources.EntryPoint.parse('x=' + value) + assert not ep.extras + except (TypeError, ValueError, AttributeError, AssertionError): + raise DistutilsSetupError( + "%r must be importable 'module:attrs' string (got %r)" + % (attr, value) + ) + + +def assert_string_list(dist, attr, value): + """Verify that value is a string list""" + try: + # verify that value is a list or tuple to exclude unordered + # or single-use iterables + assert isinstance(value, (list, tuple)) + # verify that elements of value are strings + assert ''.join(value) != value + except (TypeError, ValueError, AttributeError, AssertionError): + raise DistutilsSetupError( + "%r must be a list of strings (got %r)" % (attr, value) + ) + + +def check_nsp(dist, attr, value): + """Verify that namespace packages are valid""" + ns_packages = value + assert_string_list(dist, attr, ns_packages) + for nsp in ns_packages: + if not dist.has_contents_for(nsp): + raise DistutilsSetupError( + "Distribution contains no modules or packages for " + + "namespace package %r" % nsp + ) + parent, sep, child = nsp.rpartition('.') + if parent and parent not in ns_packages: + distutils.log.warn( + "WARNING: %r is declared as a package namespace, but %r" + " is not: please correct this in setup.py", nsp, parent + ) + + +def check_extras(dist, attr, value): + 
"""Verify that extras_require mapping is valid""" + try: + list(itertools.starmap(_check_extra, value.items())) + except (TypeError, ValueError, AttributeError): + raise DistutilsSetupError( + "'extras_require' must be a dictionary whose values are " + "strings or lists of strings containing valid project/version " + "requirement specifiers." + ) + + +def _check_extra(extra, reqs): + name, sep, marker = extra.partition(':') + if marker and pkg_resources.invalid_marker(marker): + raise DistutilsSetupError("Invalid environment marker: " + marker) + list(pkg_resources.parse_requirements(reqs)) + + +def assert_bool(dist, attr, value): + """Verify that value is True, False, 0, or 1""" + if bool(value) != value: + tmpl = "{attr!r} must be a boolean value (got {value!r})" + raise DistutilsSetupError(tmpl.format(attr=attr, value=value)) + + +def check_requirements(dist, attr, value): + """Verify that install_requires is a valid requirements list""" + try: + list(pkg_resources.parse_requirements(value)) + if isinstance(value, (dict, set)): + raise TypeError("Unordered types are not allowed") + except (TypeError, ValueError) as error: + tmpl = ( + "{attr!r} must be a string or list of strings " + "containing valid project/version requirement specifiers; {error}" + ) + raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) + + +def check_specifier(dist, attr, value): + """Verify that value is a valid version specifier""" + try: + packaging.specifiers.SpecifierSet(value) + except packaging.specifiers.InvalidSpecifier as error: + tmpl = ( + "{attr!r} must be a string " + "containing valid version specifiers; {error}" + ) + raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) + + +def check_entry_points(dist, attr, value): + """Verify that entry_points map is parseable""" + try: + pkg_resources.EntryPoint.parse_map(value) + except ValueError as e: + raise DistutilsSetupError(e) + + +def check_test_suite(dist, attr, value): + if not isinstance(value, six.string_types): + raise DistutilsSetupError("test_suite must be a string") + + +def check_package_data(dist, attr, value): + """Verify that value is a dictionary of package names to glob lists""" + if not isinstance(value, dict): + raise DistutilsSetupError( + "{!r} must be a dictionary mapping package names to lists of " + "string wildcard patterns".format(attr)) + for k, v in value.items(): + if not isinstance(k, six.string_types): + raise DistutilsSetupError( + "keys of {!r} dict must be strings (got {!r})" + .format(attr, k) + ) + assert_string_list(dist, 'values of {!r} dict'.format(attr), v) + + +def check_packages(dist, attr, value): + for pkgname in value: + if not re.match(r'\w+(\.\w+)*', pkgname): + distutils.log.warn( + "WARNING: %r not a valid package name; please use only " + ".-separated package names in setup.py", pkgname + ) + + +_Distribution = get_unpatched(distutils.core.Distribution) + + +class Distribution(_Distribution): + """Distribution with support for features, tests, and package data + + This is an enhanced version of 'distutils.dist.Distribution' that + effectively adds the following new optional keyword arguments to 'setup()': + + 'install_requires' -- a string or sequence of strings specifying project + versions that the distribution requires when installed, in the format + used by 'pkg_resources.require()'. They will be installed + automatically when the package is installed. 
If you wish to use + packages that are not available in PyPI, or want to give your users an + alternate download location, you can add a 'find_links' option to the + '[easy_install]' section of your project's 'setup.cfg' file, and then + setuptools will scan the listed web pages for links that satisfy the + requirements. + + 'extras_require' -- a dictionary mapping names of optional "extras" to the + additional requirement(s) that using those extras incurs. For example, + this:: + + extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) + + indicates that the distribution can optionally provide an extra + capability called "reST", but it can only be used if docutils and + reSTedit are installed. If the user installs your package using + EasyInstall and requests one of your extras, the corresponding + additional requirements will be installed if needed. + + 'features' **deprecated** -- a dictionary mapping option names to + 'setuptools.Feature' + objects. Features are a portion of the distribution that can be + included or excluded based on user options, inter-feature dependencies, + and availability on the current system. Excluded features are omitted + from all setup commands, including source and binary distributions, so + you can create multiple distributions from the same source tree. + Feature names should be valid Python identifiers, except that they may + contain the '-' (minus) sign. Features can be included or excluded + via the command line options '--with-X' and '--without-X', where 'X' is + the name of the feature. Whether a feature is included by default, and + whether you are allowed to control this from the command line, is + determined by the Feature object. See the 'Feature' class for more + information. + + 'test_suite' -- the name of a test suite to run for the 'test' command. + If the user runs 'python setup.py test', the package will be installed, + and the named test suite will be run. The format is the same as + would be used on a 'unittest.py' command line. That is, it is the + dotted name of an object to import and call to generate a test suite. + + 'package_data' -- a dictionary mapping package names to lists of filenames + or globs to use to find data files contained in the named packages. + If the dictionary has filenames or globs listed under '""' (the empty + string), those names will be searched for in every package, in addition + to any names for the specific package. Data files found using these + names/globs will be installed along with the package, in the same + location as the package. Note that globs are allowed to reference + the contents of non-package subdirectories, as long as you use '/' as + a path separator. (Globs are automatically converted to + platform-specific paths at runtime.) + + In addition to these new keywords, this class also has several new methods + for manipulating the distribution's contents. For example, the 'include()' + and 'exclude()' methods can be thought of as in-place add and subtract + commands that add or remove packages, modules, extensions, and so on from + the distribution. They are used by the feature subsystem to configure the + distribution for the included and excluded features. 
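# --- Illustrative sketch (not part of the vendored file): the setup()
# --- keywords documented above, as they might appear in a hypothetical
# --- setup.py.  Every name and version is invented.
from setuptools import setup, find_packages

setup(
    name='demo',
    version='0.1',
    packages=find_packages(),
    install_requires=['requests>=2.0'],
    extras_require={'reST': ['docutils>=0.3']},
    package_data={'': ['*.txt'], 'demo': ['data/*.dat']},
    test_suite='demo.tests',
)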
+ """ + + _DISTUTILS_UNSUPPORTED_METADATA = { + 'long_description_content_type': None, + 'project_urls': dict, + 'provides_extras': ordered_set.OrderedSet, + 'license_files': ordered_set.OrderedSet, + } + + _patched_dist = None + + def patch_missing_pkg_info(self, attrs): + # Fake up a replacement for the data that would normally come from + # PKG-INFO, but which might not yet be built if this is a fresh + # checkout. + # + if not attrs or 'name' not in attrs or 'version' not in attrs: + return + key = pkg_resources.safe_name(str(attrs['name'])).lower() + dist = pkg_resources.working_set.by_key.get(key) + if dist is not None and not dist.has_metadata('PKG-INFO'): + dist._version = pkg_resources.safe_version(str(attrs['version'])) + self._patched_dist = dist + + def __init__(self, attrs=None): + have_package_data = hasattr(self, "package_data") + if not have_package_data: + self.package_data = {} + attrs = attrs or {} + if 'features' in attrs or 'require_features' in attrs: + Feature.warn_deprecated() + self.require_features = [] + self.features = {} + self.dist_files = [] + # Filter-out setuptools' specific options. + self.src_root = attrs.pop("src_root", None) + self.patch_missing_pkg_info(attrs) + self.dependency_links = attrs.pop('dependency_links', []) + self.setup_requires = attrs.pop('setup_requires', []) + for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): + vars(self).setdefault(ep.name, None) + _Distribution.__init__(self, { + k: v for k, v in attrs.items() + if k not in self._DISTUTILS_UNSUPPORTED_METADATA + }) + + # Fill-in missing metadata fields not supported by distutils. + # Note some fields may have been set by other tools (e.g. pbr) + # above; they are taken preferrentially to setup() arguments + for option, default in self._DISTUTILS_UNSUPPORTED_METADATA.items(): + for source in self.metadata.__dict__, attrs: + if option in source: + value = source[option] + break + else: + value = default() if default else None + setattr(self.metadata, option, value) + + if isinstance(self.metadata.version, numbers.Number): + # Some people apparently take "version number" too literally :) + self.metadata.version = str(self.metadata.version) + + if self.metadata.version is not None: + try: + ver = packaging.version.Version(self.metadata.version) + normalized_version = str(ver) + if self.metadata.version != normalized_version: + warnings.warn( + "Normalizing '%s' to '%s'" % ( + self.metadata.version, + normalized_version, + ) + ) + self.metadata.version = normalized_version + except (packaging.version.InvalidVersion, TypeError): + warnings.warn( + "The version specified (%r) is an invalid version, this " + "may not work as expected with newer versions of " + "setuptools, pip, and PyPI. Please see PEP 440 for more " + "details." % self.metadata.version + ) + self._finalize_requires() + + def _finalize_requires(self): + """ + Set `metadata.python_requires` and fix environment markers + in `install_requires` and `extras_require`. + """ + if getattr(self, 'python_requires', None): + self.metadata.python_requires = self.python_requires + + if getattr(self, 'extras_require', None): + for extra in self.extras_require.keys(): + # Since this gets called multiple times at points where the + # keys have become 'converted' extras, ensure that we are only + # truly adding extras we haven't seen before here. 
+ extra = extra.split(':')[0] + if extra: + self.metadata.provides_extras.add(extra) + + self._convert_extras_requirements() + self._move_install_requirements_markers() + + def _convert_extras_requirements(self): + """ + Convert requirements in `extras_require` of the form + `"extra": ["barbazquux; {marker}"]` to + `"extra:{marker}": ["barbazquux"]`. + """ + spec_ext_reqs = getattr(self, 'extras_require', None) or {} + self._tmp_extras_require = defaultdict(list) + for section, v in spec_ext_reqs.items(): + # Do not strip empty sections. + self._tmp_extras_require[section] + for r in pkg_resources.parse_requirements(v): + suffix = self._suffix_for(r) + self._tmp_extras_require[section + suffix].append(r) + + @staticmethod + def _suffix_for(req): + """ + For a requirement, return the 'extras_require' suffix for + that requirement. + """ + return ':' + str(req.marker) if req.marker else '' + + def _move_install_requirements_markers(self): + """ + Move requirements in `install_requires` that are using environment + markers `extras_require`. + """ + + # divide the install_requires into two sets, simple ones still + # handled by install_requires and more complex ones handled + # by extras_require. + + def is_simple_req(req): + return not req.marker + + spec_inst_reqs = getattr(self, 'install_requires', None) or () + inst_reqs = list(pkg_resources.parse_requirements(spec_inst_reqs)) + simple_reqs = filter(is_simple_req, inst_reqs) + complex_reqs = filterfalse(is_simple_req, inst_reqs) + self.install_requires = list(map(str, simple_reqs)) + + for r in complex_reqs: + self._tmp_extras_require[':' + str(r.marker)].append(r) + self.extras_require = dict( + (k, [str(r) for r in map(self._clean_req, v)]) + for k, v in self._tmp_extras_require.items() + ) + + def _clean_req(self, req): + """ + Given a Requirement, remove environment markers and return it. + """ + req.marker = None + return req + + def _parse_config_files(self, filenames=None): + """ + Adapted from distutils.dist.Distribution.parse_config_files, + this method provides the same functionality in subtly-improved + ways. + """ + from setuptools.extern.six.moves.configparser import ConfigParser + + # Ignore install directory options if we have a venv + if six.PY3 and sys.prefix != sys.base_prefix: + ignore_options = [ + 'install-base', 'install-platbase', 'install-lib', + 'install-platlib', 'install-purelib', 'install-headers', + 'install-scripts', 'install-data', 'prefix', 'exec-prefix', + 'home', 'user', 'root'] + else: + ignore_options = [] + + ignore_options = frozenset(ignore_options) + + if filenames is None: + filenames = self.find_config_files() + + if DEBUG: + self.announce("Distribution.parse_config_files():") + + parser = ConfigParser() + for filename in filenames: + with io.open(filename, encoding='utf-8') as reader: + if DEBUG: + self.announce(" reading {filename}".format(**locals())) + (parser.read_file if six.PY3 else parser.readfp)(reader) + for section in parser.sections(): + options = parser.options(section) + opt_dict = self.get_option_dict(section) + + for opt in options: + if opt != '__name__' and opt not in ignore_options: + val = self._try_str(parser.get(section, opt)) + opt = opt.replace('-', '_') + opt_dict[opt] = (filename, val) + + # Make the ConfigParser forget everything (so we retain + # the original filenames that options come from) + parser.__init__() + + # If there was a "global" section in the config file, use it + # to set Distribution options. 
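# --- Illustrative sketch (not part of the vendored file): the requirement
# --- splitting implemented above moves any install_requires entry that
# --- carries an environment marker into an ':{marker}' pseudo-extra.
# --- The requirement names are invented.
from setuptools.dist import Distribution

dist = Distribution(attrs={
    'install_requires': [
        'simple>=1.0',
        'winonly>=2.0; sys_platform == "win32"',
    ],
})
print(dist.install_requires)        # ['simple>=1.0']
print(sorted(dist.extras_require))  # one key of the form ':sys_platform == "win32"'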
+ + if 'global' in self.command_options: + for (opt, (src, val)) in self.command_options['global'].items(): + alias = self.negative_opt.get(opt) + try: + if alias: + setattr(self, alias, not strtobool(val)) + elif opt in ('verbose', 'dry_run'): # ugh! + setattr(self, opt, strtobool(val)) + else: + setattr(self, opt, val) + except ValueError as msg: + raise DistutilsOptionError(msg) + + @staticmethod + def _try_str(val): + """ + On Python 2, much of distutils relies on string values being of + type 'str' (bytes) and not unicode text. If the value can be safely + encoded to bytes using the default encoding, prefer that. + + Why the default encoding? Because that value can be implicitly + decoded back to text if needed. + + Ref #1653 + """ + if six.PY3: + return val + try: + return val.encode() + except UnicodeEncodeError: + pass + return val + + def _set_command_options(self, command_obj, option_dict=None): + """ + Set the options for 'command_obj' from 'option_dict'. Basically + this means copying elements of a dictionary ('option_dict') to + attributes of an instance ('command'). + + 'command_obj' must be a Command instance. If 'option_dict' is not + supplied, uses the standard option dictionary for this command + (from 'self.command_options'). + + (Adopted from distutils.dist.Distribution._set_command_options) + """ + command_name = command_obj.get_command_name() + if option_dict is None: + option_dict = self.get_option_dict(command_name) + + if DEBUG: + self.announce(" setting options for '%s' command:" % command_name) + for (option, (source, value)) in option_dict.items(): + if DEBUG: + self.announce(" %s = %s (from %s)" % (option, value, + source)) + try: + bool_opts = [translate_longopt(o) + for o in command_obj.boolean_options] + except AttributeError: + bool_opts = [] + try: + neg_opt = command_obj.negative_opt + except AttributeError: + neg_opt = {} + + try: + is_string = isinstance(value, six.string_types) + if option in neg_opt and is_string: + setattr(command_obj, neg_opt[option], not strtobool(value)) + elif option in bool_opts and is_string: + setattr(command_obj, option, strtobool(value)) + elif hasattr(command_obj, option): + setattr(command_obj, option, value) + else: + raise DistutilsOptionError( + "error in %s: command '%s' has no such option '%s'" + % (source, command_name, option)) + except ValueError as msg: + raise DistutilsOptionError(msg) + + def parse_config_files(self, filenames=None, ignore_option_errors=False): + """Parses configuration files from various levels + and loads configuration. 
+ + """ + self._parse_config_files(filenames=filenames) + + parse_configuration(self, self.command_options, + ignore_option_errors=ignore_option_errors) + self._finalize_requires() + + def parse_command_line(self): + """Process features after parsing command line options""" + result = _Distribution.parse_command_line(self) + if self.features: + self._finalize_features() + return result + + def _feature_attrname(self, name): + """Convert feature name to corresponding option attribute name""" + return 'with_' + name.replace('-', '_') + + def fetch_build_eggs(self, requires): + """Resolve pre-setup requirements""" + resolved_dists = pkg_resources.working_set.resolve( + pkg_resources.parse_requirements(requires), + installer=self.fetch_build_egg, + replace_conflicting=True, + ) + for dist in resolved_dists: + pkg_resources.working_set.add(dist, replace=True) + return resolved_dists + + def finalize_options(self): + """ + Allow plugins to apply arbitrary operations to the + distribution. Each hook may optionally define a 'order' + to influence the order of execution. Smaller numbers + go first and the default is 0. + """ + hook_key = 'setuptools.finalize_distribution_options' + + def by_order(hook): + return getattr(hook, 'order', 0) + eps = pkg_resources.iter_entry_points(hook_key) + for ep in sorted(eps, key=by_order): + ep.load()(self) + + def _finalize_setup_keywords(self): + for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): + value = getattr(self, ep.name, None) + if value is not None: + ep.require(installer=self.fetch_build_egg) + ep.load()(self, ep.name, value) + + def _finalize_2to3_doctests(self): + if getattr(self, 'convert_2to3_doctests', None): + # XXX may convert to set here when we can rely on set being builtin + self.convert_2to3_doctests = [ + os.path.abspath(p) + for p in self.convert_2to3_doctests + ] + else: + self.convert_2to3_doctests = [] + + def get_egg_cache_dir(self): + egg_cache_dir = os.path.join(os.curdir, '.eggs') + if not os.path.exists(egg_cache_dir): + os.mkdir(egg_cache_dir) + windows_support.hide_file(egg_cache_dir) + readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt') + with open(readme_txt_filename, 'w') as f: + f.write('This directory contains eggs that were downloaded ' + 'by setuptools to build, test, and run plug-ins.\n\n') + f.write('This directory caches those eggs to prevent ' + 'repeated downloads.\n\n') + f.write('However, it is safe to delete this directory.\n\n') + + return egg_cache_dir + + def fetch_build_egg(self, req): + """Fetch an egg needed for building""" + from setuptools.installer import fetch_build_egg + return fetch_build_egg(self, req) + + def _finalize_feature_opts(self): + """Add --with-X/--without-X options based on optional features""" + + if not self.features: + return + + go = [] + no = self.negative_opt.copy() + + for name, feature in self.features.items(): + self._set_feature(name, None) + feature.validate(self) + + if feature.optional: + descr = feature.description + incdef = ' (default)' + excdef = '' + if not feature.include_by_default(): + excdef, incdef = incdef, excdef + + new = ( + ('with-' + name, None, 'include ' + descr + incdef), + ('without-' + name, None, 'exclude ' + descr + excdef), + ) + go.extend(new) + no['without-' + name] = 'with-' + name + + self.global_options = self.feature_options = go + self.global_options + self.negative_opt = self.feature_negopt = no + + def _finalize_features(self): + """Add/remove features and resolve dependencies between them""" + + # First, flag 
all the enabled items (and thus their dependencies) + for name, feature in self.features.items(): + enabled = self.feature_is_included(name) + if enabled or (enabled is None and feature.include_by_default()): + feature.include_in(self) + self._set_feature(name, 1) + + # Then disable the rest, so that off-by-default features don't + # get flagged as errors when they're required by an enabled feature + for name, feature in self.features.items(): + if not self.feature_is_included(name): + feature.exclude_from(self) + self._set_feature(name, 0) + + def get_command_class(self, command): + """Pluggable version of get_command_class()""" + if command in self.cmdclass: + return self.cmdclass[command] + + eps = pkg_resources.iter_entry_points('distutils.commands', command) + for ep in eps: + ep.require(installer=self.fetch_build_egg) + self.cmdclass[command] = cmdclass = ep.load() + return cmdclass + else: + return _Distribution.get_command_class(self, command) + + def print_commands(self): + for ep in pkg_resources.iter_entry_points('distutils.commands'): + if ep.name not in self.cmdclass: + # don't require extras as the commands won't be invoked + cmdclass = ep.resolve() + self.cmdclass[ep.name] = cmdclass + return _Distribution.print_commands(self) + + def get_command_list(self): + for ep in pkg_resources.iter_entry_points('distutils.commands'): + if ep.name not in self.cmdclass: + # don't require extras as the commands won't be invoked + cmdclass = ep.resolve() + self.cmdclass[ep.name] = cmdclass + return _Distribution.get_command_list(self) + + def _set_feature(self, name, status): + """Set feature's inclusion status""" + setattr(self, self._feature_attrname(name), status) + + def feature_is_included(self, name): + """Return 1 if feature is included, 0 if excluded, 'None' if unknown""" + return getattr(self, self._feature_attrname(name)) + + def include_feature(self, name): + """Request inclusion of feature named 'name'""" + + if self.feature_is_included(name) == 0: + descr = self.features[name].description + raise DistutilsOptionError( + descr + " is required, but was excluded or is not available" + ) + self.features[name].include_in(self) + self._set_feature(name, 1) + + def include(self, **attrs): + """Add items to distribution that are named in keyword arguments + + For example, 'dist.include(py_modules=["x"])' would add 'x' to + the distribution's 'py_modules' attribute, if it was not already + there. + + Currently, this method only supports inclusion for attributes that are + lists or tuples. If you need to add support for adding to other + attributes in this or a subclass, you can add an '_include_X' method, + where 'X' is the name of the attribute. The method will be called with + the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})' + will try to call 'dist._include_foo({"bar":"baz"})', which can then + handle whatever special inclusion logic is needed. + """ + for k, v in attrs.items(): + include = getattr(self, '_include_' + k, None) + if include: + include(v) + else: + self._include_misc(k, v) + + def exclude_package(self, package): + """Remove packages, modules, and extensions in named package""" + + pfx = package + '.' 
+ if self.packages: + self.packages = [ + p for p in self.packages + if p != package and not p.startswith(pfx) + ] + + if self.py_modules: + self.py_modules = [ + p for p in self.py_modules + if p != package and not p.startswith(pfx) + ] + + if self.ext_modules: + self.ext_modules = [ + p for p in self.ext_modules + if p.name != package and not p.name.startswith(pfx) + ] + + def has_contents_for(self, package): + """Return true if 'exclude_package(package)' would do something""" + + pfx = package + '.' + + for p in self.iter_distribution_names(): + if p == package or p.startswith(pfx): + return True + + def _exclude_misc(self, name, value): + """Handle 'exclude()' for list/tuple attrs without a special handler""" + if not isinstance(value, sequence): + raise DistutilsSetupError( + "%s: setting must be a list or tuple (%r)" % (name, value) + ) + try: + old = getattr(self, name) + except AttributeError: + raise DistutilsSetupError( + "%s: No such distribution setting" % name + ) + if old is not None and not isinstance(old, sequence): + raise DistutilsSetupError( + name + ": this setting cannot be changed via include/exclude" + ) + elif old: + setattr(self, name, [item for item in old if item not in value]) + + def _include_misc(self, name, value): + """Handle 'include()' for list/tuple attrs without a special handler""" + + if not isinstance(value, sequence): + raise DistutilsSetupError( + "%s: setting must be a list (%r)" % (name, value) + ) + try: + old = getattr(self, name) + except AttributeError: + raise DistutilsSetupError( + "%s: No such distribution setting" % name + ) + if old is None: + setattr(self, name, value) + elif not isinstance(old, sequence): + raise DistutilsSetupError( + name + ": this setting cannot be changed via include/exclude" + ) + else: + new = [item for item in value if item not in old] + setattr(self, name, old + new) + + def exclude(self, **attrs): + """Remove items from distribution that are named in keyword arguments + + For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from + the distribution's 'py_modules' attribute. Excluding packages uses + the 'exclude_package()' method, so all of the package's contained + packages, modules, and extensions are also excluded. + + Currently, this method only supports exclusion from attributes that are + lists or tuples. If you need to add support for excluding from other + attributes in this or a subclass, you can add an '_exclude_X' method, + where 'X' is the name of the attribute. The method will be called with + the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})' + will try to call 'dist._exclude_foo({"bar":"baz"})', which can then + handle whatever special exclusion logic is needed. + """ + for k, v in attrs.items(): + exclude = getattr(self, '_exclude_' + k, None) + if exclude: + exclude(v) + else: + self._exclude_misc(k, v) + + def _exclude_packages(self, packages): + if not isinstance(packages, sequence): + raise DistutilsSetupError( + "packages: setting must be a list or tuple (%r)" % (packages,) + ) + list(map(self.exclude_package, packages)) + + def _parse_command_opts(self, parser, args): + # Remove --with-X/--without-X options when processing command args + self.global_options = self.__class__.global_options + self.negative_opt = self.__class__.negative_opt + + # First, expand any aliases + command = args[0] + aliases = self.get_option_dict('aliases') + while command in aliases: + src, alias = aliases[command] + del aliases[command] # ensure each alias can expand only once! 
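# --- Illustrative sketch (not part of the vendored file): the
# --- include()/exclude() semantics documented above, applied to a
# --- list-valued option.  Module names are invented.
from setuptools.dist import Distribution

d = Distribution(attrs={'py_modules': ['a', 'b']})
d.include(py_modules=['b', 'c'])  # only the missing item is appended
d.exclude(py_modules=['a'])       # filtered out by _exclude_misc()
print(d.py_modules)               # ['b', 'c']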
+ import shlex + args[:1] = shlex.split(alias, True) + command = args[0] + + nargs = _Distribution._parse_command_opts(self, parser, args) + + # Handle commands that want to consume all remaining arguments + cmd_class = self.get_command_class(command) + if getattr(cmd_class, 'command_consumes_arguments', None): + self.get_option_dict(command)['args'] = ("command line", nargs) + if nargs is not None: + return [] + + return nargs + + def get_cmdline_options(self): + """Return a '{cmd: {opt:val}}' map of all command-line options + + Option names are all long, but do not include the leading '--', and + contain dashes rather than underscores. If the option doesn't take + an argument (e.g. '--quiet'), the 'val' is 'None'. + + Note that options provided by config files are intentionally excluded. + """ + + d = {} + + for cmd, opts in self.command_options.items(): + + for opt, (src, val) in opts.items(): + + if src != "command line": + continue + + opt = opt.replace('_', '-') + + if val == 0: + cmdobj = self.get_command_obj(cmd) + neg_opt = self.negative_opt.copy() + neg_opt.update(getattr(cmdobj, 'negative_opt', {})) + for neg, pos in neg_opt.items(): + if pos == opt: + opt = neg + val = None + break + else: + raise AssertionError("Shouldn't be able to get here") + + elif val == 1: + val = None + + d.setdefault(cmd, {})[opt] = val + + return d + + def iter_distribution_names(self): + """Yield all packages, modules, and extension names in distribution""" + + for pkg in self.packages or (): + yield pkg + + for module in self.py_modules or (): + yield module + + for ext in self.ext_modules or (): + if isinstance(ext, tuple): + name, buildinfo = ext + else: + name = ext.name + if name.endswith('module'): + name = name[:-6] + yield name + + def handle_display_options(self, option_order): + """If there were any non-global "display-only" options + (--help-commands or the metadata display options) on the command + line, display the requested info and return true; else return + false. + """ + import sys + + if six.PY2 or self.help_commands: + return _Distribution.handle_display_options(self, option_order) + + # Stdout may be StringIO (e.g. in tests) + if not isinstance(sys.stdout, io.TextIOWrapper): + return _Distribution.handle_display_options(self, option_order) + + # Don't wrap stdout if utf-8 is already the encoding. Provides + # workaround for #334. + if sys.stdout.encoding.lower() in ('utf-8', 'utf8'): + return _Distribution.handle_display_options(self, option_order) + + # Print metadata in UTF-8 no matter the platform + encoding = sys.stdout.encoding + errors = sys.stdout.errors + newline = sys.platform != 'win32' and '\n' or None + line_buffering = sys.stdout.line_buffering + + sys.stdout = io.TextIOWrapper( + sys.stdout.detach(), 'utf-8', errors, newline, line_buffering) + try: + return _Distribution.handle_display_options(self, option_order) + finally: + sys.stdout = io.TextIOWrapper( + sys.stdout.detach(), encoding, errors, newline, line_buffering) + + +class Feature: + """ + **deprecated** -- The `Feature` facility was never completely implemented + or supported, `has reported issues + <https://github.com/pypa/setuptools/issues/58>`_ and will be removed in + a future version. + + A subset of the distribution that can be excluded if unneeded/wanted + + Features are created using these keyword arguments: + + 'description' -- a short, human readable description of the feature, to + be used in error messages, and option help messages. 
+
+    'standard' -- if true, the feature is included by default if it is
+        available on the current system.  Otherwise, the feature is only
+        included if requested via a command line '--with-X' option, or if
+        another included feature requires it.  The default setting is 'False'.
+
+    'available' -- if true, the feature is available for installation on the
+        current system.  The default setting is 'True'.
+
+    'optional' -- if true, the feature's inclusion can be controlled from the
+        command line, using the '--with-X' or '--without-X' options.  If
+        false, the feature's inclusion status is determined automatically,
+        based on 'available', 'standard', and whether any other feature
+        requires it.  The default setting is 'True'.
+
+    'require_features' -- a string or sequence of strings naming features
+        that should also be included if this feature is included.  Defaults to
+        an empty list.  May also contain 'Require' objects that should be
+        added/removed from the distribution.
+
+    'remove' -- a string or list of strings naming packages to be removed
+        from the distribution if this feature is *not* included.  If the
+        feature *is* included, this argument is ignored.  This argument exists
+        to support removing features that "crosscut" a distribution, such as
+        defining a 'tests' feature that removes all the 'tests' subpackages
+        provided by other features.  The default for this argument is an empty
+        list.  (Note: the named package(s) or modules must exist in the base
+        distribution when the 'setup()' function is initially called.)
+
+    other keywords -- any other keyword arguments are saved, and passed to
+        the distribution's 'include()' and 'exclude()' methods when the
+        feature is included or excluded, respectively.  So, for example, you
+        could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
+        added or removed from the distribution as appropriate.
+
+    A feature must include at least one 'require_features', 'remove', or other
+    keyword argument.  Otherwise, it can't affect the distribution in any way.
+    Note also that you can subclass 'Feature' to create your own specialized
+    feature types that modify the distribution in other ways when included or
+    excluded.  See the docstrings for the various methods here for more detail.
+    Aside from the methods, the only feature attributes that distributions look
+    at are 'description' and 'optional'.
+    """
+
+    @staticmethod
+    def warn_deprecated():
+        msg = (
+            "Features are deprecated and will be removed in a future "
+            "version. See https://github.com/pypa/setuptools/issues/65."
+        )
+        warnings.warn(msg, DistDeprecationWarning, stacklevel=3)
+
+    def __init__(
+            self, description, standard=False, available=True,
+            optional=True, require_features=(), remove=(), **extras):
+        self.warn_deprecated()
+
+        self.description = description
+        self.standard = standard
+        self.available = available
+        self.optional = optional
+        if isinstance(require_features, (str, Require)):
+            require_features = require_features,
+
+        self.require_features = [
+            r for r in require_features if isinstance(r, str)
+        ]
+        er = [r for r in require_features if not isinstance(r, str)]
+        if er:
+            extras['require_features'] = er
+
+        if isinstance(remove, str):
+            remove = remove,
+        self.remove = remove
+        self.extras = extras
+
+        if not remove and not require_features and not extras:
+            raise DistutilsSetupError(
+                "Feature %s: must define 'require_features', 'remove', or "
+                "at least one of 'packages', 'py_modules', etc." % self.description
+ ) + + def include_by_default(self): + """Should this feature be included by default?""" + return self.available and self.standard + + def include_in(self, dist): + """Ensure feature and its requirements are included in distribution + + You may override this in a subclass to perform additional operations on + the distribution. Note that this method may be called more than once + per feature, and so should be idempotent. + + """ + + if not self.available: + raise DistutilsPlatformError( + self.description + " is required, " + "but is not available on this platform" + ) + + dist.include(**self.extras) + + for f in self.require_features: + dist.include_feature(f) + + def exclude_from(self, dist): + """Ensure feature is excluded from distribution + + You may override this in a subclass to perform additional operations on + the distribution. This method will be called at most once per + feature, and only after all included features have been asked to + include themselves. + """ + + dist.exclude(**self.extras) + + if self.remove: + for item in self.remove: + dist.exclude_package(item) + + def validate(self, dist): + """Verify that feature makes sense in context of distribution + + This method is called by the distribution just before it parses its + command line. It checks to ensure that the 'remove' attribute, if any, + contains only valid package/module names that are present in the base + distribution when 'setup()' is called. You may override it in a + subclass to perform any other required validation of the feature + against a target distribution. + """ + + for item in self.remove: + if not dist.has_contents_for(item): + raise DistutilsSetupError( + "%s wants to be able to remove %s, but the distribution" + " doesn't contain any packages or modules under %s" + % (self.description, item, item) + ) + + +class DistDeprecationWarning(SetuptoolsDeprecationWarning): + """Class for warning about deprecations in dist in + setuptools. Not ignored by default, unlike DeprecationWarning.""" diff --git a/backend/test/lib/python3.8/site-packages/setuptools/errors.py b/backend/test/lib/python3.8/site-packages/setuptools/errors.py new file mode 100644 index 0000000000000000000000000000000000000000..2701747f56cc77845159f2c5fee2d0ce114259af --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/errors.py @@ -0,0 +1,16 @@ +"""setuptools.errors + +Provides exceptions used by setuptools modules. +""" + +from distutils.errors import DistutilsError + + +class RemovedCommandError(DistutilsError, RuntimeError): + """Error used for commands that have been removed in setuptools. + + Since ``setuptools`` is built on ``distutils``, simply removing a command + from ``setuptools`` will make the behavior fall back to ``distutils``; this + error is raised if a command exists in ``distutils`` but has been actively + removed in ``setuptools``. + """ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/extension.py b/backend/test/lib/python3.8/site-packages/setuptools/extension.py new file mode 100644 index 0000000000000000000000000000000000000000..29468894f828128f4c36660167dd1f9e68e584be --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/extension.py @@ -0,0 +1,57 @@ +import re +import functools +import distutils.core +import distutils.errors +import distutils.extension + +from setuptools.extern.six.moves import map + +from .monkey import get_unpatched + + +def _have_cython(): + """ + Return True if Cython can be imported. 
+ """ + cython_impl = 'Cython.Distutils.build_ext' + try: + # from (cython_impl) import build_ext + __import__(cython_impl, fromlist=['build_ext']).build_ext + return True + except Exception: + pass + return False + + +# for compatibility +have_pyrex = _have_cython + +_Extension = get_unpatched(distutils.core.Extension) + + +class Extension(_Extension): + """Extension that uses '.c' files in place of '.pyx' files""" + + def __init__(self, name, sources, *args, **kw): + # The *args is needed for compatibility as calls may use positional + # arguments. py_limited_api may be set only via keyword. + self.py_limited_api = kw.pop("py_limited_api", False) + _Extension.__init__(self, name, sources, *args, **kw) + + def _convert_pyx_sources_to_lang(self): + """ + Replace sources with .pyx extensions to sources with the target + language extension. This mechanism allows language authors to supply + pre-converted sources but to prefer the .pyx sources. + """ + if _have_cython(): + # the build has Cython, so allow it to compile the .pyx files + return + lang = self.language or '' + target_ext = '.cpp' if lang.lower() == 'c++' else '.c' + sub = functools.partial(re.sub, '.pyx$', target_ext) + self.sources = list(map(sub, self.sources)) + + +class Library(Extension): + """Just like a regular Extension, but built as a library instead""" diff --git a/backend/test/lib/python3.8/site-packages/setuptools/extern/__init__.py b/backend/test/lib/python3.8/site-packages/setuptools/extern/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e8c616f910bb9bb874c3d44f1efe5239ecb8f621 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/extern/__init__.py @@ -0,0 +1,73 @@ +import sys + + +class VendorImporter: + """ + A PEP 302 meta path importer for finding optionally-vendored + or otherwise naturally-installed packages from root_name. + """ + + def __init__(self, root_name, vendored_names=(), vendor_pkg=None): + self.root_name = root_name + self.vendored_names = set(vendored_names) + self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor') + + @property + def search_path(self): + """ + Search first the vendor package then as a natural package. + """ + yield self.vendor_pkg + '.' + yield '' + + def find_module(self, fullname, path=None): + """ + Return self when fullname starts with root_name and the + target module is one vendored through this importer. + """ + root, base, target = fullname.partition(self.root_name + '.') + if root: + return + if not any(map(target.startswith, self.vendored_names)): + return + return self + + def load_module(self, fullname): + """ + Iterate over the search path to locate and load fullname. + """ + root, base, target = fullname.partition(self.root_name + '.') + for prefix in self.search_path: + try: + extant = prefix + target + __import__(extant) + mod = sys.modules[extant] + sys.modules[fullname] = mod + # mysterious hack: + # Remove the reference to the extant package/module + # on later Python versions to cause relative imports + # in the vendor package to resolve the same modules + # as those going through this importer. 
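+                # Example (illustrative): importing
+                # 'setuptools.extern.six' tries 'setuptools._vendor.six'
+                # first, then a top-level 'six'; whichever imports
+                # successfully is aliased in sys.modules under the
+                # 'setuptools.extern.six' name.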
+ if sys.version_info >= (3, ): + del sys.modules[extant] + return mod + except ImportError: + pass + else: + raise ImportError( + "The '{target}' package is required; " + "normally this is bundled with this package so if you get " + "this warning, consult the packager of your " + "distribution.".format(**locals()) + ) + + def install(self): + """ + Install this importer into sys.meta_path if not already present. + """ + if self not in sys.meta_path: + sys.meta_path.append(self) + + +names = 'six', 'packaging', 'pyparsing', 'ordered_set', +VendorImporter(__name__, names, 'setuptools._vendor').install() diff --git a/backend/test/lib/python3.8/site-packages/setuptools/extern/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/setuptools/extern/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9964b17091d19fe123750cd28a8469250401aec2 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/extern/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/glob.py b/backend/test/lib/python3.8/site-packages/setuptools/glob.py new file mode 100644 index 0000000000000000000000000000000000000000..9d7cbc5da68da8605d271b9314befb206b87bca6 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/glob.py @@ -0,0 +1,174 @@ +""" +Filename globbing utility. Mostly a copy of `glob` from Python 3.5. + +Changes include: + * `yield from` and PEP3102 `*` removed. + * Hidden files are not ignored. +""" + +import os +import re +import fnmatch + +__all__ = ["glob", "iglob", "escape"] + + +def glob(pathname, recursive=False): + """Return a list of paths matching a pathname pattern. + + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. + + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. + """ + return list(iglob(pathname, recursive=recursive)) + + +def iglob(pathname, recursive=False): + """Return an iterator which yields the paths matching a pathname pattern. + + The pattern may contain simple shell-style wildcards a la + fnmatch. However, unlike fnmatch, filenames starting with a + dot are special cases that are not matched by '*' and '?' + patterns. + + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. + """ + it = _iglob(pathname, recursive) + if recursive and _isrecursive(pathname): + s = next(it) # skip empty string + assert not s + return it + + +def _iglob(pathname, recursive): + dirname, basename = os.path.split(pathname) + if not has_magic(pathname): + if basename: + if os.path.lexists(pathname): + yield pathname + else: + # Patterns ending with a slash should match only directories + if os.path.isdir(dirname): + yield pathname + return + if not dirname: + if recursive and _isrecursive(basename): + for x in glob2(dirname, basename): + yield x + else: + for x in glob1(dirname, basename): + yield x + return + # `os.path.split()` returns the argument itself as a dirname if it is a + # drive or UNC path. Prevent an infinite recursion if a drive or UNC path + # contains magic characters (i.e. r'\\?\C:'). 
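+    # Example (illustrative): iglob('src/**/*.py', recursive=True) splits
+    # into dirname 'src/**' and basename '*.py'; the magic dirname is
+    # expanded by this recursion before '*.py' is matched in each
+    # directory it yields.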
+ if dirname != pathname and has_magic(dirname): + dirs = _iglob(dirname, recursive) + else: + dirs = [dirname] + if has_magic(basename): + if recursive and _isrecursive(basename): + glob_in_dir = glob2 + else: + glob_in_dir = glob1 + else: + glob_in_dir = glob0 + for dirname in dirs: + for name in glob_in_dir(dirname, basename): + yield os.path.join(dirname, name) + + +# These 2 helper functions non-recursively glob inside a literal directory. +# They return a list of basenames. `glob1` accepts a pattern while `glob0` +# takes a literal basename (so it only has to check for its existence). + + +def glob1(dirname, pattern): + if not dirname: + if isinstance(pattern, bytes): + dirname = os.curdir.encode('ASCII') + else: + dirname = os.curdir + try: + names = os.listdir(dirname) + except OSError: + return [] + return fnmatch.filter(names, pattern) + + +def glob0(dirname, basename): + if not basename: + # `os.path.split()` returns an empty basename for paths ending with a + # directory separator. 'q*x/' should match only directories. + if os.path.isdir(dirname): + return [basename] + else: + if os.path.lexists(os.path.join(dirname, basename)): + return [basename] + return [] + + +# This helper function recursively yields relative pathnames inside a literal +# directory. + + +def glob2(dirname, pattern): + assert _isrecursive(pattern) + yield pattern[:0] + for x in _rlistdir(dirname): + yield x + + +# Recursively yields relative pathnames inside a literal directory. +def _rlistdir(dirname): + if not dirname: + if isinstance(dirname, bytes): + dirname = os.curdir.encode('ASCII') + else: + dirname = os.curdir + try: + names = os.listdir(dirname) + except os.error: + return + for x in names: + yield x + path = os.path.join(dirname, x) if dirname else x + for y in _rlistdir(path): + yield os.path.join(x, y) + + +magic_check = re.compile('([*?[])') +magic_check_bytes = re.compile(b'([*?[])') + + +def has_magic(s): + if isinstance(s, bytes): + match = magic_check_bytes.search(s) + else: + match = magic_check.search(s) + return match is not None + + +def _isrecursive(pattern): + if isinstance(pattern, bytes): + return pattern == b'**' + else: + return pattern == '**' + + +def escape(pathname): + """Escape all special characters. + """ + # Escaping is done by wrapping any of "*?[" between square brackets. + # Metacharacters do not work in the drive part and shouldn't be escaped. 
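+    # Example (illustrative, Windows-style path):
+    #   escape(r'C:\dir\[x]?.txt') -> r'C:\dir\[[]x][?].txt'
+    # The 'C:' drive prefix is left untouched.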
+ drive, pathname = os.path.splitdrive(pathname) + if isinstance(pathname, bytes): + pathname = magic_check_bytes.sub(br'[\1]', pathname) + else: + pathname = magic_check.sub(r'[\1]', pathname) + return drive + pathname diff --git a/backend/test/lib/python3.8/site-packages/setuptools/gui-32.exe b/backend/test/lib/python3.8/site-packages/setuptools/gui-32.exe new file mode 100644 index 0000000000000000000000000000000000000000..f8d3509653ba8f80ca7f3aa7f95616142ba83a94 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/gui-32.exe differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/gui-64.exe b/backend/test/lib/python3.8/site-packages/setuptools/gui-64.exe new file mode 100644 index 0000000000000000000000000000000000000000..330c51a5dde15a0bb610a48cd0ca11770c914dae Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/gui-64.exe differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/gui.exe b/backend/test/lib/python3.8/site-packages/setuptools/gui.exe new file mode 100644 index 0000000000000000000000000000000000000000..f8d3509653ba8f80ca7f3aa7f95616142ba83a94 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/setuptools/gui.exe differ diff --git a/backend/test/lib/python3.8/site-packages/setuptools/installer.py b/backend/test/lib/python3.8/site-packages/setuptools/installer.py new file mode 100644 index 0000000000000000000000000000000000000000..9f8be2ef8427651e3b0fbef497535e152dde66b1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/installer.py @@ -0,0 +1,150 @@ +import glob +import os +import subprocess +import sys +from distutils import log +from distutils.errors import DistutilsError + +import pkg_resources +from setuptools.command.easy_install import easy_install +from setuptools.extern import six +from setuptools.wheel import Wheel + +from .py31compat import TemporaryDirectory + + +def _fixup_find_links(find_links): + """Ensure find-links option end-up being a list of strings.""" + if isinstance(find_links, six.string_types): + return find_links.split() + assert isinstance(find_links, (tuple, list)) + return find_links + + +def _legacy_fetch_build_egg(dist, req): + """Fetch an egg needed for building. + + Legacy path using EasyInstall. + """ + tmp_dist = dist.__class__({'script_args': ['easy_install']}) + opts = tmp_dist.get_option_dict('easy_install') + opts.clear() + opts.update( + (k, v) + for k, v in dist.get_option_dict('easy_install').items() + if k in ( + # don't use any other settings + 'find_links', 'site_dirs', 'index_url', + 'optimize', 'site_dirs', 'allow_hosts', + )) + if dist.dependency_links: + links = dist.dependency_links[:] + if 'find_links' in opts: + links = _fixup_find_links(opts['find_links'][1]) + links + opts['find_links'] = ('setup', links) + install_dir = dist.get_egg_cache_dir() + cmd = easy_install( + tmp_dist, args=["x"], install_dir=install_dir, + exclude_scripts=True, + always_copy=False, build_directory=None, editable=False, + upgrade=False, multi_version=True, no_report=True, user=False + ) + cmd.ensure_finalized() + return cmd.easy_install(req) + + +def fetch_build_egg(dist, req): + """Fetch an egg needed for building. + + Use pip/wheel to fetch/build a wheel.""" + # Check pip is available. 
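+    # Overall flow (summary, not upstream text): when pip is importable,
+    # the requirement is built into a wheel via `python -m pip wheel
+    # --no-deps` and installed into the egg cache; only when pip is
+    # missing does the deprecated _legacy_fetch_build_egg path above
+    # take over.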
+ try: + pkg_resources.get_distribution('pip') + except pkg_resources.DistributionNotFound: + dist.announce( + 'WARNING: The pip package is not available, falling back ' + 'to EasyInstall for handling setup_requires/test_requires; ' + 'this is deprecated and will be removed in a future version.' + , log.WARN + ) + return _legacy_fetch_build_egg(dist, req) + # Warn if wheel is not. + try: + pkg_resources.get_distribution('wheel') + except pkg_resources.DistributionNotFound: + dist.announce('WARNING: The wheel package is not available.', log.WARN) + # Ignore environment markers; if supplied, it is required. + req = strip_marker(req) + # Take easy_install options into account, but do not override relevant + # pip environment variables (like PIP_INDEX_URL or PIP_QUIET); they'll + # take precedence. + opts = dist.get_option_dict('easy_install') + if 'allow_hosts' in opts: + raise DistutilsError('the `allow-hosts` option is not supported ' + 'when using pip to install requirements.') + if 'PIP_QUIET' in os.environ or 'PIP_VERBOSE' in os.environ: + quiet = False + else: + quiet = True + if 'PIP_INDEX_URL' in os.environ: + index_url = None + elif 'index_url' in opts: + index_url = opts['index_url'][1] + else: + index_url = None + if 'find_links' in opts: + find_links = _fixup_find_links(opts['find_links'][1])[:] + else: + find_links = [] + if dist.dependency_links: + find_links.extend(dist.dependency_links) + eggs_dir = os.path.realpath(dist.get_egg_cache_dir()) + environment = pkg_resources.Environment() + for egg_dist in pkg_resources.find_distributions(eggs_dir): + if egg_dist in req and environment.can_add(egg_dist): + return egg_dist + with TemporaryDirectory() as tmpdir: + cmd = [ + sys.executable, '-m', 'pip', + '--disable-pip-version-check', + 'wheel', '--no-deps', + '-w', tmpdir, + ] + if quiet: + cmd.append('--quiet') + if index_url is not None: + cmd.extend(('--index-url', index_url)) + if find_links is not None: + for link in find_links: + cmd.extend(('--find-links', link)) + # If requirement is a PEP 508 direct URL, directly pass + # the URL to pip, as `req @ url` does not work on the + # command line. + if req.url: + cmd.append(req.url) + else: + cmd.append(str(req)) + try: + subprocess.check_call(cmd) + except subprocess.CalledProcessError as e: + raise DistutilsError(str(e)) + wheel = Wheel(glob.glob(os.path.join(tmpdir, '*.whl'))[0]) + dist_location = os.path.join(eggs_dir, wheel.egg_name()) + wheel.install_as_egg(dist_location) + dist_metadata = pkg_resources.PathMetadata( + dist_location, os.path.join(dist_location, 'EGG-INFO')) + dist = pkg_resources.Distribution.from_filename( + dist_location, metadata=dist_metadata) + return dist + + +def strip_marker(req): + """ + Return a new requirement without the environment marker to avoid + calling pip with something like `babel; extra == "i18n"`, which + would always be ignored. + """ + # create a copy to avoid mutating the input + req = pkg_resources.Requirement.parse(str(req)) + req.marker = None + return req diff --git a/backend/test/lib/python3.8/site-packages/setuptools/launch.py b/backend/test/lib/python3.8/site-packages/setuptools/launch.py new file mode 100644 index 0000000000000000000000000000000000000000..308283ea939ed9bced7b099eb8a1879aa9c203d4 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/launch.py @@ -0,0 +1,35 @@ +""" +Launch the Python script on the command line after +setuptools is bootstrapped via import. 
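+
+Example invocation (illustrative): `python -m setuptools.launch setup.py`
+runs setup.py with setuptools already imported.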
+""" + +# Note that setuptools gets imported implicitly by the +# invocation of this script using python -m setuptools.launch + +import tokenize +import sys + + +def run(): + """ + Run the script in sys.argv[1] as if it had + been invoked naturally. + """ + __builtins__ + script_name = sys.argv[1] + namespace = dict( + __file__=script_name, + __name__='__main__', + __doc__=None, + ) + sys.argv[:] = sys.argv[1:] + + open_ = getattr(tokenize, 'open', open) + script = open_(script_name).read() + norm_script = script.replace('\\r\\n', '\\n') + code = compile(norm_script, script_name, 'exec') + exec(code, namespace) + + +if __name__ == '__main__': + run() diff --git a/backend/test/lib/python3.8/site-packages/setuptools/lib2to3_ex.py b/backend/test/lib/python3.8/site-packages/setuptools/lib2to3_ex.py new file mode 100644 index 0000000000000000000000000000000000000000..4b1a73feb26fdad65bafdeb21f5ce6abfb905fc0 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/lib2to3_ex.py @@ -0,0 +1,62 @@ +""" +Customized Mixin2to3 support: + + - adds support for converting doctests + + +This module raises an ImportError on Python 2. +""" + +from distutils.util import Mixin2to3 as _Mixin2to3 +from distutils import log +from lib2to3.refactor import RefactoringTool, get_fixers_from_package + +import setuptools + + +class DistutilsRefactoringTool(RefactoringTool): + def log_error(self, msg, *args, **kw): + log.error(msg, *args) + + def log_message(self, msg, *args): + log.info(msg, *args) + + def log_debug(self, msg, *args): + log.debug(msg, *args) + + +class Mixin2to3(_Mixin2to3): + def run_2to3(self, files, doctests=False): + # See of the distribution option has been set, otherwise check the + # setuptools default. + if self.distribution.use_2to3 is not True: + return + if not files: + return + log.info("Fixing " + " ".join(files)) + self.__build_fixer_names() + self.__exclude_fixers() + if doctests: + if setuptools.run_2to3_on_doctests: + r = DistutilsRefactoringTool(self.fixer_names) + r.refactor(files, write=True, doctests_only=True) + else: + _Mixin2to3.run_2to3(self, files) + + def __build_fixer_names(self): + if self.fixer_names: + return + self.fixer_names = [] + for p in setuptools.lib2to3_fixer_packages: + self.fixer_names.extend(get_fixers_from_package(p)) + if self.distribution.use_2to3_fixers is not None: + for p in self.distribution.use_2to3_fixers: + self.fixer_names.extend(get_fixers_from_package(p)) + + def __exclude_fixers(self): + excluded_fixers = getattr(self, 'exclude_fixers', []) + if self.distribution.use_2to3_exclude_fixers is not None: + excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers) + for fixer_name in excluded_fixers: + if fixer_name in self.fixer_names: + self.fixer_names.remove(fixer_name) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/monkey.py b/backend/test/lib/python3.8/site-packages/setuptools/monkey.py new file mode 100644 index 0000000000000000000000000000000000000000..3c77f8cf27f0ab1e71d64cfc114ef9d1bf72295c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/monkey.py @@ -0,0 +1,179 @@ +""" +Monkey patching of distutils. +""" + +import sys +import distutils.filelist +import platform +import types +import functools +from importlib import import_module +import inspect + +from setuptools.extern import six + +import setuptools + +__all__ = [] +""" +Everything is private. Contact the project team +if you think you need this functionality. 
+""" + + +def _get_mro(cls): + """ + Returns the bases classes for cls sorted by the MRO. + + Works around an issue on Jython where inspect.getmro will not return all + base classes if multiple classes share the same name. Instead, this + function will return a tuple containing the class itself, and the contents + of cls.__bases__. See https://github.com/pypa/setuptools/issues/1024. + """ + if platform.python_implementation() == "Jython": + return (cls,) + cls.__bases__ + return inspect.getmro(cls) + + +def get_unpatched(item): + lookup = ( + get_unpatched_class if isinstance(item, six.class_types) else + get_unpatched_function if isinstance(item, types.FunctionType) else + lambda item: None + ) + return lookup(item) + + +def get_unpatched_class(cls): + """Protect against re-patching the distutils if reloaded + + Also ensures that no other distutils extension monkeypatched the distutils + first. + """ + external_bases = ( + cls + for cls in _get_mro(cls) + if not cls.__module__.startswith('setuptools') + ) + base = next(external_bases) + if not base.__module__.startswith('distutils'): + msg = "distutils has already been patched by %r" % cls + raise AssertionError(msg) + return base + + +def patch_all(): + # we can't patch distutils.cmd, alas + distutils.core.Command = setuptools.Command + + has_issue_12885 = sys.version_info <= (3, 5, 3) + + if has_issue_12885: + # fix findall bug in distutils (http://bugs.python.org/issue12885) + distutils.filelist.findall = setuptools.findall + + needs_warehouse = ( + sys.version_info < (2, 7, 13) + or + (3, 4) < sys.version_info < (3, 4, 6) + or + (3, 5) < sys.version_info <= (3, 5, 3) + ) + + if needs_warehouse: + warehouse = 'https://upload.pypi.org/legacy/' + distutils.config.PyPIRCCommand.DEFAULT_REPOSITORY = warehouse + + _patch_distribution_metadata() + + # Install Distribution throughout the distutils + for module in distutils.dist, distutils.core, distutils.cmd: + module.Distribution = setuptools.dist.Distribution + + # Install the patched Extension + distutils.core.Extension = setuptools.extension.Extension + distutils.extension.Extension = setuptools.extension.Extension + if 'distutils.command.build_ext' in sys.modules: + sys.modules['distutils.command.build_ext'].Extension = ( + setuptools.extension.Extension + ) + + patch_for_msvc_specialized_compiler() + + +def _patch_distribution_metadata(): + """Patch write_pkg_file and read_pkg_file for higher metadata standards""" + for attr in ('write_pkg_file', 'read_pkg_file', 'get_metadata_version'): + new_val = getattr(setuptools.dist, attr) + setattr(distutils.dist.DistributionMetadata, attr, new_val) + + +def patch_func(replacement, target_mod, func_name): + """ + Patch func_name in target_mod with replacement + + Important - original must be resolved by name to avoid + patching an already patched function. + """ + original = getattr(target_mod, func_name) + + # set the 'unpatched' attribute on the replacement to + # point to the original. + vars(replacement).setdefault('unpatched', original) + + # replace the function in the original module + setattr(target_mod, func_name, replacement) + + +def get_unpatched_function(candidate): + return getattr(candidate, 'unpatched') + + +def patch_for_msvc_specialized_compiler(): + """ + Patch functions in distutils to use standalone Microsoft Visual C++ + compilers. 
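+
+    Concretely (summarising the calls below): msvc9compiler's
+    find_vcvarsall and query_vcvarsall, and _msvccompiler's _get_vc_env
+    and gen_lib_options, are replaced by the msvc9_*/msvc14_* variants
+    defined in setuptools.msvc.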
+ """ + # import late to avoid circular imports on Python < 3.5 + msvc = import_module('setuptools.msvc') + + if platform.system() != 'Windows': + # Compilers only availables on Microsoft Windows + return + + def patch_params(mod_name, func_name): + """ + Prepare the parameters for patch_func to patch indicated function. + """ + repl_prefix = 'msvc9_' if 'msvc9' in mod_name else 'msvc14_' + repl_name = repl_prefix + func_name.lstrip('_') + repl = getattr(msvc, repl_name) + mod = import_module(mod_name) + if not hasattr(mod, func_name): + raise ImportError(func_name) + return repl, mod, func_name + + # Python 2.7 to 3.4 + msvc9 = functools.partial(patch_params, 'distutils.msvc9compiler') + + # Python 3.5+ + msvc14 = functools.partial(patch_params, 'distutils._msvccompiler') + + try: + # Patch distutils.msvc9compiler + patch_func(*msvc9('find_vcvarsall')) + patch_func(*msvc9('query_vcvarsall')) + except ImportError: + pass + + try: + # Patch distutils._msvccompiler._get_vc_env + patch_func(*msvc14('_get_vc_env')) + except ImportError: + pass + + try: + # Patch distutils._msvccompiler.gen_lib_options for Numpy + patch_func(*msvc14('gen_lib_options')) + except ImportError: + pass diff --git a/backend/test/lib/python3.8/site-packages/setuptools/msvc.py b/backend/test/lib/python3.8/site-packages/setuptools/msvc.py new file mode 100644 index 0000000000000000000000000000000000000000..2ffe1c81ee629c98246e9e72bf630431fa7905b6 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/msvc.py @@ -0,0 +1,1679 @@ +""" +Improved support for Microsoft Visual C++ compilers. + +Known supported compilers: +-------------------------- +Microsoft Visual C++ 9.0: + Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64) + Microsoft Windows SDK 6.1 (x86, x64, ia64) + Microsoft Windows SDK 7.0 (x86, x64, ia64) + +Microsoft Visual C++ 10.0: + Microsoft Windows SDK 7.1 (x86, x64, ia64) + +Microsoft Visual C++ 14.X: + Microsoft Visual C++ Build Tools 2015 (x86, x64, arm) + Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64) + Microsoft Visual Studio Build Tools 2019 (x86, x64, arm, arm64) + +This may also support compilers shipped with compatible Visual Studio versions. +""" + +import json +from io import open +from os import listdir, pathsep +from os.path import join, isfile, isdir, dirname +import sys +import platform +import itertools +import distutils.errors +from setuptools.extern.packaging.version import LegacyVersion + +from setuptools.extern.six.moves import filterfalse + +from .monkey import get_unpatched + +if platform.system() == 'Windows': + from setuptools.extern.six.moves import winreg + from os import environ +else: + # Mock winreg and environ so the module can be imported on this platform. + + class winreg: + HKEY_USERS = None + HKEY_CURRENT_USER = None + HKEY_LOCAL_MACHINE = None + HKEY_CLASSES_ROOT = None + + environ = dict() + +_msvc9_suppress_errors = ( + # msvc9compiler isn't available on some platforms + ImportError, + + # msvc9compiler raises DistutilsPlatformError in some + # environments. See #1118. + distutils.errors.DistutilsPlatformError, +) + +try: + from distutils.msvc9compiler import Reg +except _msvc9_suppress_errors: + pass + + +def msvc9_find_vcvarsall(version): + """ + Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone + compiler build for Python + (VCForPython / Microsoft Visual C++ Compiler for Python 2.7). + + Fall back to original behavior when the standalone compiler is not + available. + + Redirect the path of "vcvarsall.bat". 
+ + Parameters + ---------- + version: float + Required Microsoft Visual C++ version. + + Return + ------ + str + vcvarsall.bat path + """ + vc_base = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f' + key = vc_base % ('', version) + try: + # Per-user installs register the compiler path here + productdir = Reg.get_value(key, "installdir") + except KeyError: + try: + # All-user installs on a 64-bit system register here + key = vc_base % ('Wow6432Node\\', version) + productdir = Reg.get_value(key, "installdir") + except KeyError: + productdir = None + + if productdir: + vcvarsall = join(productdir, "vcvarsall.bat") + if isfile(vcvarsall): + return vcvarsall + + return get_unpatched(msvc9_find_vcvarsall)(version) + + +def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs): + """ + Patched "distutils.msvc9compiler.query_vcvarsall" for support extra + Microsoft Visual C++ 9.0 and 10.0 compilers. + + Set environment without use of "vcvarsall.bat". + + Parameters + ---------- + ver: float + Required Microsoft Visual C++ version. + arch: str + Target architecture. + + Return + ------ + dict + environment + """ + # Try to get environment from vcvarsall.bat (Classical way) + try: + orig = get_unpatched(msvc9_query_vcvarsall) + return orig(ver, arch, *args, **kwargs) + except distutils.errors.DistutilsPlatformError: + # Pass error if Vcvarsall.bat is missing + pass + except ValueError: + # Pass error if environment not set after executing vcvarsall.bat + pass + + # If error, try to set environment directly + try: + return EnvironmentInfo(arch, ver).return_env() + except distutils.errors.DistutilsPlatformError as exc: + _augment_exception(exc, ver, arch) + raise + + +def msvc14_get_vc_env(plat_spec): + """ + Patched "distutils._msvccompiler._get_vc_env" for support extra + Microsoft Visual C++ 14.X compilers. + + Set environment without use of "vcvarsall.bat". + + Parameters + ---------- + plat_spec: str + Target architecture. + + Return + ------ + dict + environment + """ + # Try to get environment from vcvarsall.bat (Classical way) + try: + return get_unpatched(msvc14_get_vc_env)(plat_spec) + except distutils.errors.DistutilsPlatformError: + # Pass error Vcvarsall.bat is missing + pass + + # If error, try to set environment directly + try: + return EnvironmentInfo(plat_spec, vc_min_ver=14.0).return_env() + except distutils.errors.DistutilsPlatformError as exc: + _augment_exception(exc, 14.0) + raise + + +def msvc14_gen_lib_options(*args, **kwargs): + """ + Patched "distutils._msvccompiler.gen_lib_options" for fix + compatibility between "numpy.distutils" and "distutils._msvccompiler" + (for Numpy < 1.11.2) + """ + if "numpy.distutils" in sys.modules: + import numpy as np + if LegacyVersion(np.__version__) < LegacyVersion('1.11.2'): + return np.distutils.ccompiler.gen_lib_options(*args, **kwargs) + return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs) + + +def _augment_exception(exc, version, arch=''): + """ + Add details to the exception message to help guide the user + as to what action will resolve it. + """ + # Error if MSVC++ directory not found or environment not set + message = exc.args[0] + + if "vcvarsall" in message.lower() or "visual c" in message.lower(): + # Special error message if MSVC++ not installed + tmpl = 'Microsoft Visual C++ {version:0.1f} is required.' 
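+        # Example (illustrative): for version=14.0 this renders
+        # 'Microsoft Visual C++ 14.0 is required.' before a download hint
+        # for the matching Build Tools is appended below.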
+ message = tmpl.format(**locals()) + msdownload = 'www.microsoft.com/download/details.aspx?id=%d' + if version == 9.0: + if arch.lower().find('ia64') > -1: + # For VC++ 9.0, if IA64 support is needed, redirect user + # to Windows SDK 7.0. + # Note: No download link available from Microsoft. + message += ' Get it with "Microsoft Windows SDK 7.0"' + else: + # For VC++ 9.0 redirect user to Vc++ for Python 2.7 : + # This redirection link is maintained by Microsoft. + # Contact vspython@microsoft.com if it needs updating. + message += ' Get it from http://aka.ms/vcpython27' + elif version == 10.0: + # For VC++ 10.0 Redirect user to Windows SDK 7.1 + message += ' Get it with "Microsoft Windows SDK 7.1": ' + message += msdownload % 8279 + elif version >= 14.0: + # For VC++ 14.X Redirect user to latest Visual C++ Build Tools + message += (' Get it with "Build Tools for Visual Studio": ' + r'https://visualstudio.microsoft.com/downloads/') + + exc.args = (message, ) + + +class PlatformInfo: + """ + Current and Target Architectures information. + + Parameters + ---------- + arch: str + Target architecture. + """ + current_cpu = environ.get('processor_architecture', '').lower() + + def __init__(self, arch): + self.arch = arch.lower().replace('x64', 'amd64') + + @property + def target_cpu(self): + """ + Return Target CPU architecture. + + Return + ------ + str + Target CPU + """ + return self.arch[self.arch.find('_') + 1:] + + def target_is_x86(self): + """ + Return True if target CPU is x86 32 bits.. + + Return + ------ + bool + CPU is x86 32 bits + """ + return self.target_cpu == 'x86' + + def current_is_x86(self): + """ + Return True if current CPU is x86 32 bits.. + + Return + ------ + bool + CPU is x86 32 bits + """ + return self.current_cpu == 'x86' + + def current_dir(self, hidex86=False, x64=False): + """ + Current platform specific subfolder. + + Parameters + ---------- + hidex86: bool + return '' and not '\x86' if architecture is x86. + x64: bool + return '\x64' and not '\amd64' if architecture is amd64. + + Return + ------ + str + subfolder: '\target', or '' (see hidex86 parameter) + """ + return ( + '' if (self.current_cpu == 'x86' and hidex86) else + r'\x64' if (self.current_cpu == 'amd64' and x64) else + r'\%s' % self.current_cpu + ) + + def target_dir(self, hidex86=False, x64=False): + r""" + Target platform specific subfolder. + + Parameters + ---------- + hidex86: bool + return '' and not '\x86' if architecture is x86. + x64: bool + return '\x64' and not '\amd64' if architecture is amd64. + + Return + ------ + str + subfolder: '\current', or '' (see hidex86 parameter) + """ + return ( + '' if (self.target_cpu == 'x86' and hidex86) else + r'\x64' if (self.target_cpu == 'amd64' and x64) else + r'\%s' % self.target_cpu + ) + + def cross_dir(self, forcex86=False): + r""" + Cross platform specific subfolder. + + Parameters + ---------- + forcex86: bool + Use 'x86' as current architecture even if current architecture is + not x86. + + Return + ------ + str + subfolder: '' if target architecture is current architecture, + '\current_target' if not. + """ + current = 'x86' if forcex86 else self.current_cpu + return ( + '' if self.target_cpu == current else + self.target_dir().replace('\\', '\\%s_' % current) + ) + + +class RegistryInfo: + """ + Microsoft Visual Studio related registry information. + + Parameters + ---------- + platform_info: PlatformInfo + "PlatformInfo" instance. 
+ """ + HKEYS = (winreg.HKEY_USERS, + winreg.HKEY_CURRENT_USER, + winreg.HKEY_LOCAL_MACHINE, + winreg.HKEY_CLASSES_ROOT) + + def __init__(self, platform_info): + self.pi = platform_info + + @property + def visualstudio(self): + """ + Microsoft Visual Studio root registry key. + + Return + ------ + str + Registry key + """ + return 'VisualStudio' + + @property + def sxs(self): + """ + Microsoft Visual Studio SxS registry key. + + Return + ------ + str + Registry key + """ + return join(self.visualstudio, 'SxS') + + @property + def vc(self): + """ + Microsoft Visual C++ VC7 registry key. + + Return + ------ + str + Registry key + """ + return join(self.sxs, 'VC7') + + @property + def vs(self): + """ + Microsoft Visual Studio VS7 registry key. + + Return + ------ + str + Registry key + """ + return join(self.sxs, 'VS7') + + @property + def vc_for_python(self): + """ + Microsoft Visual C++ for Python registry key. + + Return + ------ + str + Registry key + """ + return r'DevDiv\VCForPython' + + @property + def microsoft_sdk(self): + """ + Microsoft SDK registry key. + + Return + ------ + str + Registry key + """ + return 'Microsoft SDKs' + + @property + def windows_sdk(self): + """ + Microsoft Windows/Platform SDK registry key. + + Return + ------ + str + Registry key + """ + return join(self.microsoft_sdk, 'Windows') + + @property + def netfx_sdk(self): + """ + Microsoft .NET Framework SDK registry key. + + Return + ------ + str + Registry key + """ + return join(self.microsoft_sdk, 'NETFXSDK') + + @property + def windows_kits_roots(self): + """ + Microsoft Windows Kits Roots registry key. + + Return + ------ + str + Registry key + """ + return r'Windows Kits\Installed Roots' + + def microsoft(self, key, x86=False): + """ + Return key in Microsoft software registry. + + Parameters + ---------- + key: str + Registry key path where look. + x86: str + Force x86 software registry. + + Return + ------ + str + Registry key + """ + node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node' + return join('Software', node64, 'Microsoft', key) + + def lookup(self, key, name): + """ + Look for values in registry in Microsoft software registry. + + Parameters + ---------- + key: str + Registry key path where look. + name: str + Value name to find. + + Return + ------ + str + value + """ + key_read = winreg.KEY_READ + openkey = winreg.OpenKey + ms = self.microsoft + for hkey in self.HKEYS: + try: + bkey = openkey(hkey, ms(key), 0, key_read) + except (OSError, IOError): + if not self.pi.current_is_x86(): + try: + bkey = openkey(hkey, ms(key, True), 0, key_read) + except (OSError, IOError): + continue + else: + continue + try: + return winreg.QueryValueEx(bkey, name)[0] + except (OSError, IOError): + pass + + +class SystemInfo: + """ + Microsoft Windows and Visual Studio related system information. + + Parameters + ---------- + registry_info: RegistryInfo + "RegistryInfo" instance. + vc_ver: float + Required Microsoft Visual C++ version. + """ + + # Variables and properties in this class use originals CamelCase variables + # names from Microsoft source files for more easy comparison. 
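+    # On non-Windows platforms `environ` is the empty mock defined at the
+    # top of this module, so these attributes fall back to ''.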
+ WinDir = environ.get('WinDir', '') + ProgramFiles = environ.get('ProgramFiles', '') + ProgramFilesx86 = environ.get('ProgramFiles(x86)', ProgramFiles) + + def __init__(self, registry_info, vc_ver=None): + self.ri = registry_info + self.pi = self.ri.pi + + self.known_vs_paths = self.find_programdata_vs_vers() + + # Except for VS15+, VC version is aligned with VS version + self.vs_ver = self.vc_ver = ( + vc_ver or self._find_latest_available_vs_ver()) + + def _find_latest_available_vs_ver(self): + """ + Find the latest VC version + + Return + ------ + float + version + """ + reg_vc_vers = self.find_reg_vs_vers() + + if not (reg_vc_vers or self.known_vs_paths): + raise distutils.errors.DistutilsPlatformError( + 'No Microsoft Visual C++ version found') + + vc_vers = set(reg_vc_vers) + vc_vers.update(self.known_vs_paths) + return sorted(vc_vers)[-1] + + def find_reg_vs_vers(self): + """ + Find Microsoft Visual Studio versions available in registry. + + Return + ------ + list of float + Versions + """ + ms = self.ri.microsoft + vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs) + vs_vers = [] + for hkey in self.ri.HKEYS: + for key in vckeys: + try: + bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ) + except (OSError, IOError): + continue + subkeys, values, _ = winreg.QueryInfoKey(bkey) + for i in range(values): + try: + ver = float(winreg.EnumValue(bkey, i)[0]) + if ver not in vs_vers: + vs_vers.append(ver) + except ValueError: + pass + for i in range(subkeys): + try: + ver = float(winreg.EnumKey(bkey, i)) + if ver not in vs_vers: + vs_vers.append(ver) + except ValueError: + pass + return sorted(vs_vers) + + def find_programdata_vs_vers(self): + r""" + Find Visual studio 2017+ versions from information in + "C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances". + + Return + ------ + dict + float version as key, path as value. + """ + vs_versions = {} + instances_dir = \ + r'C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances' + + try: + hashed_names = listdir(instances_dir) + + except (OSError, IOError): + # Directory not exists with all Visual Studio versions + return vs_versions + + for name in hashed_names: + try: + # Get VS installation path from "state.json" file + state_path = join(instances_dir, name, 'state.json') + with open(state_path, 'rt', encoding='utf-8') as state_file: + state = json.load(state_file) + vs_path = state['installationPath'] + + # Raises OSError if this VS installation does not contain VC + listdir(join(vs_path, r'VC\Tools\MSVC')) + + # Store version and path + vs_versions[self._as_float_version( + state['installationVersion'])] = vs_path + + except (OSError, IOError, KeyError): + # Skip if "state.json" file is missing or bad format + continue + + return vs_versions + + @staticmethod + def _as_float_version(version): + """ + Return a string version as a simplified float version (major.minor) + + Parameters + ---------- + version: str + Version. + + Return + ------ + float + version + """ + return float('.'.join(version.split('.')[:2])) + + @property + def VSInstallDir(self): + """ + Microsoft Visual Studio directory. + + Return + ------ + str + path + """ + # Default path + default = join(self.ProgramFilesx86, + 'Microsoft Visual Studio %0.1f' % self.vs_ver) + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vs, '%0.1f' % self.vs_ver) or default + + @property + def VCInstallDir(self): + """ + Microsoft Visual C++ directory. 
+ + Return + ------ + str + path + """ + path = self._guess_vc() or self._guess_vc_legacy() + + if not isdir(path): + msg = 'Microsoft Visual C++ directory not found' + raise distutils.errors.DistutilsPlatformError(msg) + + return path + + def _guess_vc(self): + """ + Locate Visual C++ for VS2017+. + + Return + ------ + str + path + """ + if self.vs_ver <= 14.0: + return '' + + try: + # First search in known VS paths + vs_dir = self.known_vs_paths[self.vs_ver] + except KeyError: + # Else, search with path from registry + vs_dir = self.VSInstallDir + + guess_vc = join(vs_dir, r'VC\Tools\MSVC') + + # Subdir with VC exact version as name + try: + # Update the VC version with real one instead of VS version + vc_ver = listdir(guess_vc)[-1] + self.vc_ver = self._as_float_version(vc_ver) + return join(guess_vc, vc_ver) + except (OSError, IOError, IndexError): + return '' + + def _guess_vc_legacy(self): + """ + Locate Visual C++ for versions prior to 2017. + + Return + ------ + str + path + """ + default = join(self.ProgramFilesx86, + r'Microsoft Visual Studio %0.1f\VC' % self.vs_ver) + + # Try to get "VC++ for Python" path from registry as default path + reg_path = join(self.ri.vc_for_python, '%0.1f' % self.vs_ver) + python_vc = self.ri.lookup(reg_path, 'installdir') + default_vc = join(python_vc, 'VC') if python_vc else default + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vc, '%0.1f' % self.vs_ver) or default_vc + + @property + def WindowsSdkVersion(self): + """ + Microsoft Windows SDK versions for specified MSVC++ version. + + Return + ------ + tuple of str + versions + """ + if self.vs_ver <= 9.0: + return '7.0', '6.1', '6.0a' + elif self.vs_ver == 10.0: + return '7.1', '7.0a' + elif self.vs_ver == 11.0: + return '8.0', '8.0a' + elif self.vs_ver == 12.0: + return '8.1', '8.1a' + elif self.vs_ver >= 14.0: + return '10.0', '8.1' + + @property + def WindowsSdkLastVersion(self): + """ + Microsoft Windows SDK last version. + + Return + ------ + str + version + """ + return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib')) + + @property + def WindowsSdkDir(self): + """ + Microsoft Windows SDK directory. + + Return + ------ + str + path + """ + sdkdir = '' + for ver in self.WindowsSdkVersion: + # Try to get it from registry + loc = join(self.ri.windows_sdk, 'v%s' % ver) + sdkdir = self.ri.lookup(loc, 'installationfolder') + if sdkdir: + break + if not sdkdir or not isdir(sdkdir): + # Try to get "VC++ for Python" version from registry + path = join(self.ri.vc_for_python, '%0.1f' % self.vc_ver) + install_base = self.ri.lookup(path, 'installdir') + if install_base: + sdkdir = join(install_base, 'WinSDK') + if not sdkdir or not isdir(sdkdir): + # If fail, use default new path + for ver in self.WindowsSdkVersion: + intver = ver[:ver.rfind('.')] + path = r'Microsoft SDKs\Windows Kits\%s' % intver + d = join(self.ProgramFiles, path) + if isdir(d): + sdkdir = d + if not sdkdir or not isdir(sdkdir): + # If fail, use default old path + for ver in self.WindowsSdkVersion: + path = r'Microsoft SDKs\Windows\v%s' % ver + d = join(self.ProgramFiles, path) + if isdir(d): + sdkdir = d + if not sdkdir: + # If fail, use Platform SDK + sdkdir = join(self.VCInstallDir, 'PlatformSDK') + return sdkdir + + @property + def WindowsSDKExecutablePath(self): + """ + Microsoft Windows SDK executable directory. 
+ + Return + ------ + str + path + """ + # Find WinSDK NetFx Tools registry dir name + if self.vs_ver <= 11.0: + netfxver = 35 + arch = '' + else: + netfxver = 40 + hidex86 = True if self.vs_ver <= 12.0 else False + arch = self.pi.current_dir(x64=True, hidex86=hidex86) + fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-')) + + # list all possibles registry paths + regpaths = [] + if self.vs_ver >= 14.0: + for ver in self.NetFxSdkVersion: + regpaths += [join(self.ri.netfx_sdk, ver, fx)] + + for ver in self.WindowsSdkVersion: + regpaths += [join(self.ri.windows_sdk, 'v%sA' % ver, fx)] + + # Return installation folder from the more recent path + for path in regpaths: + execpath = self.ri.lookup(path, 'installationfolder') + if execpath: + return execpath + + @property + def FSharpInstallDir(self): + """ + Microsoft Visual F# directory. + + Return + ------ + str + path + """ + path = join(self.ri.visualstudio, r'%0.1f\Setup\F#' % self.vs_ver) + return self.ri.lookup(path, 'productdir') or '' + + @property + def UniversalCRTSdkDir(self): + """ + Microsoft Universal CRT SDK directory. + + Return + ------ + str + path + """ + # Set Kit Roots versions for specified MSVC++ version + vers = ('10', '81') if self.vs_ver >= 14.0 else () + + # Find path of the more recent Kit + for ver in vers: + sdkdir = self.ri.lookup(self.ri.windows_kits_roots, + 'kitsroot%s' % ver) + if sdkdir: + return sdkdir or '' + + @property + def UniversalCRTSdkLastVersion(self): + """ + Microsoft Universal C Runtime SDK last version. + + Return + ------ + str + version + """ + return self._use_last_dir_name(join(self.UniversalCRTSdkDir, 'lib')) + + @property + def NetFxSdkVersion(self): + """ + Microsoft .NET Framework SDK versions. + + Return + ------ + tuple of str + versions + """ + # Set FxSdk versions for specified VS version + return (('4.7.2', '4.7.1', '4.7', + '4.6.2', '4.6.1', '4.6', + '4.5.2', '4.5.1', '4.5') + if self.vs_ver >= 14.0 else ()) + + @property + def NetFxSdkDir(self): + """ + Microsoft .NET Framework SDK directory. + + Return + ------ + str + path + """ + sdkdir = '' + for ver in self.NetFxSdkVersion: + loc = join(self.ri.netfx_sdk, ver) + sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder') + if sdkdir: + break + return sdkdir + + @property + def FrameworkDir32(self): + """ + Microsoft .NET Framework 32bit directory. + + Return + ------ + str + path + """ + # Default path + guess_fw = join(self.WinDir, r'Microsoft.NET\Framework') + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw + + @property + def FrameworkDir64(self): + """ + Microsoft .NET Framework 64bit directory. + + Return + ------ + str + path + """ + # Default path + guess_fw = join(self.WinDir, r'Microsoft.NET\Framework64') + + # Try to get path from registry, if fail use default path + return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw + + @property + def FrameworkVersion32(self): + """ + Microsoft .NET Framework 32bit versions. + + Return + ------ + tuple of str + versions + """ + return self._find_dot_net_versions(32) + + @property + def FrameworkVersion64(self): + """ + Microsoft .NET Framework 64bit versions. + + Return + ------ + tuple of str + versions + """ + return self._find_dot_net_versions(64) + + def _find_dot_net_versions(self, bits): + """ + Find Microsoft .NET Framework versions. + + Parameters + ---------- + bits: int + Platform number of bits: 32 or 64. 
+ + Return + ------ + tuple of str + versions + """ + # Find actual .NET version in registry + reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits) + dot_net_dir = getattr(self, 'FrameworkDir%d' % bits) + ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or '' + + # Set .NET versions for specified MSVC++ version + if self.vs_ver >= 12.0: + return ver, 'v4.0' + elif self.vs_ver >= 10.0: + return 'v4.0.30319' if ver.lower()[:2] != 'v4' else ver, 'v3.5' + elif self.vs_ver == 9.0: + return 'v3.5', 'v2.0.50727' + elif self.vs_ver == 8.0: + return 'v3.0', 'v2.0.50727' + + @staticmethod + def _use_last_dir_name(path, prefix=''): + """ + Return name of the last dir in path or '' if no dir found. + + Parameters + ---------- + path: str + Use dirs in this path + prefix: str + Use only dirs starting by this prefix + + Return + ------ + str + name + """ + matching_dirs = ( + dir_name + for dir_name in reversed(listdir(path)) + if isdir(join(path, dir_name)) and + dir_name.startswith(prefix) + ) + return next(matching_dirs, None) or '' + + +class EnvironmentInfo: + """ + Return environment variables for specified Microsoft Visual C++ version + and platform : Lib, Include, Path and libpath. + + This function is compatible with Microsoft Visual C++ 9.0 to 14.X. + + Script created by analysing Microsoft environment configuration files like + "vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ... + + Parameters + ---------- + arch: str + Target architecture. + vc_ver: float + Required Microsoft Visual C++ version. If not set, autodetect the last + version. + vc_min_ver: float + Minimum Microsoft Visual C++ version. + """ + + # Variables and properties in this class use originals CamelCase variables + # names from Microsoft source files for more easy comparison. + + def __init__(self, arch, vc_ver=None, vc_min_ver=0): + self.pi = PlatformInfo(arch) + self.ri = RegistryInfo(self.pi) + self.si = SystemInfo(self.ri, vc_ver) + + if self.vc_ver < vc_min_ver: + err = 'No suitable Microsoft Visual C++ version found' + raise distutils.errors.DistutilsPlatformError(err) + + @property + def vs_ver(self): + """ + Microsoft Visual Studio. + + Return + ------ + float + version + """ + return self.si.vs_ver + + @property + def vc_ver(self): + """ + Microsoft Visual C++ version. + + Return + ------ + float + version + """ + return self.si.vc_ver + + @property + def VSTools(self): + """ + Microsoft Visual Studio Tools. + + Return + ------ + list of str + paths + """ + paths = [r'Common7\IDE', r'Common7\Tools'] + + if self.vs_ver >= 14.0: + arch_subdir = self.pi.current_dir(hidex86=True, x64=True) + paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow'] + paths += [r'Team Tools\Performance Tools'] + paths += [r'Team Tools\Performance Tools%s' % arch_subdir] + + return [join(self.si.VSInstallDir, path) for path in paths] + + @property + def VCIncludes(self): + """ + Microsoft Visual C++ & Microsoft Foundation Class Includes. + + Return + ------ + list of str + paths + """ + return [join(self.si.VCInstallDir, 'Include'), + join(self.si.VCInstallDir, r'ATLMFC\Include')] + + @property + def VCLibraries(self): + """ + Microsoft Visual C++ & Microsoft Foundation Class Libraries. 
+ + Return + ------ + list of str + paths + """ + if self.vs_ver >= 15.0: + arch_subdir = self.pi.target_dir(x64=True) + else: + arch_subdir = self.pi.target_dir(hidex86=True) + paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir] + + if self.vs_ver >= 14.0: + paths += [r'Lib\store%s' % arch_subdir] + + return [join(self.si.VCInstallDir, path) for path in paths] + + @property + def VCStoreRefs(self): + """ + Microsoft Visual C++ store references Libraries. + + Return + ------ + list of str + paths + """ + if self.vs_ver < 14.0: + return [] + return [join(self.si.VCInstallDir, r'Lib\store\references')] + + @property + def VCTools(self): + """ + Microsoft Visual C++ Tools. + + Return + ------ + list of str + paths + """ + si = self.si + tools = [join(si.VCInstallDir, 'VCPackages')] + + forcex86 = True if self.vs_ver <= 10.0 else False + arch_subdir = self.pi.cross_dir(forcex86) + if arch_subdir: + tools += [join(si.VCInstallDir, 'Bin%s' % arch_subdir)] + + if self.vs_ver == 14.0: + path = 'Bin%s' % self.pi.current_dir(hidex86=True) + tools += [join(si.VCInstallDir, path)] + + elif self.vs_ver >= 15.0: + host_dir = (r'bin\HostX86%s' if self.pi.current_is_x86() else + r'bin\HostX64%s') + tools += [join( + si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))] + + if self.pi.current_cpu != self.pi.target_cpu: + tools += [join( + si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))] + + else: + tools += [join(si.VCInstallDir, 'Bin')] + + return tools + + @property + def OSLibraries(self): + """ + Microsoft Windows SDK Libraries. + + Return + ------ + list of str + paths + """ + if self.vs_ver <= 10.0: + arch_subdir = self.pi.target_dir(hidex86=True, x64=True) + return [join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)] + + else: + arch_subdir = self.pi.target_dir(x64=True) + lib = join(self.si.WindowsSdkDir, 'lib') + libver = self._sdk_subdir + return [join(lib, '%sum%s' % (libver , arch_subdir))] + + @property + def OSIncludes(self): + """ + Microsoft Windows SDK Include. + + Return + ------ + list of str + paths + """ + include = join(self.si.WindowsSdkDir, 'include') + + if self.vs_ver <= 10.0: + return [include, join(include, 'gl')] + + else: + if self.vs_ver >= 14.0: + sdkver = self._sdk_subdir + else: + sdkver = '' + return [join(include, '%sshared' % sdkver), + join(include, '%sum' % sdkver), + join(include, '%swinrt' % sdkver)] + + @property + def OSLibpath(self): + """ + Microsoft Windows SDK Libraries Paths. + + Return + ------ + list of str + paths + """ + ref = join(self.si.WindowsSdkDir, 'References') + libpath = [] + + if self.vs_ver <= 9.0: + libpath += self.OSLibraries + + if self.vs_ver >= 11.0: + libpath += [join(ref, r'CommonConfiguration\Neutral')] + + if self.vs_ver >= 14.0: + libpath += [ + ref, + join(self.si.WindowsSdkDir, 'UnionMetadata'), + join(ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'), + join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'), + join(ref,'Windows.Networking.Connectivity.WwanContract', + '1.0.0.0'), + join(self.si.WindowsSdkDir, 'ExtensionSDKs', 'Microsoft.VCLibs', + '%0.1f' % self.vs_ver, 'References', 'CommonConfiguration', + 'neutral'), + ] + return libpath + + @property + def SdkTools(self): + """ + Microsoft Windows SDK Tools. + + Return + ------ + list of str + paths + """ + return list(self._sdk_tools()) + + def _sdk_tools(self): + """ + Microsoft Windows SDK Tools paths generator. 
+ + Return + ------ + generator of str + paths + """ + if self.vs_ver < 15.0: + bin_dir = 'Bin' if self.vs_ver <= 11.0 else r'Bin\x86' + yield join(self.si.WindowsSdkDir, bin_dir) + + if not self.pi.current_is_x86(): + arch_subdir = self.pi.current_dir(x64=True) + path = 'Bin%s' % arch_subdir + yield join(self.si.WindowsSdkDir, path) + + if self.vs_ver in (10.0, 11.0): + if self.pi.target_is_x86(): + arch_subdir = '' + else: + arch_subdir = self.pi.current_dir(hidex86=True, x64=True) + path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir + yield join(self.si.WindowsSdkDir, path) + + elif self.vs_ver >= 15.0: + path = join(self.si.WindowsSdkDir, 'Bin') + arch_subdir = self.pi.current_dir(x64=True) + sdkver = self.si.WindowsSdkLastVersion + yield join(path, '%s%s' % (sdkver, arch_subdir)) + + if self.si.WindowsSDKExecutablePath: + yield self.si.WindowsSDKExecutablePath + + @property + def _sdk_subdir(self): + """ + Microsoft Windows SDK version subdir. + + Return + ------ + str + subdir + """ + ucrtver = self.si.WindowsSdkLastVersion + return ('%s\\' % ucrtver) if ucrtver else '' + + @property + def SdkSetup(self): + """ + Microsoft Windows SDK Setup. + + Return + ------ + list of str + paths + """ + if self.vs_ver > 9.0: + return [] + + return [join(self.si.WindowsSdkDir, 'Setup')] + + @property + def FxTools(self): + """ + Microsoft .NET Framework Tools. + + Return + ------ + list of str + paths + """ + pi = self.pi + si = self.si + + if self.vs_ver <= 10.0: + include32 = True + include64 = not pi.target_is_x86() and not pi.current_is_x86() + else: + include32 = pi.target_is_x86() or pi.current_is_x86() + include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64' + + tools = [] + if include32: + tools += [join(si.FrameworkDir32, ver) + for ver in si.FrameworkVersion32] + if include64: + tools += [join(si.FrameworkDir64, ver) + for ver in si.FrameworkVersion64] + return tools + + @property + def NetFxSDKLibraries(self): + """ + Microsoft .Net Framework SDK Libraries. + + Return + ------ + list of str + paths + """ + if self.vs_ver < 14.0 or not self.si.NetFxSdkDir: + return [] + + arch_subdir = self.pi.target_dir(x64=True) + return [join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)] + + @property + def NetFxSDKIncludes(self): + """ + Microsoft .Net Framework SDK Includes. + + Return + ------ + list of str + paths + """ + if self.vs_ver < 14.0 or not self.si.NetFxSdkDir: + return [] + + return [join(self.si.NetFxSdkDir, r'include\um')] + + @property + def VsTDb(self): + """ + Microsoft Visual Studio Team System Database. + + Return + ------ + list of str + paths + """ + return [join(self.si.VSInstallDir, r'VSTSDB\Deploy')] + + @property + def MSBuild(self): + """ + Microsoft Build Engine. + + Return + ------ + list of str + paths + """ + if self.vs_ver < 12.0: + return [] + elif self.vs_ver < 15.0: + base_path = self.si.ProgramFilesx86 + arch_subdir = self.pi.current_dir(hidex86=True) + else: + base_path = self.si.VSInstallDir + arch_subdir = '' + + path = r'MSBuild\%0.1f\bin%s' % (self.vs_ver, arch_subdir) + build = [join(base_path, path)] + + if self.vs_ver >= 15.0: + # Add Roslyn C# & Visual Basic Compiler + build += [join(base_path, path, 'Roslyn')] + + return build + + @property + def HTMLHelpWorkshop(self): + """ + Microsoft HTML Help Workshop. 
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        if self.vs_ver < 11.0:
+            return []
+
+        return [join(self.si.ProgramFilesx86, 'HTML Help Workshop')]
+
+    @property
+    def UCRTLibraries(self):
+        """
+        Microsoft Universal C Runtime SDK Libraries.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        if self.vs_ver < 14.0:
+            return []
+
+        arch_subdir = self.pi.target_dir(x64=True)
+        lib = join(self.si.UniversalCRTSdkDir, 'lib')
+        ucrtver = self._ucrt_subdir
+        return [join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]
+
+    @property
+    def UCRTIncludes(self):
+        """
+        Microsoft Universal C Runtime SDK Include.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        if self.vs_ver < 14.0:
+            return []
+
+        include = join(self.si.UniversalCRTSdkDir, 'include')
+        return [join(include, '%sucrt' % self._ucrt_subdir)]
+
+    @property
+    def _ucrt_subdir(self):
+        """
+        Microsoft Universal C Runtime SDK version subdir.
+
+        Return
+        ------
+        str
+            subdir
+        """
+        ucrtver = self.si.UniversalCRTSdkLastVersion
+        return ('%s\\' % ucrtver) if ucrtver else ''
+
+    @property
+    def FSharp(self):
+        """
+        Microsoft Visual F#.
+
+        Return
+        ------
+        list of str
+            paths
+        """
+        # F# directories are only registered for Visual Studio 11.0 and 12.0
+        if not (11.0 <= self.vs_ver <= 12.0):
+            return []
+
+        return [self.si.FSharpInstallDir]
+
+    @property
+    def VCRuntimeRedist(self):
+        """
+        Microsoft Visual C++ runtime redistributable dll.
+
+        Return
+        ------
+        str
+            path
+        """
+        vcruntime = 'vcruntime%d0.dll' % self.vc_ver
+        arch_subdir = self.pi.target_dir(x64=True).strip('\\')
+
+        # Candidate installation prefixes
+        prefixes = []
+        tools_path = self.si.VCInstallDir
+        redist_path = dirname(tools_path.replace(r'\Tools', r'\Redist'))
+        if isdir(redist_path):
+            # Redist version may not be exactly the same as tools
+            redist_path = join(redist_path, listdir(redist_path)[-1])
+            prefixes += [redist_path, join(redist_path, 'onecore')]
+
+        prefixes += [join(tools_path, 'redist')]  # VS14 legacy path
+
+        # CRT directory
+        crt_dirs = ('Microsoft.VC%d.CRT' % (self.vc_ver * 10),
+                    # Sometimes stored in a directory named after the VS
+                    # version instead of the VC version
+                    'Microsoft.VC%d.CRT' % (int(self.vs_ver) * 10))
+
+        # vcruntime path
+        for prefix, crt_dir in itertools.product(prefixes, crt_dirs):
+            path = join(prefix, arch_subdir, crt_dir, vcruntime)
+            if isfile(path):
+                return path
+
+    def return_env(self, exists=True):
+        """
+        Return environment dict.
+
+        Parameters
+        ----------
+        exists: bool
+            If True, only return existing paths.
+
+        Return
+        ------
+        dict
+            environment
+        """
+        env = dict(
+            include=self._build_paths('include',
+                                      [self.VCIncludes,
+                                       self.OSIncludes,
+                                       self.UCRTIncludes,
+                                       self.NetFxSDKIncludes],
+                                      exists),
+            lib=self._build_paths('lib',
+                                  [self.VCLibraries,
+                                   self.OSLibraries,
+                                   self.FxTools,
+                                   self.UCRTLibraries,
+                                   self.NetFxSDKLibraries],
+                                  exists),
+            libpath=self._build_paths('libpath',
+                                      [self.VCLibraries,
+                                       self.FxTools,
+                                       self.VCStoreRefs,
+                                       self.OSLibpath],
+                                      exists),
+            path=self._build_paths('path',
+                                   [self.VCTools,
+                                    self.VSTools,
+                                    self.VsTDb,
+                                    self.SdkTools,
+                                    self.SdkSetup,
+                                    self.FxTools,
+                                    self.MSBuild,
+                                    self.HTMLHelpWorkshop,
+                                    self.FSharp],
+                                   exists),
+        )
+        if self.vs_ver >= 14 and isfile(self.VCRuntimeRedist):
+            env['py_vcruntime_redist'] = self.VCRuntimeRedist
+        return env
+
+    def _build_paths(self, name, spec_path_lists, exists):
+        """
+        Given an environment variable name and specified paths,
+        return a pathsep-separated string of paths containing
+        unique, extant, directories from those paths and from
+        the environment variable. Raise an error if no paths
+        are resolved.
+
+        Parameters
+        ----------
+        name: str
+            Environment variable name
+        spec_path_lists: list of str
+            Paths
+        exists: bool
+            If True, only return existing paths.
+
+        Return
+        ------
+        str
+            Pathsep-separated paths
+        """
+        # flatten spec_path_lists
+        spec_paths = itertools.chain.from_iterable(spec_path_lists)
+        env_paths = environ.get(name, '').split(pathsep)
+        paths = itertools.chain(spec_paths, env_paths)
+        extant_paths = list(filter(isdir, paths)) if exists else paths
+        if not extant_paths:
+            msg = "%s environment variable is empty" % name.upper()
+            raise distutils.errors.DistutilsPlatformError(msg)
+        unique_paths = self._unique_everseen(extant_paths)
+        return pathsep.join(unique_paths)
+
+    # from Python docs
+    @staticmethod
+    def _unique_everseen(iterable, key=None):
+        """
+        List unique elements, preserving order.
+        Remember all elements ever seen.
+
+        _unique_everseen('AAAABBBCCDAABBB') --> A B C D
+
+        _unique_everseen('ABBCcAD', str.lower) --> A B C D
+        """
+        seen = set()
+        seen_add = seen.add
+        if key is None:
+            for element in filterfalse(seen.__contains__, iterable):
+                seen_add(element)
+                yield element
+        else:
+            for element in iterable:
+                k = key(element)
+                if k not in seen:
+                    seen_add(k)
+                    yield element
diff --git a/backend/test/lib/python3.8/site-packages/setuptools/namespaces.py b/backend/test/lib/python3.8/site-packages/setuptools/namespaces.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc16106d3dc7048a160129745756bbc9b1fb51d9
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/setuptools/namespaces.py
@@ -0,0 +1,107 @@
+import os
+from distutils import log
+import itertools
+
+from setuptools.extern.six.moves import map
+
+
+flatten = itertools.chain.from_iterable
+
+
+class Installer:
+
+    nspkg_ext = '-nspkg.pth'
+
+    def install_namespaces(self):
+        nsp = self._get_all_ns_packages()
+        if not nsp:
+            return
+        filename, ext = os.path.splitext(self._get_target())
+        filename += self.nspkg_ext
+        self.outputs.append(filename)
+        log.info("Installing %s", filename)
+        lines = map(self._gen_nspkg_line, nsp)
+
+        if self.dry_run:
+            # always generate the lines, even in dry run
+            list(lines)
+            return
+
+        with open(filename, 'wt') as f:
+            f.writelines(lines)
+
+    def uninstall_namespaces(self):
+        filename, ext = os.path.splitext(self._get_target())
+        filename += self.nspkg_ext
+        if not os.path.exists(filename):
+            return
+        log.info("Removing %s", filename)
+        os.remove(filename)
+
+    def _get_target(self):
+        return self.target
+
+    _nspkg_tmpl = (
+        "import sys, types, os",
+        "has_mfs = sys.version_info > (3, 5)",
+        "p = os.path.join(%(root)s, *%(pth)r)",
+        "importlib = has_mfs and __import__('importlib.util')",
+        "has_mfs and __import__('importlib.machinery')",
+        "m = has_mfs and "
+        "sys.modules.setdefault(%(pkg)r, "
+        "importlib.util.module_from_spec("
+        "importlib.machinery.PathFinder.find_spec(%(pkg)r, "
+        "[os.path.dirname(p)])))",
+        "m = m or "
+        "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
+        "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
+        "(p not in mp) and mp.append(p)",
+    )
+    "lines for the namespace installer"
+
+    _nspkg_tmpl_multi = (
+        'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
+    )
+    "additional line(s) when a parent package is indicated"
+
+    def _get_root(self):
+        return "sys._getframe(1).f_locals['sitedir']"
+
+    def _gen_nspkg_line(self, pkg):
+        # ensure pkg is not a unicode string under Python 2.7
+        pkg = str(pkg)
+        pth = tuple(pkg.split('.'))
+        root = self._get_root()
+        tmpl_lines = self._nspkg_tmpl
+        
parent, sep, child = pkg.rpartition('.') + if parent: + tmpl_lines += self._nspkg_tmpl_multi + return ';'.join(tmpl_lines) % locals() + '\n' + + def _get_all_ns_packages(self): + """Return sorted list of all package namespaces""" + pkgs = self.distribution.namespace_packages or [] + return sorted(flatten(map(self._pkg_names, pkgs))) + + @staticmethod + def _pkg_names(pkg): + """ + Given a namespace package, yield the components of that + package. + + >>> names = Installer._pkg_names('a.b.c') + >>> set(names) == set(['a', 'a.b', 'a.b.c']) + True + """ + parts = pkg.split('.') + while parts: + yield '.'.join(parts) + parts.pop() + + +class DevelopInstaller(Installer): + def _get_root(self): + return repr(str(self.egg_path)) + + def _get_target(self): + return self.egg_link diff --git a/backend/test/lib/python3.8/site-packages/setuptools/package_index.py b/backend/test/lib/python3.8/site-packages/setuptools/package_index.py new file mode 100644 index 0000000000000000000000000000000000000000..9a2da9d5aca817555b919e7b09e9a2535cb9c1fd --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/package_index.py @@ -0,0 +1,1136 @@ +"""PyPI and direct package downloading""" +import sys +import os +import re +import shutil +import socket +import base64 +import hashlib +import itertools +import warnings +from functools import wraps + +from setuptools.extern import six +from setuptools.extern.six.moves import urllib, http_client, configparser, map + +import setuptools +from pkg_resources import ( + CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST, + Environment, find_distributions, safe_name, safe_version, + to_filename, Requirement, DEVELOP_DIST, EGG_DIST, +) +from setuptools import ssl_support +from distutils import log +from distutils.errors import DistutilsError +from fnmatch import translate +from setuptools.py27compat import get_all_headers +from setuptools.py33compat import unescape +from setuptools.wheel import Wheel + +__metaclass__ = type + +EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.+!]+)$') +HREF = re.compile(r"""href\s*=\s*['"]?([^'"> ]+)""", re.I) +PYPI_MD5 = re.compile( + r'<a href="([^"#]+)">([^<]+)</a>\n\s+\(<a (?:title="MD5 hash"\n\s+)' + r'href="[^?]+\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\)' +) +URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match +EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split() + +__all__ = [ + 'PackageIndex', 'distros_for_url', 'parse_bdist_wininst', + 'interpret_distro_name', +] + +_SOCKET_TIMEOUT = 15 + +_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}" +user_agent = _tmpl.format(py_major='{}.{}'.format(*sys.version_info), setuptools=setuptools) + + +def parse_requirement_arg(spec): + try: + return Requirement.parse(spec) + except ValueError: + raise DistutilsError( + "Not a URL, existing file, or requirement spec: %r" % (spec,) + ) + + +def parse_bdist_wininst(name): + """Return (base,pyversion) or (None,None) for possible .exe name""" + + lower = name.lower() + base, py_ver, plat = None, None, None + + if lower.endswith('.exe'): + if lower.endswith('.win32.exe'): + base = name[:-10] + plat = 'win32' + elif lower.startswith('.win32-py', -16): + py_ver = name[-7:-4] + base = name[:-16] + plat = 'win32' + elif lower.endswith('.win-amd64.exe'): + base = name[:-14] + plat = 'win-amd64' + elif lower.startswith('.win-amd64-py', -20): + py_ver = name[-7:-4] + base = name[:-20] + plat = 'win-amd64' + return base, py_ver, plat + + +def egg_info_for_url(url): + parts = 
urllib.parse.urlparse(url)
+    scheme, server, path, parameters, query, fragment = parts
+    base = urllib.parse.unquote(path.split('/')[-1])
+    if server == 'sourceforge.net' and base == 'download':  # XXX Yuck
+        base = urllib.parse.unquote(path.split('/')[-2])
+    if '#' in base:
+        base, fragment = base.split('#', 1)
+    return base, fragment
+
+
+def distros_for_url(url, metadata=None):
+    """Yield egg or source distribution objects that might be found at a URL"""
+    base, fragment = egg_info_for_url(url)
+    for dist in distros_for_location(url, base, metadata):
+        yield dist
+    if fragment:
+        match = EGG_FRAGMENT.match(fragment)
+        if match:
+            for dist in interpret_distro_name(
+                url, match.group(1), metadata, precedence=CHECKOUT_DIST
+            ):
+                yield dist
+
+
+def distros_for_location(location, basename, metadata=None):
+    """Yield egg or source distribution objects based on basename"""
+    if basename.endswith('.egg.zip'):
+        basename = basename[:-4]  # strip the .zip
+    if basename.endswith('.egg') and '-' in basename:
+        # only one, unambiguous interpretation
+        return [Distribution.from_location(location, basename, metadata)]
+    if basename.endswith('.whl') and '-' in basename:
+        wheel = Wheel(basename)
+        if not wheel.is_compatible():
+            return []
+        return [Distribution(
+            location=location,
+            project_name=wheel.project_name,
+            version=wheel.version,
+            # Increase priority over eggs.
+            precedence=EGG_DIST + 1,
+        )]
+    if basename.endswith('.exe'):
+        win_base, py_ver, platform = parse_bdist_wininst(basename)
+        if win_base is not None:
+            return interpret_distro_name(
+                location, win_base, metadata, py_ver, BINARY_DIST, platform
+            )
+    # Try source distro extensions (.zip, .tgz, etc.)
+    #
+    for ext in EXTENSIONS:
+        if basename.endswith(ext):
+            basename = basename[:-len(ext)]
+            return interpret_distro_name(location, basename, metadata)
+    return []  # no extension matched
+
+
+def distros_for_filename(filename, metadata=None):
+    """Yield possible egg or source distribution objects based on a filename"""
+    return distros_for_location(
+        normalize_path(filename), os.path.basename(filename), metadata
+    )
+
+
+def interpret_distro_name(
+        location, basename, metadata, py_version=None, precedence=SOURCE_DIST,
+        platform=None
+):
+    """Generate alternative interpretations of a source distro name
+
+    Note: if `location` is a filesystem filename, you should call
+    ``pkg_resources.normalize_path()`` on it before passing it to this
+    routine!
+    """
+    # Generate alternative interpretations of a source distro name
+    # Because some packages are ambiguous as to name/versions split
+    # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
+    # So, we generate each possible interpretation (e.g. "adns, python-1.1.0"
+    # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice,
+    # the spurious interpretations should be ignored, because in the event
+    # there's also an "adns" package, the spurious "python-1.1.0" version will
+    # compare lower than any numeric version number, and is therefore unlikely
+    # to match a request for it. It's still a potential problem, though, and
+    # in the long run PyPI and the distutils should go for "safe" names and
+    # versions in distribution archive names (sdist and bdist).
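+    # Illustrative walk-through (editor's note, using the example above): for
+    # basename "adns-python-1.1.0" the loop below yields candidates named
+    # "adns" (version "python-1.1.0"), "adns-python" (version "1.1.0") and
+    # "adns-python-1.1.0" (empty version), one per possible split point.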
+ + parts = basename.split('-') + if not py_version and any(re.match(r'py\d\.\d$', p) for p in parts[2:]): + # it is a bdist_dumb, not an sdist -- bail out + return + + for p in range(1, len(parts) + 1): + yield Distribution( + location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), + py_version=py_version, precedence=precedence, + platform=platform + ) + + +# From Python 2.7 docs +def unique_everseen(iterable, key=None): + "List unique elements, preserving order. Remember all elements ever seen." + # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen = set() + seen_add = seen.add + if key is None: + for element in six.moves.filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element + + +def unique_values(func): + """ + Wrap a function returning an iterable such that the resulting iterable + only ever yields unique items. + """ + + @wraps(func) + def wrapper(*args, **kwargs): + return unique_everseen(func(*args, **kwargs)) + + return wrapper + + +REL = re.compile(r"""<([^>]*\srel\s{0,10}=\s{0,10}['"]?([^'" >]+)[^>]*)>""", re.I) +# this line is here to fix emacs' cruddy broken syntax highlighting + + +@unique_values +def find_external_links(url, page): + """Find rel="homepage" and rel="download" links in `page`, yielding URLs""" + + for match in REL.finditer(page): + tag, rel = match.groups() + rels = set(map(str.strip, rel.lower().split(','))) + if 'homepage' in rels or 'download' in rels: + for match in HREF.finditer(tag): + yield urllib.parse.urljoin(url, htmldecode(match.group(1))) + + for tag in ("<th>Home Page", "<th>Download URL"): + pos = page.find(tag) + if pos != -1: + match = HREF.search(page, pos) + if match: + yield urllib.parse.urljoin(url, htmldecode(match.group(1))) + + +class ContentChecker: + """ + A null content checker that defines the interface for checking content + """ + + def feed(self, block): + """ + Feed a block of data to the hash. + """ + return + + def is_valid(self): + """ + Check the hash. Return False if validation fails. + """ + return True + + def report(self, reporter, template): + """ + Call reporter with information about the checker (hash name) + substituted into the template. 
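+
+        Illustrative call (editor's sketch; mirrors how ``check_hash``
+        uses it)::
+
+            checker.report(log.debug, "Validating %s checksum")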
+ """ + return + + +class HashChecker(ContentChecker): + pattern = re.compile( + r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)=' + r'(?P<expected>[a-f0-9]+)' + ) + + def __init__(self, hash_name, expected): + self.hash_name = hash_name + self.hash = hashlib.new(hash_name) + self.expected = expected + + @classmethod + def from_url(cls, url): + "Construct a (possibly null) ContentChecker from a URL" + fragment = urllib.parse.urlparse(url)[-1] + if not fragment: + return ContentChecker() + match = cls.pattern.search(fragment) + if not match: + return ContentChecker() + return cls(**match.groupdict()) + + def feed(self, block): + self.hash.update(block) + + def is_valid(self): + return self.hash.hexdigest() == self.expected + + def report(self, reporter, template): + msg = template % self.hash_name + return reporter(msg) + + +class PackageIndex(Environment): + """A distribution index that scans web pages for download URLs""" + + def __init__( + self, index_url="https://pypi.org/simple/", hosts=('*',), + ca_bundle=None, verify_ssl=True, *args, **kw + ): + Environment.__init__(self, *args, **kw) + self.index_url = index_url + "/" [:not index_url.endswith('/')] + self.scanned_urls = {} + self.fetched_urls = {} + self.package_pages = {} + self.allows = re.compile('|'.join(map(translate, hosts))).match + self.to_scan = [] + use_ssl = ( + verify_ssl + and ssl_support.is_available + and (ca_bundle or ssl_support.find_ca_bundle()) + ) + if use_ssl: + self.opener = ssl_support.opener_for(ca_bundle) + else: + self.opener = urllib.request.urlopen + + def process_url(self, url, retrieve=False): + """Evaluate a URL as a possible download, and maybe retrieve it""" + if url in self.scanned_urls and not retrieve: + return + self.scanned_urls[url] = True + if not URL_SCHEME(url): + self.process_filename(url) + return + else: + dists = list(distros_for_url(url)) + if dists: + if not self.url_ok(url): + return + self.debug("Found link: %s", url) + + if dists or not retrieve or url in self.fetched_urls: + list(map(self.add, dists)) + return # don't need the actual page + + if not self.url_ok(url): + self.fetched_urls[url] = True + return + + self.info("Reading %s", url) + self.fetched_urls[url] = True # prevent multiple fetch attempts + tmpl = "Download error on %s: %%s -- Some packages may not be found!" + f = self.open_url(url, tmpl % url) + if f is None: + return + self.fetched_urls[f.url] = True + if 'html' not in f.headers.get('content-type', '').lower(): + f.close() # not html, we can't process it + return + + base = f.url # handle redirects + page = f.read() + if not isinstance(page, str): + # In Python 3 and got bytes but want str. 
+ if isinstance(f, urllib.error.HTTPError): + # Errors have no charset, assume latin1: + charset = 'latin-1' + else: + charset = f.headers.get_param('charset') or 'latin-1' + page = page.decode(charset, "ignore") + f.close() + for match in HREF.finditer(page): + link = urllib.parse.urljoin(base, htmldecode(match.group(1))) + self.process_url(link) + if url.startswith(self.index_url) and getattr(f, 'code', None) != 404: + page = self.process_index(url, page) + + def process_filename(self, fn, nested=False): + # process filenames or directories + if not os.path.exists(fn): + self.warn("Not found: %s", fn) + return + + if os.path.isdir(fn) and not nested: + path = os.path.realpath(fn) + for item in os.listdir(path): + self.process_filename(os.path.join(path, item), True) + + dists = distros_for_filename(fn) + if dists: + self.debug("Found: %s", fn) + list(map(self.add, dists)) + + def url_ok(self, url, fatal=False): + s = URL_SCHEME(url) + is_file = s and s.group(1).lower() == 'file' + if is_file or self.allows(urllib.parse.urlparse(url)[1]): + return True + msg = ( + "\nNote: Bypassing %s (disallowed host; see " + "http://bit.ly/2hrImnY for details).\n") + if fatal: + raise DistutilsError(msg % url) + else: + self.warn(msg, url) + + def scan_egg_links(self, search_path): + dirs = filter(os.path.isdir, search_path) + egg_links = ( + (path, entry) + for path in dirs + for entry in os.listdir(path) + if entry.endswith('.egg-link') + ) + list(itertools.starmap(self.scan_egg_link, egg_links)) + + def scan_egg_link(self, path, entry): + with open(os.path.join(path, entry)) as raw_lines: + # filter non-empty lines + lines = list(filter(None, map(str.strip, raw_lines))) + + if len(lines) != 2: + # format is not recognized; punt + return + + egg_path, setup_path = lines + + for dist in find_distributions(os.path.join(path, egg_path)): + dist.location = os.path.join(path, *lines) + dist.precedence = SOURCE_DIST + self.add(dist) + + def process_index(self, url, page): + """Process the contents of a PyPI page""" + + def scan(link): + # Process a URL to see if it's for a package page + if link.startswith(self.index_url): + parts = list(map( + urllib.parse.unquote, link[len(self.index_url):].split('/') + )) + if len(parts) == 2 and '#' not in parts[1]: + # it's a package page, sanitize and index it + pkg = safe_name(parts[0]) + ver = safe_version(parts[1]) + self.package_pages.setdefault(pkg.lower(), {})[link] = True + return to_filename(pkg), to_filename(ver) + return None, None + + # process an index page into the package-page index + for match in HREF.finditer(page): + try: + scan(urllib.parse.urljoin(url, htmldecode(match.group(1)))) + except ValueError: + pass + + pkg, ver = scan(url) # ensure this page is in the page index + if pkg: + # process individual package page + for new_url in find_external_links(url, page): + # Process the found URL + base, frag = egg_info_for_url(new_url) + if base.endswith('.py') and not frag: + if ver: + new_url += '#egg=%s-%s' % (pkg, ver) + else: + self.need_version_info(url) + self.scan_url(new_url) + + return PYPI_MD5.sub( + lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page + ) + else: + return "" # no sense double-scanning non-package pages + + def need_version_info(self, url): + self.scan_all( + "Page at %s links to .py file(s) without version info; an index " + "scan is required.", url + ) + + def scan_all(self, msg=None, *args): + if self.index_url not in self.fetched_urls: + if msg: + self.warn(msg, *args) + self.info( + "Scanning index of all 
packages (this may take a while)" + ) + self.scan_url(self.index_url) + + def find_packages(self, requirement): + self.scan_url(self.index_url + requirement.unsafe_name + '/') + + if not self.package_pages.get(requirement.key): + # Fall back to safe version of the name + self.scan_url(self.index_url + requirement.project_name + '/') + + if not self.package_pages.get(requirement.key): + # We couldn't find the target package, so search the index page too + self.not_found_in_index(requirement) + + for url in list(self.package_pages.get(requirement.key, ())): + # scan each page that might be related to the desired package + self.scan_url(url) + + def obtain(self, requirement, installer=None): + self.prescan() + self.find_packages(requirement) + for dist in self[requirement.key]: + if dist in requirement: + return dist + self.debug("%s does not match %s", requirement, dist) + return super(PackageIndex, self).obtain(requirement, installer) + + def check_hash(self, checker, filename, tfp): + """ + checker is a ContentChecker + """ + checker.report( + self.debug, + "Validating %%s checksum for %s" % filename) + if not checker.is_valid(): + tfp.close() + os.unlink(filename) + raise DistutilsError( + "%s validation failed for %s; " + "possible download problem?" + % (checker.hash.name, os.path.basename(filename)) + ) + + def add_find_links(self, urls): + """Add `urls` to the list that will be prescanned for searches""" + for url in urls: + if ( + self.to_scan is None # if we have already "gone online" + or not URL_SCHEME(url) # or it's a local file/directory + or url.startswith('file:') + or list(distros_for_url(url)) # or a direct package link + ): + # then go ahead and process it now + self.scan_url(url) + else: + # otherwise, defer retrieval till later + self.to_scan.append(url) + + def prescan(self): + """Scan urls scheduled for prescanning (e.g. --find-links)""" + if self.to_scan: + list(map(self.scan_url, self.to_scan)) + self.to_scan = None # from now on, go ahead and process immediately + + def not_found_in_index(self, requirement): + if self[requirement.key]: # we've seen at least one distro + meth, msg = self.info, "Couldn't retrieve index page for %r" + else: # no distros seen for this name, might be misspelled + meth, msg = ( + self.warn, + "Couldn't find index page for %r (maybe misspelled?)") + meth(msg, requirement.unsafe_name) + self.scan_all() + + def download(self, spec, tmpdir): + """Locate and/or download `spec` to `tmpdir`, returning a local path + + `spec` may be a ``Requirement`` object, or a string containing a URL, + an existing local filename, or a project/version requirement spec + (i.e. the string form of a ``Requirement`` object). If it is the URL + of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one + that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is + automatically created alongside the downloaded file. + + If `spec` is a ``Requirement`` object or a string containing a + project/version requirement spec, this method returns the location of + a matching distribution (possibly after downloading it to `tmpdir`). + If `spec` is a locally existing file or directory name, it is simply + returned unchanged. If `spec` is a URL, it is downloaded to a subpath + of `tmpdir`, and the local filename is returned. Various errors may be + raised if a problem occurs during downloading. 
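+
+        Illustrative usage (editor's sketch; the project name and paths
+        are assumptions, not part of the vendored code)::
+
+            index = PackageIndex()
+            local_path = index.download('example_pkg==1.0', '/tmp/build')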
+ """ + if not isinstance(spec, Requirement): + scheme = URL_SCHEME(spec) + if scheme: + # It's a url, download it to tmpdir + found = self._download_url(scheme.group(1), spec, tmpdir) + base, fragment = egg_info_for_url(spec) + if base.endswith('.py'): + found = self.gen_setup(found, fragment, tmpdir) + return found + elif os.path.exists(spec): + # Existing file or directory, just return it + return spec + else: + spec = parse_requirement_arg(spec) + return getattr(self.fetch_distribution(spec, tmpdir), 'location', None) + + def fetch_distribution( + self, requirement, tmpdir, force_scan=False, source=False, + develop_ok=False, local_index=None): + """Obtain a distribution suitable for fulfilling `requirement` + + `requirement` must be a ``pkg_resources.Requirement`` instance. + If necessary, or if the `force_scan` flag is set, the requirement is + searched for in the (online) package index as well as the locally + installed packages. If a distribution matching `requirement` is found, + the returned distribution's ``location`` is the value you would have + gotten from calling the ``download()`` method with the matching + distribution's URL or filename. If no matching distribution is found, + ``None`` is returned. + + If the `source` flag is set, only source distributions and source + checkout links will be considered. Unless the `develop_ok` flag is + set, development and system eggs (i.e., those using the ``.egg-info`` + format) will be ignored. + """ + # process a Requirement + self.info("Searching for %s", requirement) + skipped = {} + dist = None + + def find(req, env=None): + if env is None: + env = self + # Find a matching distribution; may be called more than once + + for dist in env[req.key]: + + if dist.precedence == DEVELOP_DIST and not develop_ok: + if dist not in skipped: + self.warn( + "Skipping development or system egg: %s", dist, + ) + skipped[dist] = 1 + continue + + test = ( + dist in req + and (dist.precedence <= SOURCE_DIST or not source) + ) + if test: + loc = self.download(dist.location, tmpdir) + dist.download_location = loc + if os.path.exists(dist.download_location): + return dist + + if force_scan: + self.prescan() + self.find_packages(requirement) + dist = find(requirement) + + if not dist and local_index is not None: + dist = find(requirement, local_index) + + if dist is None: + if self.to_scan is not None: + self.prescan() + dist = find(requirement) + + if dist is None and not force_scan: + self.find_packages(requirement) + dist = find(requirement) + + if dist is None: + self.warn( + "No local packages or working download links found for %s%s", + (source and "a source distribution of " or ""), + requirement, + ) + else: + self.info("Best match: %s", dist) + return dist.clone(location=dist.download_location) + + def fetch(self, requirement, tmpdir, force_scan=False, source=False): + """Obtain a file suitable for fulfilling `requirement` + + DEPRECATED; use the ``fetch_distribution()`` method now instead. For + backward compatibility, this routine is identical but returns the + ``location`` of the downloaded distribution instead of a distribution + object. 
+ """ + dist = self.fetch_distribution(requirement, tmpdir, force_scan, source) + if dist is not None: + return dist.location + return None + + def gen_setup(self, filename, fragment, tmpdir): + match = EGG_FRAGMENT.match(fragment) + dists = match and [ + d for d in + interpret_distro_name(filename, match.group(1), None) if d.version + ] or [] + + if len(dists) == 1: # unambiguous ``#egg`` fragment + basename = os.path.basename(filename) + + # Make sure the file has been downloaded to the temp dir. + if os.path.dirname(filename) != tmpdir: + dst = os.path.join(tmpdir, basename) + from setuptools.command.easy_install import samefile + if not samefile(filename, dst): + shutil.copy2(filename, dst) + filename = dst + + with open(os.path.join(tmpdir, 'setup.py'), 'w') as file: + file.write( + "from setuptools import setup\n" + "setup(name=%r, version=%r, py_modules=[%r])\n" + % ( + dists[0].project_name, dists[0].version, + os.path.splitext(basename)[0] + ) + ) + return filename + + elif match: + raise DistutilsError( + "Can't unambiguously interpret project/version identifier %r; " + "any dashes in the name or version should be escaped using " + "underscores. %r" % (fragment, dists) + ) + else: + raise DistutilsError( + "Can't process plain .py files without an '#egg=name-version'" + " suffix to enable automatic setup script generation." + ) + + dl_blocksize = 8192 + + def _download_to(self, url, filename): + self.info("Downloading %s", url) + # Download the file + fp = None + try: + checker = HashChecker.from_url(url) + fp = self.open_url(url) + if isinstance(fp, urllib.error.HTTPError): + raise DistutilsError( + "Can't download %s: %s %s" % (url, fp.code, fp.msg) + ) + headers = fp.info() + blocknum = 0 + bs = self.dl_blocksize + size = -1 + if "content-length" in headers: + # Some servers return multiple Content-Length headers :( + sizes = get_all_headers(headers, 'Content-Length') + size = max(map(int, sizes)) + self.reporthook(url, filename, blocknum, bs, size) + with open(filename, 'wb') as tfp: + while True: + block = fp.read(bs) + if block: + checker.feed(block) + tfp.write(block) + blocknum += 1 + self.reporthook(url, filename, blocknum, bs, size) + else: + break + self.check_hash(checker, filename, tfp) + return headers + finally: + if fp: + fp.close() + + def reporthook(self, url, filename, blocknum, blksize, size): + pass # no-op + + def open_url(self, url, warning=None): + if url.startswith('file:'): + return local_open(url) + try: + return open_with_auth(url, self.opener) + except (ValueError, http_client.InvalidURL) as v: + msg = ' '.join([str(arg) for arg in v.args]) + if warning: + self.warn(warning, msg) + else: + raise DistutilsError('%s %s' % (url, msg)) + except urllib.error.HTTPError as v: + return v + except urllib.error.URLError as v: + if warning: + self.warn(warning, v.reason) + else: + raise DistutilsError("Download error for %s: %s" + % (url, v.reason)) + except http_client.BadStatusLine as v: + if warning: + self.warn(warning, v.line) + else: + raise DistutilsError( + '%s returned a bad status line. The server might be ' + 'down, %s' % + (url, v.line) + ) + except (http_client.HTTPException, socket.error) as v: + if warning: + self.warn(warning, v) + else: + raise DistutilsError("Download error for %s: %s" + % (url, v)) + + def _download_url(self, scheme, url, tmpdir): + # Determine download filename + # + name, fragment = egg_info_for_url(url) + if name: + while '..' 
in name: + name = name.replace('..', '.').replace('\\', '_') + else: + name = "__downloaded__" # default if URL has no path contents + + if name.endswith('.egg.zip'): + name = name[:-4] # strip the extra .zip before download + + filename = os.path.join(tmpdir, name) + + # Download the file + # + if scheme == 'svn' or scheme.startswith('svn+'): + return self._download_svn(url, filename) + elif scheme == 'git' or scheme.startswith('git+'): + return self._download_git(url, filename) + elif scheme.startswith('hg+'): + return self._download_hg(url, filename) + elif scheme == 'file': + return urllib.request.url2pathname(urllib.parse.urlparse(url)[2]) + else: + self.url_ok(url, True) # raises error if not allowed + return self._attempt_download(url, filename) + + def scan_url(self, url): + self.process_url(url, True) + + def _attempt_download(self, url, filename): + headers = self._download_to(url, filename) + if 'html' in headers.get('content-type', '').lower(): + return self._download_html(url, headers, filename) + else: + return filename + + def _download_html(self, url, headers, filename): + file = open(filename) + for line in file: + if line.strip(): + # Check for a subversion index page + if re.search(r'<title>([^- ]+ - )?Revision \d+:', line): + # it's a subversion index page: + file.close() + os.unlink(filename) + return self._download_svn(url, filename) + break # not an index page + file.close() + os.unlink(filename) + raise DistutilsError("Unexpected HTML page found at " + url) + + def _download_svn(self, url, filename): + warnings.warn("SVN download support is deprecated", UserWarning) + url = url.split('#', 1)[0] # remove any fragment for svn's sake + creds = '' + if url.lower().startswith('svn:') and '@' in url: + scheme, netloc, path, p, q, f = urllib.parse.urlparse(url) + if not netloc and path.startswith('//') and '/' in path[2:]: + netloc, path = path[2:].split('/', 1) + auth, host = _splituser(netloc) + if auth: + if ':' in auth: + user, pw = auth.split(':', 1) + creds = " --username=%s --password=%s" % (user, pw) + else: + creds = " --username=" + auth + netloc = host + parts = scheme, netloc, url, p, q, f + url = urllib.parse.urlunparse(parts) + self.info("Doing subversion checkout from %s to %s", url, filename) + os.system("svn checkout%s -q %s %s" % (creds, url, filename)) + return filename + + @staticmethod + def _vcs_split_rev_from_url(url, pop_prefix=False): + scheme, netloc, path, query, frag = urllib.parse.urlsplit(url) + + scheme = scheme.split('+', 1)[-1] + + # Some fragment identification fails + path = path.split('#', 1)[0] + + rev = None + if '@' in path: + path, rev = path.rsplit('@', 1) + + # Also, discard fragment + url = urllib.parse.urlunsplit((scheme, netloc, path, query, '')) + + return url, rev + + def _download_git(self, url, filename): + filename = filename.split('#', 1)[0] + url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) + + self.info("Doing git clone from %s to %s", url, filename) + os.system("git clone --quiet %s %s" % (url, filename)) + + if rev is not None: + self.info("Checking out %s", rev) + os.system("git -C %s checkout --quiet %s" % ( + filename, + rev, + )) + + return filename + + def _download_hg(self, url, filename): + filename = filename.split('#', 1)[0] + url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) + + self.info("Doing hg clone from %s to %s", url, filename) + os.system("hg clone --quiet %s %s" % (url, filename)) + + if rev is not None: + self.info("Updating to %s", rev) + os.system("hg --cwd %s up -C -r %s 
-q" % ( + filename, + rev, + )) + + return filename + + def debug(self, msg, *args): + log.debug(msg, *args) + + def info(self, msg, *args): + log.info(msg, *args) + + def warn(self, msg, *args): + log.warn(msg, *args) + + +# This pattern matches a character entity reference (a decimal numeric +# references, a hexadecimal numeric reference, or a named reference). +entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub + + +def decode_entity(match): + what = match.group(0) + return unescape(what) + + +def htmldecode(text): + """ + Decode HTML entities in the given text. + + >>> htmldecode( + ... 'https://../package_name-0.1.2.tar.gz' + ... '?tokena=A&tokenb=B">package_name-0.1.2.tar.gz') + 'https://../package_name-0.1.2.tar.gz?tokena=A&tokenb=B">package_name-0.1.2.tar.gz' + """ + return entity_sub(decode_entity, text) + + +def socket_timeout(timeout=15): + def _socket_timeout(func): + def _socket_timeout(*args, **kwargs): + old_timeout = socket.getdefaulttimeout() + socket.setdefaulttimeout(timeout) + try: + return func(*args, **kwargs) + finally: + socket.setdefaulttimeout(old_timeout) + + return _socket_timeout + + return _socket_timeout + + +def _encode_auth(auth): + """ + A function compatible with Python 2.3-3.3 that will encode + auth from a URL suitable for an HTTP header. + >>> str(_encode_auth('username%3Apassword')) + 'dXNlcm5hbWU6cGFzc3dvcmQ=' + + Long auth strings should not cause a newline to be inserted. + >>> long_auth = 'username:' + 'password'*10 + >>> chr(10) in str(_encode_auth(long_auth)) + False + """ + auth_s = urllib.parse.unquote(auth) + # convert to bytes + auth_bytes = auth_s.encode() + encoded_bytes = base64.b64encode(auth_bytes) + # convert back to a string + encoded = encoded_bytes.decode() + # strip the trailing carriage return + return encoded.replace('\n', '') + + +class Credential: + """ + A username/password pair. Use like a namedtuple. + """ + + def __init__(self, username, password): + self.username = username + self.password = password + + def __iter__(self): + yield self.username + yield self.password + + def __str__(self): + return '%(username)s:%(password)s' % vars(self) + + +class PyPIConfig(configparser.RawConfigParser): + def __init__(self): + """ + Load from ~/.pypirc + """ + defaults = dict.fromkeys(['username', 'password', 'repository'], '') + configparser.RawConfigParser.__init__(self, defaults) + + rc = os.path.join(os.path.expanduser('~'), '.pypirc') + if os.path.exists(rc): + self.read(rc) + + @property + def creds_by_repository(self): + sections_with_repositories = [ + section for section in self.sections() + if self.get(section, 'repository').strip() + ] + + return dict(map(self._get_repo_cred, sections_with_repositories)) + + def _get_repo_cred(self, section): + repo = self.get(section, 'repository').strip() + return repo, Credential( + self.get(section, 'username').strip(), + self.get(section, 'password').strip(), + ) + + def find_credential(self, url): + """ + If the URL indicated appears to be a repository defined in this + config, return the credential for that repository. + """ + for repository, cred in self.creds_by_repository.items(): + if url.startswith(repository): + return cred + + +def open_with_auth(url, opener=urllib.request.urlopen): + """Open a urllib2 request, handling HTTP authentication""" + + parsed = urllib.parse.urlparse(url) + scheme, netloc, path, params, query, frag = parsed + + # Double scheme does not raise on Mac OS X as revealed by a + # failing test. We would expect "nonnumeric port". Refs #20. 
+ if netloc.endswith(':'): + raise http_client.InvalidURL("nonnumeric port: ''") + + if scheme in ('http', 'https'): + auth, address = _splituser(netloc) + else: + auth = None + + if not auth: + cred = PyPIConfig().find_credential(url) + if cred: + auth = str(cred) + info = cred.username, url + log.info('Authenticating as %s for %s (from .pypirc)', *info) + + if auth: + auth = "Basic " + _encode_auth(auth) + parts = scheme, address, path, params, query, frag + new_url = urllib.parse.urlunparse(parts) + request = urllib.request.Request(new_url) + request.add_header("Authorization", auth) + else: + request = urllib.request.Request(url) + + request.add_header('User-Agent', user_agent) + fp = opener(request) + + if auth: + # Put authentication info back into request URL if same host, + # so that links found on the page will work + s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url) + if s2 == scheme and h2 == address: + parts = s2, netloc, path2, param2, query2, frag2 + fp.url = urllib.parse.urlunparse(parts) + + return fp + + +# copy of urllib.parse._splituser from Python 3.8 +def _splituser(host): + """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + user, delim, host = host.rpartition('@') + return (user if delim else None), host + + +# adding a timeout to avoid freezing package_index +open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth) + + +def fix_sf_url(url): + return url # backward compatibility + + +def local_open(url): + """Read a local path, with special support for directories""" + scheme, server, path, param, query, frag = urllib.parse.urlparse(url) + filename = urllib.request.url2pathname(path) + if os.path.isfile(filename): + return urllib.request.urlopen(url) + elif path.endswith('/') and os.path.isdir(filename): + files = [] + for f in os.listdir(filename): + filepath = os.path.join(filename, f) + if f == 'index.html': + with open(filepath, 'r') as fp: + body = fp.read() + break + elif os.path.isdir(filepath): + f += '/' + files.append('<a href="{name}">{name}</a>'.format(name=f)) + else: + tmpl = ( + "<html><head><title>{url}</title>" + "</head><body>{files}</body></html>") + body = tmpl.format(url=url, files='\n'.join(files)) + status, message = 200, "OK" + else: + status, message, body = 404, "Path not found", "Not found" + + headers = {'content-type': 'text/html'} + body_stream = six.StringIO(body) + return urllib.error.HTTPError(url, status, message, headers, body_stream) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/py27compat.py b/backend/test/lib/python3.8/site-packages/setuptools/py27compat.py new file mode 100644 index 0000000000000000000000000000000000000000..1d57360f4eff13cd94a25fec989036a0b0b80523 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/py27compat.py @@ -0,0 +1,60 @@ +""" +Compatibility Support for Python 2.7 and earlier +""" + +import sys +import platform + +from setuptools.extern import six + + +def get_all_headers(message, key): + """ + Given an HTTPMessage, return all headers matching a given key. 
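+
+    Illustrative behaviour (editor's sketch; the values are assumed)::
+
+        get_all_headers(message, 'Content-Length')  # e.g. ['1024', '1024']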
+ """ + return message.get_all(key) + + +if six.PY2: + def get_all_headers(message, key): + return message.getheaders(key) + + +linux_py2_ascii = ( + platform.system() == 'Linux' and + six.PY2 +) + +rmtree_safe = str if linux_py2_ascii else lambda x: x +"""Workaround for http://bugs.python.org/issue24672""" + + +try: + from ._imp import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE + from ._imp import get_frozen_object, get_module +except ImportError: + import imp + from imp import PY_COMPILED, PY_FROZEN, PY_SOURCE # noqa + + def find_module(module, paths=None): + """Just like 'imp.find_module()', but with package support""" + parts = module.split('.') + while parts: + part = parts.pop(0) + f, path, (suffix, mode, kind) = info = imp.find_module(part, paths) + + if kind == imp.PKG_DIRECTORY: + parts = parts or ['__init__'] + paths = [path] + + elif parts: + raise ImportError("Can't find %r in %s" % (parts, module)) + + return info + + def get_frozen_object(module, paths): + return imp.get_frozen_object(module) + + def get_module(module, paths, info): + imp.load_module(module, *info) + return sys.modules[module] diff --git a/backend/test/lib/python3.8/site-packages/setuptools/py31compat.py b/backend/test/lib/python3.8/site-packages/setuptools/py31compat.py new file mode 100644 index 0000000000000000000000000000000000000000..e1da7ee2a2c56e46e09665d98ba1bc5bfedd2c3e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/py31compat.py @@ -0,0 +1,32 @@ +__all__ = [] + +__metaclass__ = type + + +try: + # Python >=3.2 + from tempfile import TemporaryDirectory +except ImportError: + import shutil + import tempfile + + class TemporaryDirectory: + """ + Very simple temporary directory context manager. + Will try to delete afterward, but will also ignore OS and similar + errors on deletion. 
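+
+        Illustrative usage (editor's sketch)::
+
+            with TemporaryDirectory() as tmp_dir:
+                ...  # tmp_dir exists here; removal is attempted on exit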
+ """ + + def __init__(self, **kwargs): + self.name = None # Handle mkdtemp raising an exception + self.name = tempfile.mkdtemp(**kwargs) + + def __enter__(self): + return self.name + + def __exit__(self, exctype, excvalue, exctrace): + try: + shutil.rmtree(self.name, True) + except OSError: # removal errors are not the only possible + pass + self.name = None diff --git a/backend/test/lib/python3.8/site-packages/setuptools/py33compat.py b/backend/test/lib/python3.8/site-packages/setuptools/py33compat.py new file mode 100644 index 0000000000000000000000000000000000000000..cb69443638354b46b43da5bbf187b4f7cba301f1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/py33compat.py @@ -0,0 +1,59 @@ +import dis +import array +import collections + +try: + import html +except ImportError: + html = None + +from setuptools.extern import six +from setuptools.extern.six.moves import html_parser + +__metaclass__ = type + +OpArg = collections.namedtuple('OpArg', 'opcode arg') + + +class Bytecode_compat: + def __init__(self, code): + self.code = code + + def __iter__(self): + """Yield '(op,arg)' pair for each operation in code object 'code'""" + + bytes = array.array('b', self.code.co_code) + eof = len(self.code.co_code) + + ptr = 0 + extended_arg = 0 + + while ptr < eof: + + op = bytes[ptr] + + if op >= dis.HAVE_ARGUMENT: + + arg = bytes[ptr + 1] + bytes[ptr + 2] * 256 + extended_arg + ptr += 3 + + if op == dis.EXTENDED_ARG: + long_type = six.integer_types[-1] + extended_arg = arg * long_type(65536) + continue + + else: + arg = None + ptr += 1 + + yield OpArg(op, arg) + + +Bytecode = getattr(dis, 'Bytecode', Bytecode_compat) + + +unescape = getattr(html, 'unescape', None) +if unescape is None: + # HTMLParser.unescape is deprecated since Python 3.4, and will be removed + # from 3.9. 
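+    # Editor's note (illustrative): both implementations behave alike for
+    # callers such as package_index.htmldecode(), e.g. turning '&lt;b&gt;'
+    # into '<b>'.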
+ unescape = html_parser.HTMLParser().unescape diff --git a/backend/test/lib/python3.8/site-packages/setuptools/py34compat.py b/backend/test/lib/python3.8/site-packages/setuptools/py34compat.py new file mode 100644 index 0000000000000000000000000000000000000000..3ad917222a4e5bb93fe1c9e8fe1713bcab3630b6 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/py34compat.py @@ -0,0 +1,13 @@ +import importlib + +try: + import importlib.util +except ImportError: + pass + + +try: + module_from_spec = importlib.util.module_from_spec +except AttributeError: + def module_from_spec(spec): + return spec.loader.load_module(spec.name) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/sandbox.py b/backend/test/lib/python3.8/site-packages/setuptools/sandbox.py new file mode 100644 index 0000000000000000000000000000000000000000..685f3f72e3611a5fa99c999e233ffd179c431a6d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/sandbox.py @@ -0,0 +1,491 @@ +import os +import sys +import tempfile +import operator +import functools +import itertools +import re +import contextlib +import pickle +import textwrap + +from setuptools.extern import six +from setuptools.extern.six.moves import builtins, map + +import pkg_resources.py31compat + +if sys.platform.startswith('java'): + import org.python.modules.posix.PosixModule as _os +else: + _os = sys.modules[os.name] +try: + _file = file +except NameError: + _file = None +_open = open +from distutils.errors import DistutilsError +from pkg_resources import working_set + + +__all__ = [ + "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", +] + + +def _execfile(filename, globals, locals=None): + """ + Python 3 implementation of execfile. + """ + mode = 'rb' + with open(filename, mode) as stream: + script = stream.read() + if locals is None: + locals = globals + code = compile(script, filename, 'exec') + exec(code, globals, locals) + + +@contextlib.contextmanager +def save_argv(repl=None): + saved = sys.argv[:] + if repl is not None: + sys.argv[:] = repl + try: + yield saved + finally: + sys.argv[:] = saved + + +@contextlib.contextmanager +def save_path(): + saved = sys.path[:] + try: + yield saved + finally: + sys.path[:] = saved + + +@contextlib.contextmanager +def override_temp(replacement): + """ + Monkey-patch tempfile.tempdir with replacement, ensuring it exists + """ + pkg_resources.py31compat.makedirs(replacement, exist_ok=True) + + saved = tempfile.tempdir + + tempfile.tempdir = replacement + + try: + yield + finally: + tempfile.tempdir = saved + + +@contextlib.contextmanager +def pushd(target): + saved = os.getcwd() + os.chdir(target) + try: + yield saved + finally: + os.chdir(saved) + + +class UnpickleableException(Exception): + """ + An exception representing another Exception that could not be pickled. + """ + + @staticmethod + def dump(type, exc): + """ + Always return a dumped (pickled) type and exc. If exc can't be pickled, + wrap it in UnpickleableException first. + """ + try: + return pickle.dumps(type), pickle.dumps(exc) + except Exception: + # get UnpickleableException inside the sandbox + from setuptools.sandbox import UnpickleableException as cls + return cls.dump(cls, cls(repr(exc))) + + +class ExceptionSaver: + """ + A Context Manager that will save an exception, serialized, and restore it + later. 
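+
+    Illustrative usage (editor's sketch; mirrors ``save_modules`` below)::
+
+        with ExceptionSaver() as saved_exc:
+            ...  # an exception raised here is captured, not propagated
+        saved_exc.resume()  # re-raises the saved exception, if any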
+ """ + + def __enter__(self): + return self + + def __exit__(self, type, exc, tb): + if not exc: + return + + # dump the exception + self._saved = UnpickleableException.dump(type, exc) + self._tb = tb + + # suppress the exception + return True + + def resume(self): + "restore and re-raise any exception" + + if '_saved' not in vars(self): + return + + type, exc = map(pickle.loads, self._saved) + six.reraise(type, exc, self._tb) + + +@contextlib.contextmanager +def save_modules(): + """ + Context in which imported modules are saved. + + Translates exceptions internal to the context into the equivalent exception + outside the context. + """ + saved = sys.modules.copy() + with ExceptionSaver() as saved_exc: + yield saved + + sys.modules.update(saved) + # remove any modules imported since + del_modules = ( + mod_name for mod_name in sys.modules + if mod_name not in saved + # exclude any encodings modules. See #285 + and not mod_name.startswith('encodings.') + ) + _clear_modules(del_modules) + + saved_exc.resume() + + +def _clear_modules(module_names): + for mod_name in list(module_names): + del sys.modules[mod_name] + + +@contextlib.contextmanager +def save_pkg_resources_state(): + saved = pkg_resources.__getstate__() + try: + yield saved + finally: + pkg_resources.__setstate__(saved) + + +@contextlib.contextmanager +def setup_context(setup_dir): + temp_dir = os.path.join(setup_dir, 'temp') + with save_pkg_resources_state(): + with save_modules(): + hide_setuptools() + with save_path(): + with save_argv(): + with override_temp(temp_dir): + with pushd(setup_dir): + # ensure setuptools commands are available + __import__('setuptools') + yield + + +def _needs_hiding(mod_name): + """ + >>> _needs_hiding('setuptools') + True + >>> _needs_hiding('pkg_resources') + True + >>> _needs_hiding('setuptools_plugin') + False + >>> _needs_hiding('setuptools.__init__') + True + >>> _needs_hiding('distutils') + True + >>> _needs_hiding('os') + False + >>> _needs_hiding('Cython') + True + """ + pattern = re.compile(r'(setuptools|pkg_resources|distutils|Cython)(\.|$)') + return bool(pattern.match(mod_name)) + + +def hide_setuptools(): + """ + Remove references to setuptools' modules from sys.modules to allow the + invocation to import the most appropriate setuptools. This technique is + necessary to avoid issues such as #315 where setuptools upgrading itself + would fail to find a function declared in the metadata. 
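+
+    Sketch of the effect (editor's illustration, not a doctest)::
+
+        import sys, setuptools
+        hide_setuptools()
+        'setuptools' in sys.modules  # -> False until re-imported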
+ """ + modules = filter(_needs_hiding, sys.modules) + _clear_modules(modules) + + +def run_setup(setup_script, args): + """Run a distutils setup script, sandboxed in its directory""" + setup_dir = os.path.abspath(os.path.dirname(setup_script)) + with setup_context(setup_dir): + try: + sys.argv[:] = [setup_script] + list(args) + sys.path.insert(0, setup_dir) + # reset to include setup dir, w/clean callback list + working_set.__init__() + working_set.callbacks.append(lambda dist: dist.activate()) + + # __file__ should be a byte string on Python 2 (#712) + dunder_file = ( + setup_script + if isinstance(setup_script, str) else + setup_script.encode(sys.getfilesystemencoding()) + ) + + with DirectorySandbox(setup_dir): + ns = dict(__file__=dunder_file, __name__='__main__') + _execfile(setup_script, ns) + except SystemExit as v: + if v.args and v.args[0]: + raise + # Normal exit, just return + + +class AbstractSandbox: + """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" + + _active = False + + def __init__(self): + self._attrs = [ + name for name in dir(_os) + if not name.startswith('_') and hasattr(self, name) + ] + + def _copy(self, source): + for name in self._attrs: + setattr(os, name, getattr(source, name)) + + def __enter__(self): + self._copy(self) + if _file: + builtins.file = self._file + builtins.open = self._open + self._active = True + + def __exit__(self, exc_type, exc_value, traceback): + self._active = False + if _file: + builtins.file = _file + builtins.open = _open + self._copy(_os) + + def run(self, func): + """Run 'func' under os sandboxing""" + with self: + return func() + + def _mk_dual_path_wrapper(name): + original = getattr(_os, name) + + def wrap(self, src, dst, *args, **kw): + if self._active: + src, dst = self._remap_pair(name, src, dst, *args, **kw) + return original(src, dst, *args, **kw) + + return wrap + + for name in ["rename", "link", "symlink"]: + if hasattr(_os, name): + locals()[name] = _mk_dual_path_wrapper(name) + + def _mk_single_path_wrapper(name, original=None): + original = original or getattr(_os, name) + + def wrap(self, path, *args, **kw): + if self._active: + path = self._remap_input(name, path, *args, **kw) + return original(path, *args, **kw) + + return wrap + + if _file: + _file = _mk_single_path_wrapper('file', _file) + _open = _mk_single_path_wrapper('open', _open) + for name in [ + "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir", + "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", + "startfile", "mkfifo", "mknod", "pathconf", "access" + ]: + if hasattr(_os, name): + locals()[name] = _mk_single_path_wrapper(name) + + def _mk_single_with_return(name): + original = getattr(_os, name) + + def wrap(self, path, *args, **kw): + if self._active: + path = self._remap_input(name, path, *args, **kw) + return self._remap_output(name, original(path, *args, **kw)) + return original(path, *args, **kw) + + return wrap + + for name in ['readlink', 'tempnam']: + if hasattr(_os, name): + locals()[name] = _mk_single_with_return(name) + + def _mk_query(name): + original = getattr(_os, name) + + def wrap(self, *args, **kw): + retval = original(*args, **kw) + if self._active: + return self._remap_output(name, retval) + return retval + + return wrap + + for name in ['getcwd', 'tmpnam']: + if hasattr(_os, name): + locals()[name] = _mk_query(name) + + def _validate_path(self, path): + """Called to remap or validate any path, whether input or output""" + return path + + def _remap_input(self, operation, path, 
*args, **kw): + """Called for path inputs""" + return self._validate_path(path) + + def _remap_output(self, operation, path): + """Called for path outputs""" + return self._validate_path(path) + + def _remap_pair(self, operation, src, dst, *args, **kw): + """Called for path pairs like rename, link, and symlink operations""" + return ( + self._remap_input(operation + '-from', src, *args, **kw), + self._remap_input(operation + '-to', dst, *args, **kw) + ) + + +if hasattr(os, 'devnull'): + _EXCEPTIONS = [os.devnull,] +else: + _EXCEPTIONS = [] + + +class DirectorySandbox(AbstractSandbox): + """Restrict operations to a single subdirectory - pseudo-chroot""" + + write_ops = dict.fromkeys([ + "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", + "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam", + ]) + + _exception_patterns = [ + # Allow lib2to3 to attempt to save a pickled grammar object (#121) + r'.*lib2to3.*\.pickle$', + ] + "exempt writing to paths that match the pattern" + + def __init__(self, sandbox, exceptions=_EXCEPTIONS): + self._sandbox = os.path.normcase(os.path.realpath(sandbox)) + self._prefix = os.path.join(self._sandbox, '') + self._exceptions = [ + os.path.normcase(os.path.realpath(path)) + for path in exceptions + ] + AbstractSandbox.__init__(self) + + def _violation(self, operation, *args, **kw): + from setuptools.sandbox import SandboxViolation + raise SandboxViolation(operation, args, kw) + + if _file: + + def _file(self, path, mode='r', *args, **kw): + if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): + self._violation("file", path, mode, *args, **kw) + return _file(path, mode, *args, **kw) + + def _open(self, path, mode='r', *args, **kw): + if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): + self._violation("open", path, mode, *args, **kw) + return _open(path, mode, *args, **kw) + + def tmpnam(self): + self._violation("tmpnam") + + def _ok(self, path): + active = self._active + try: + self._active = False + realpath = os.path.normcase(os.path.realpath(path)) + return ( + self._exempted(realpath) + or realpath == self._sandbox + or realpath.startswith(self._prefix) + ) + finally: + self._active = active + + def _exempted(self, filepath): + start_matches = ( + filepath.startswith(exception) + for exception in self._exceptions + ) + pattern_matches = ( + re.match(pattern, filepath) + for pattern in self._exception_patterns + ) + candidates = itertools.chain(start_matches, pattern_matches) + return any(candidates) + + def _remap_input(self, operation, path, *args, **kw): + """Called for path inputs""" + if operation in self.write_ops and not self._ok(path): + self._violation(operation, os.path.realpath(path), *args, **kw) + return path + + def _remap_pair(self, operation, src, dst, *args, **kw): + """Called for path pairs like rename, link, and symlink operations""" + if not self._ok(src) or not self._ok(dst): + self._violation(operation, src, dst, *args, **kw) + return (src, dst) + + def open(self, file, flags, mode=0o777, *args, **kw): + """Called for low-level os.open()""" + if flags & WRITE_FLAGS and not self._ok(file): + self._violation("os.open", file, flags, mode, *args, **kw) + return _os.open(file, flags, mode, *args, **kw) + + +WRITE_FLAGS = functools.reduce( + operator.or_, [getattr(_os, a, 0) for a in + "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] +) + + +class SandboxViolation(DistutilsError): + """A setup script attempted to modify the filesystem outside the sandbox""" + + tmpl = 
textwrap.dedent(""" + SandboxViolation: {cmd}{args!r} {kwargs} + + The package setup script has attempted to modify files on your system + that are not within the EasyInstall build area, and has been aborted. + + This package cannot be safely installed by EasyInstall, and may not + support alternate installation locations even if you run its setup + script by hand. Please inform the package's author and the EasyInstall + maintainers to find out if a fix or workaround is available. + """).lstrip() + + def __str__(self): + cmd, args, kwargs = self.args + return self.tmpl.format(**locals()) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/script (dev).tmpl b/backend/test/lib/python3.8/site-packages/setuptools/script (dev).tmpl new file mode 100644 index 0000000000000000000000000000000000000000..39a24b04888e79df51e2237577b303a2f901be63 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/script (dev).tmpl @@ -0,0 +1,6 @@ +# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r +__requires__ = %(spec)r +__import__('pkg_resources').require(%(spec)r) +__file__ = %(dev_path)r +with open(__file__) as f: + exec(compile(f.read(), __file__, 'exec')) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/script.tmpl b/backend/test/lib/python3.8/site-packages/setuptools/script.tmpl new file mode 100644 index 0000000000000000000000000000000000000000..ff5efbcab3b58063dd84787181c26a95fb663d94 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/script.tmpl @@ -0,0 +1,3 @@ +# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r +__requires__ = %(spec)r +__import__('pkg_resources').run_script(%(spec)r, %(script_name)r) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/site-patch.py b/backend/test/lib/python3.8/site-packages/setuptools/site-patch.py new file mode 100644 index 0000000000000000000000000000000000000000..40b00de0a799686485b266fd92abb9fb100ed718 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/site-patch.py @@ -0,0 +1,74 @@ +def __boot(): + import sys + import os + PYTHONPATH = os.environ.get('PYTHONPATH') + if PYTHONPATH is None or (sys.platform == 'win32' and not PYTHONPATH): + PYTHONPATH = [] + else: + PYTHONPATH = PYTHONPATH.split(os.pathsep) + + pic = getattr(sys, 'path_importer_cache', {}) + stdpath = sys.path[len(PYTHONPATH):] + mydir = os.path.dirname(__file__) + + for item in stdpath: + if item == mydir or not item: + continue # skip if current dir. 
on Windows, or my own directory + importer = pic.get(item) + if importer is not None: + loader = importer.find_module('site') + if loader is not None: + # This should actually reload the current module + loader.load_module('site') + break + else: + try: + import imp # Avoid import loop in Python 3 + stream, path, descr = imp.find_module('site', [item]) + except ImportError: + continue + if stream is None: + continue + try: + # This should actually reload the current module + imp.load_module('site', stream, path, descr) + finally: + stream.close() + break + else: + raise ImportError("Couldn't find the real 'site' module") + + known_paths = dict([(makepath(item)[1], 1) for item in sys.path]) # 2.2 comp + + oldpos = getattr(sys, '__egginsert', 0) # save old insertion position + sys.__egginsert = 0 # and reset the current one + + for item in PYTHONPATH: + addsitedir(item) + + sys.__egginsert += oldpos # restore effective old position + + d, nd = makepath(stdpath[0]) + insert_at = None + new_path = [] + + for item in sys.path: + p, np = makepath(item) + + if np == nd and insert_at is None: + # We've hit the first 'system' path entry, so added entries go here + insert_at = len(new_path) + + if np in known_paths or insert_at is None: + new_path.append(item) + else: + # new path after the insert point, back-insert it + new_path.insert(insert_at, item) + insert_at += 1 + + sys.path[:] = new_path + + +if __name__ == 'site': + __boot() + del __boot diff --git a/backend/test/lib/python3.8/site-packages/setuptools/ssl_support.py b/backend/test/lib/python3.8/site-packages/setuptools/ssl_support.py new file mode 100644 index 0000000000000000000000000000000000000000..226db694bb38791147c6bf2881c4b86025dd2f8f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/ssl_support.py @@ -0,0 +1,260 @@ +import os +import socket +import atexit +import re +import functools + +from setuptools.extern.six.moves import urllib, http_client, map, filter + +from pkg_resources import ResolutionError, ExtractionError + +try: + import ssl +except ImportError: + ssl = None + +__all__ = [ + 'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths', + 'opener_for' +] + +cert_paths = """ +/etc/pki/tls/certs/ca-bundle.crt +/etc/ssl/certs/ca-certificates.crt +/usr/share/ssl/certs/ca-bundle.crt +/usr/local/share/certs/ca-root.crt +/etc/ssl/cert.pem +/System/Library/OpenSSL/certs/cert.pem +/usr/local/share/certs/ca-root-nss.crt +/etc/ssl/ca-bundle.pem +""".strip().split() + +try: + HTTPSHandler = urllib.request.HTTPSHandler + HTTPSConnection = http_client.HTTPSConnection +except AttributeError: + HTTPSHandler = HTTPSConnection = object + +is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection) + + +try: + from ssl import CertificateError, match_hostname +except ImportError: + try: + from backports.ssl_match_hostname import CertificateError + from backports.ssl_match_hostname import match_hostname + except ImportError: + CertificateError = None + match_hostname = None + +if not CertificateError: + + class CertificateError(ValueError): + pass + + +if not match_hostname: + + def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + https://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + # Ported from python3-syntax: + # leftmost, *remainder = dn.split(r'.') + parts = dn.split(r'.') + leftmost = parts[0] + remainder = parts[1:] + + wildcards = leftmost.count('*') + if wildcards > 
max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. 
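+                # (This legacy commonName branch only runs when the
+                # certificate carries no dNSName entries at all; RFC 6125
+                # prefers subjectAltName, which was checked above.)
+                # For illustration, _dnsname_match applies the wildcard
+                # rules from RFC 6125 section 6.4.3 (hypothetical inputs):
+                #   _dnsname_match('*.example.com', 'www.example.com') -> match
+                #   _dnsname_match('*.example.com', 'a.b.example.com') -> no match
+                # since '*' matches a single non-empty label and never a dot.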
+ if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") + + +class VerifyingHTTPSHandler(HTTPSHandler): + """Simple verifying handler: no auth, subclasses, timeouts, etc.""" + + def __init__(self, ca_bundle): + self.ca_bundle = ca_bundle + HTTPSHandler.__init__(self) + + def https_open(self, req): + return self.do_open( + lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req + ) + + +class VerifyingHTTPSConn(HTTPSConnection): + """Simple verifying connection: no auth, subclasses, timeouts, etc.""" + + def __init__(self, host, ca_bundle, **kw): + HTTPSConnection.__init__(self, host, **kw) + self.ca_bundle = ca_bundle + + def connect(self): + sock = socket.create_connection( + (self.host, self.port), getattr(self, 'source_address', None) + ) + + # Handle the socket if a (proxy) tunnel is present + if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None): + self.sock = sock + self._tunnel() + # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7 + # change self.host to mean the proxy server host when tunneling is + # being used. Adapt, since we are interested in the destination + # host for the match_hostname() comparison. + actual_host = self._tunnel_host + else: + actual_host = self.host + + if hasattr(ssl, 'create_default_context'): + ctx = ssl.create_default_context(cafile=self.ca_bundle) + self.sock = ctx.wrap_socket(sock, server_hostname=actual_host) + else: + # This is for python < 2.7.9 and < 3.4? 
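+            # Note: ssl.wrap_socket() checks the certificate chain (via
+            # cert_reqs=CERT_REQUIRED below) but not the hostname, which is
+            # why match_hostname() is still called afterwards. The function
+            # was deprecated in Python 3.7 and removed in 3.12; on modern
+            # interpreters the create_default_context() branch above runs
+            # instead, roughly:
+            #   ctx = ssl.create_default_context(cafile=self.ca_bundle)
+            #   self.sock = ctx.wrap_socket(sock, server_hostname=actual_host)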
+ self.sock = ssl.wrap_socket( + sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle + ) + try: + match_hostname(self.sock.getpeercert(), actual_host) + except CertificateError: + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + +def opener_for(ca_bundle=None): + """Get a urlopen() replacement that uses ca_bundle for verification""" + return urllib.request.build_opener( + VerifyingHTTPSHandler(ca_bundle or find_ca_bundle()) + ).open + + +# from jaraco.functools +def once(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + if not hasattr(func, 'always_returns'): + func.always_returns = func(*args, **kwargs) + return func.always_returns + return wrapper + + +@once +def get_win_certfile(): + try: + import wincertstore + except ImportError: + return None + + class CertFile(wincertstore.CertFile): + def __init__(self): + super(CertFile, self).__init__() + atexit.register(self.close) + + def close(self): + try: + super(CertFile, self).close() + except OSError: + pass + + _wincerts = CertFile() + _wincerts.addstore('CA') + _wincerts.addstore('ROOT') + return _wincerts.name + + +def find_ca_bundle(): + """Return an existing CA bundle path, or None""" + extant_cert_paths = filter(os.path.isfile, cert_paths) + return ( + get_win_certfile() + or next(extant_cert_paths, None) + or _certifi_where() + ) + + +def _certifi_where(): + try: + return __import__('certifi').where() + except (ImportError, ResolutionError, ExtractionError): + pass diff --git a/backend/test/lib/python3.8/site-packages/setuptools/unicode_utils.py b/backend/test/lib/python3.8/site-packages/setuptools/unicode_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..7c63efd20b350358ab25c079166dbb00ef49f8d2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/unicode_utils.py @@ -0,0 +1,44 @@ +import unicodedata +import sys + +from setuptools.extern import six + + +# HFS Plus uses decomposed UTF-8 +def decompose(path): + if isinstance(path, six.text_type): + return unicodedata.normalize('NFD', path) + try: + path = path.decode('utf-8') + path = unicodedata.normalize('NFD', path) + path = path.encode('utf-8') + except UnicodeError: + pass # Not UTF-8 + return path + + +def filesys_decode(path): + """ + Ensure that the given path is decoded, + NONE when no expected encoding works + """ + + if isinstance(path, six.text_type): + return path + + fs_enc = sys.getfilesystemencoding() or 'utf-8' + candidates = fs_enc, 'utf-8' + + for enc in candidates: + try: + return path.decode(enc) + except UnicodeDecodeError: + continue + + +def try_encode(string, enc): + "turn unicode encoding into a functional routine" + try: + return string.encode(enc) + except UnicodeEncodeError: + return None diff --git a/backend/test/lib/python3.8/site-packages/setuptools/version.py b/backend/test/lib/python3.8/site-packages/setuptools/version.py new file mode 100644 index 0000000000000000000000000000000000000000..95e1869658566aac3060562d8cd5a6b647887d1e --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/version.py @@ -0,0 +1,6 @@ +import pkg_resources + +try: + __version__ = pkg_resources.get_distribution('setuptools').version +except Exception: + __version__ = 'unknown' diff --git a/backend/test/lib/python3.8/site-packages/setuptools/wheel.py b/backend/test/lib/python3.8/site-packages/setuptools/wheel.py new file mode 100644 index 0000000000000000000000000000000000000000..025aaa828a24cb7746e5fac9b66984d5b9794bc3 --- /dev/null +++ 
b/backend/test/lib/python3.8/site-packages/setuptools/wheel.py @@ -0,0 +1,220 @@ +"""Wheels support.""" + +from distutils.util import get_platform +from distutils import log +import email +import itertools +import os +import posixpath +import re +import zipfile + +import pkg_resources +import setuptools +from pkg_resources import parse_version +from setuptools.extern.packaging.tags import sys_tags +from setuptools.extern.packaging.utils import canonicalize_name +from setuptools.extern.six import PY3 +from setuptools.command.egg_info import write_requirements + + +__metaclass__ = type + + +WHEEL_NAME = re.compile( + r"""^(?P<project_name>.+?)-(?P<version>\d.*?) + ((-(?P<build>\d.*?))?-(?P<py_version>.+?)-(?P<abi>.+?)-(?P<platform>.+?) + )\.whl$""", + re.VERBOSE).match + +NAMESPACE_PACKAGE_INIT = '''\ +try: + __import__('pkg_resources').declare_namespace(__name__) +except ImportError: + __path__ = __import__('pkgutil').extend_path(__path__, __name__) +''' + + +def unpack(src_dir, dst_dir): + '''Move everything under `src_dir` to `dst_dir`, and delete the former.''' + for dirpath, dirnames, filenames in os.walk(src_dir): + subdir = os.path.relpath(dirpath, src_dir) + for f in filenames: + src = os.path.join(dirpath, f) + dst = os.path.join(dst_dir, subdir, f) + os.renames(src, dst) + for n, d in reversed(list(enumerate(dirnames))): + src = os.path.join(dirpath, d) + dst = os.path.join(dst_dir, subdir, d) + if not os.path.exists(dst): + # Directory does not exist in destination, + # rename it and prune it from os.walk list. + os.renames(src, dst) + del dirnames[n] + # Cleanup. + for dirpath, dirnames, filenames in os.walk(src_dir, topdown=True): + assert not filenames + os.rmdir(dirpath) + + +class Wheel: + + def __init__(self, filename): + match = WHEEL_NAME(os.path.basename(filename)) + if match is None: + raise ValueError('invalid wheel name: %r' % filename) + self.filename = filename + for k, v in match.groupdict().items(): + setattr(self, k, v) + + def tags(self): + '''List tags (py_version, abi, platform) supported by this wheel.''' + return itertools.product( + self.py_version.split('.'), + self.abi.split('.'), + self.platform.split('.'), + ) + + def is_compatible(self): + '''Is the wheel compatible with the current platform?''' + supported_tags = set((t.interpreter, t.abi, t.platform) for t in sys_tags()) + return next((True for t in self.tags() if t in supported_tags), False) + + def egg_name(self): + return pkg_resources.Distribution( + project_name=self.project_name, version=self.version, + platform=(None if self.platform == 'any' else get_platform()), + ).egg_name() + '.egg' + + def get_dist_info(self, zf): + # find the correct name of the .dist-info dir in the wheel file + for member in zf.namelist(): + dirname = posixpath.dirname(member) + if (dirname.endswith('.dist-info') and + canonicalize_name(dirname).startswith( + canonicalize_name(self.project_name))): + return dirname + raise ValueError("unsupported wheel format. 
.dist-info not found") + + def install_as_egg(self, destination_eggdir): + '''Install wheel as an egg directory.''' + with zipfile.ZipFile(self.filename) as zf: + self._install_as_egg(destination_eggdir, zf) + + def _install_as_egg(self, destination_eggdir, zf): + dist_basename = '%s-%s' % (self.project_name, self.version) + dist_info = self.get_dist_info(zf) + dist_data = '%s.data' % dist_basename + egg_info = os.path.join(destination_eggdir, 'EGG-INFO') + + self._convert_metadata(zf, destination_eggdir, dist_info, egg_info) + self._move_data_entries(destination_eggdir, dist_data) + self._fix_namespace_packages(egg_info, destination_eggdir) + + @staticmethod + def _convert_metadata(zf, destination_eggdir, dist_info, egg_info): + def get_metadata(name): + with zf.open(posixpath.join(dist_info, name)) as fp: + value = fp.read().decode('utf-8') if PY3 else fp.read() + return email.parser.Parser().parsestr(value) + + wheel_metadata = get_metadata('WHEEL') + # Check wheel format version is supported. + wheel_version = parse_version(wheel_metadata.get('Wheel-Version')) + wheel_v1 = ( + parse_version('1.0') <= wheel_version < parse_version('2.0dev0') + ) + if not wheel_v1: + raise ValueError( + 'unsupported wheel format version: %s' % wheel_version) + # Extract to target directory. + os.mkdir(destination_eggdir) + zf.extractall(destination_eggdir) + # Convert metadata. + dist_info = os.path.join(destination_eggdir, dist_info) + dist = pkg_resources.Distribution.from_location( + destination_eggdir, dist_info, + metadata=pkg_resources.PathMetadata(destination_eggdir, dist_info), + ) + + # Note: Evaluate and strip markers now, + # as it's difficult to convert back from the syntax: + # foobar; "linux" in sys_platform and extra == 'test' + def raw_req(req): + req.marker = None + return str(req) + install_requires = list(sorted(map(raw_req, dist.requires()))) + extras_require = { + extra: sorted( + req + for req in map(raw_req, dist.requires((extra,))) + if req not in install_requires + ) + for extra in dist.extras + } + os.rename(dist_info, egg_info) + os.rename( + os.path.join(egg_info, 'METADATA'), + os.path.join(egg_info, 'PKG-INFO'), + ) + setup_dist = setuptools.Distribution( + attrs=dict( + install_requires=install_requires, + extras_require=extras_require, + ), + ) + # Temporarily disable info traces. + log_threshold = log._global_log.threshold + log.set_threshold(log.WARN) + try: + write_requirements( + setup_dist.get_command_obj('egg_info'), + None, + os.path.join(egg_info, 'requires.txt'), + ) + finally: + log.set_threshold(log_threshold) + + @staticmethod + def _move_data_entries(destination_eggdir, dist_data): + """Move data entries to their correct location.""" + dist_data = os.path.join(destination_eggdir, dist_data) + dist_data_scripts = os.path.join(dist_data, 'scripts') + if os.path.exists(dist_data_scripts): + egg_info_scripts = os.path.join( + destination_eggdir, 'EGG-INFO', 'scripts') + os.mkdir(egg_info_scripts) + for entry in os.listdir(dist_data_scripts): + # Remove bytecode, as it's not properly handled + # during easy_install scripts install phase. 
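+                # Net effect for a hypothetical wheel 'foo-1.0': a script at
+                #   foo-1.0.data/scripts/foo
+                # is moved to
+                #   EGG-INFO/scripts/foo
+                # inside the egg, while stray *.pyc files are deleted rather
+                # than moved.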
+ if entry.endswith('.pyc'): + os.unlink(os.path.join(dist_data_scripts, entry)) + else: + os.rename( + os.path.join(dist_data_scripts, entry), + os.path.join(egg_info_scripts, entry), + ) + os.rmdir(dist_data_scripts) + for subdir in filter(os.path.exists, ( + os.path.join(dist_data, d) + for d in ('data', 'headers', 'purelib', 'platlib') + )): + unpack(subdir, destination_eggdir) + if os.path.exists(dist_data): + os.rmdir(dist_data) + + @staticmethod + def _fix_namespace_packages(egg_info, destination_eggdir): + namespace_packages = os.path.join( + egg_info, 'namespace_packages.txt') + if os.path.exists(namespace_packages): + with open(namespace_packages) as fp: + namespace_packages = fp.read().split() + for mod in namespace_packages: + mod_dir = os.path.join(destination_eggdir, *mod.split('.')) + mod_init = os.path.join(mod_dir, '__init__.py') + if not os.path.exists(mod_dir): + os.mkdir(mod_dir) + if not os.path.exists(mod_init): + with open(mod_init, 'w') as fp: + fp.write(NAMESPACE_PACKAGE_INIT) diff --git a/backend/test/lib/python3.8/site-packages/setuptools/windows_support.py b/backend/test/lib/python3.8/site-packages/setuptools/windows_support.py new file mode 100644 index 0000000000000000000000000000000000000000..cb977cff9545ef5d48ad7cf13f2cbe1ebc3e7cd0 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/setuptools/windows_support.py @@ -0,0 +1,29 @@ +import platform +import ctypes + + +def windows_only(func): + if platform.system() != 'Windows': + return lambda *args, **kwargs: None + return func + + +@windows_only +def hide_file(path): + """ + Set the hidden attribute on a file or directory. + + From http://stackoverflow.com/questions/19622133/ + + `path` must be text. + """ + __import__('ctypes.wintypes') + SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW + SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD + SetFileAttributes.restype = ctypes.wintypes.BOOL + + FILE_ATTRIBUTE_HIDDEN = 0x02 + + ret = SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN) + if not ret: + raise ctypes.WinError() diff --git a/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/INSTALLER b/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/LICENSE.rst b/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/LICENSE.rst new file mode 100644 index 0000000000000000000000000000000000000000..c37cae49ec77ad6ebb25568c1605f1fee5313cfb --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/LICENSE.rst @@ -0,0 +1,28 @@ +Copyright 2007 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/METADATA b/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..7719139e0bb53ea25f5df9bc71eab0b487c38a15 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/METADATA @@ -0,0 +1,118 @@ +Metadata-Version: 2.1 +Name: Werkzeug +Version: 2.3.7 +Summary: The comprehensive WSGI web application library. +Maintainer-email: Pallets <contact@palletsprojects.com> +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application +Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware +Classifier: Topic :: Software Development :: Libraries :: Application Frameworks +Requires-Dist: MarkupSafe>=2.1.1 +Requires-Dist: watchdog>=2.3 ; extra == "watchdog" +Project-URL: Changes, https://werkzeug.palletsprojects.com/changes/ +Project-URL: Chat, https://discord.gg/pallets +Project-URL: Documentation, https://werkzeug.palletsprojects.com/ +Project-URL: Donate, https://palletsprojects.com/donate +Project-URL: Issue Tracker, https://github.com/pallets/werkzeug/issues/ +Project-URL: Source Code, https://github.com/pallets/werkzeug/ +Provides-Extra: watchdog + +Werkzeug +======== + +*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff") + +Werkzeug is a comprehensive `WSGI`_ web application library. It began as +a simple collection of various utilities for WSGI applications and has +become one of the most advanced WSGI utility libraries. + +It includes: + +- An interactive debugger that allows inspecting stack traces and + source code in the browser with an interactive interpreter for any + frame in the stack. +- A full-featured request object with objects to interact with + headers, query args, form data, files, and cookies. +- A response object that can wrap other WSGI applications and handle + streaming data. +- A routing system for matching URLs to endpoints and generating URLs + for endpoints, with an extensible system for capturing variables + from URLs. 
+- HTTP utilities to handle entity tags, cache control, dates, user + agents, cookies, files, and more. +- A threaded WSGI server for use while developing applications + locally. +- A test client for simulating HTTP requests during testing without + requiring running a server. + +Werkzeug doesn't enforce any dependencies. It is up to the developer to +choose a template engine, database adapter, and even how to handle +requests. It can be used to build all sorts of end user applications +such as blogs, wikis, or bulletin boards. + +`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while +providing more structure and patterns for defining powerful +applications. + +.. _WSGI: https://wsgi.readthedocs.io/en/latest/ +.. _Flask: https://www.palletsprojects.com/p/flask/ + + +Installing +---------- + +Install and update using `pip`_: + +.. code-block:: text + + pip install -U Werkzeug + +.. _pip: https://pip.pypa.io/en/stable/getting-started/ + + +A Simple Example +---------------- + +.. code-block:: python + + from werkzeug.wrappers import Request, Response + + @Request.application + def application(request): + return Response('Hello, World!') + + if __name__ == '__main__': + from werkzeug.serving import run_simple + run_simple('localhost', 4000, application) + + +Donate +------ + +The Pallets organization develops and supports Werkzeug and other +popular packages. In order to grow the community of contributors and +users, and allow the maintainers to devote more time to the projects, +`please donate today`_. + +.. _please donate today: https://palletsprojects.com/donate + + +Links +----- + +- Documentation: https://werkzeug.palletsprojects.com/ +- Changes: https://werkzeug.palletsprojects.com/changes/ +- PyPI Releases: https://pypi.org/project/Werkzeug/ +- Source Code: https://github.com/pallets/werkzeug/ +- Issue Tracker: https://github.com/pallets/werkzeug/issues/ +- Chat: https://discord.gg/pallets + diff --git a/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/RECORD b/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..588a9685106048758c90fffc79906357766f8db6 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/RECORD @@ -0,0 +1,125 @@ +werkzeug-2.3.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +werkzeug-2.3.7.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 +werkzeug-2.3.7.dist-info/METADATA,sha256=H_HVr2PWAD0AW95k1Q4-4s4m3vcM693t6Vf4YCh2FPk,4093 +werkzeug-2.3.7.dist-info/RECORD,, +werkzeug-2.3.7.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 +werkzeug/__init__.py,sha256=HjWydmi120RYL45Z8FRvqOcMGhuaKCGi80tfS7zF9GE,188 +werkzeug/__pycache__/__init__.cpython-38.pyc,, +werkzeug/__pycache__/_internal.cpython-38.pyc,, +werkzeug/__pycache__/_reloader.cpython-38.pyc,, +werkzeug/__pycache__/exceptions.cpython-38.pyc,, +werkzeug/__pycache__/formparser.cpython-38.pyc,, +werkzeug/__pycache__/http.cpython-38.pyc,, +werkzeug/__pycache__/local.cpython-38.pyc,, +werkzeug/__pycache__/security.cpython-38.pyc,, +werkzeug/__pycache__/serving.cpython-38.pyc,, +werkzeug/__pycache__/test.cpython-38.pyc,, +werkzeug/__pycache__/testapp.cpython-38.pyc,, +werkzeug/__pycache__/urls.cpython-38.pyc,, +werkzeug/__pycache__/user_agent.cpython-38.pyc,, +werkzeug/__pycache__/utils.cpython-38.pyc,, +werkzeug/__pycache__/wsgi.cpython-38.pyc,, 
+werkzeug/_internal.py,sha256=2AlGqWeqlbt7SlK03s2dg4SamrfsJDyIiVKpz4Og69w,8303 +werkzeug/_reloader.py,sha256=1O1DDWlqVwYIX8kgJwH5B4a_Uh6acQnw3sQf01JpXtM,14745 +werkzeug/datastructures/__init__.py,sha256=yzBdOT9DdK3nraNG49pA3bVsvtPPLx2-t2N8ZmuAd9w,1900 +werkzeug/datastructures/__pycache__/__init__.cpython-38.pyc,, +werkzeug/datastructures/__pycache__/accept.cpython-38.pyc,, +werkzeug/datastructures/__pycache__/auth.cpython-38.pyc,, +werkzeug/datastructures/__pycache__/cache_control.cpython-38.pyc,, +werkzeug/datastructures/__pycache__/csp.cpython-38.pyc,, +werkzeug/datastructures/__pycache__/etag.cpython-38.pyc,, +werkzeug/datastructures/__pycache__/file_storage.cpython-38.pyc,, +werkzeug/datastructures/__pycache__/headers.cpython-38.pyc,, +werkzeug/datastructures/__pycache__/mixins.cpython-38.pyc,, +werkzeug/datastructures/__pycache__/range.cpython-38.pyc,, +werkzeug/datastructures/__pycache__/structures.cpython-38.pyc,, +werkzeug/datastructures/accept.py,sha256=CuCvBAxNzbt4QUb17rH986vvOVGURFUjo0DX2PQy_yI,10670 +werkzeug/datastructures/accept.pyi,sha256=6P114gncjZoy-i_n_3OQy2nJVwjEAIe7PcBxKYqCEfc,1917 +werkzeug/datastructures/auth.py,sha256=WGcJjnFmbDprqlDr54kXaD3zhK0E4NOoQVa4JHYl9iM,16043 +werkzeug/datastructures/cache_control.py,sha256=RTUipZev50s-1TAn2rYGZrytm_6IOIxQd67fkR5bNF0,6043 +werkzeug/datastructures/cache_control.pyi,sha256=6Q93jRysAKMPWRA72OMksyn7d3ZysuxwGlHp_iwF9pA,3756 +werkzeug/datastructures/csp.py,sha256=DAOAO266LK0JKbvlG80bbkAgfrNsnU9HBoz-FdIYNdo,3244 +werkzeug/datastructures/csp.pyi,sha256=AmDWiZU4rrJA4SZmyMNI1L5PLdIfJsI5Li9r5lE1q6M,5765 +werkzeug/datastructures/etag.py,sha256=JsyI-yXayF-hQu26MyFzbHFIZsaQ6odj3RZO_jF-_cc,2913 +werkzeug/datastructures/etag.pyi,sha256=N9cuUBrZnxHmsbW0BBmjKW-djNY7WKbI6t_WopB8Zo0,1047 +werkzeug/datastructures/file_storage.py,sha256=ePeMtr65s_1_sunXMv_SBOiFof5CX5BepYv5_W16fZk,6184 +werkzeug/datastructures/file_storage.pyi,sha256=2sdbKHhvbQF5FjrJuO6l_m1yZvZ4oPCUTspmdmjQlSU,1433 +werkzeug/datastructures/headers.py,sha256=V08N4VTcaA11fRq1WK5v28QomGd-A1S9CmiwugixhWo,18882 +werkzeug/datastructures/headers.pyi,sha256=66Gh9DbD8QNpLRBOuer4DMCj12csddHrcgxiJPLE5n8,4237 +werkzeug/datastructures/mixins.py,sha256=-IQSQ70UOMQlqtJEIyyhplOd4obaTOfzGvka-cunCtM,5337 +werkzeug/datastructures/mixins.pyi,sha256=y92tClxVslJBEGgAwDRsQLExfin2p0x7NfnP_b8w6xc,4191 +werkzeug/datastructures/range.py,sha256=JXSDPseG7iH5giJp3R1SnQC_SqQp634M8Iv6QTsbTxM,5669 +werkzeug/datastructures/range.pyi,sha256=bsM61iNp86gT2lyN0F_Dqg8xsnfPerdmElipuHppiJQ,1792 +werkzeug/datastructures/structures.py,sha256=_bhAf0adEk6WU2uy8jdmuxFMTFcuClY1p7jQ-3wYXj4,31761 +werkzeug/datastructures/structures.pyi,sha256=MRg-RubT3UPjh62i9-7Xht8DVL0zTApRzjs52Hfz_j4,8148 +werkzeug/debug/__init__.py,sha256=WRTLJSvnuK6jlBuQLllTnN57th0HKPjxbS7-d8QJZIc,18760 +werkzeug/debug/__pycache__/__init__.cpython-38.pyc,, +werkzeug/debug/__pycache__/console.cpython-38.pyc,, +werkzeug/debug/__pycache__/repr.cpython-38.pyc,, +werkzeug/debug/__pycache__/tbtools.cpython-38.pyc,, +werkzeug/debug/console.py,sha256=FIO8gDX2eQ1_4MtpJ4s0i2gR4fFCJZTPwhSVByF4kbo,6068 +werkzeug/debug/repr.py,sha256=ECmIpNVlCppTfCuIuEgrJVfuhr8iDqPSWeVJyxt1QOM,9328 +werkzeug/debug/shared/ICON_LICENSE.md,sha256=DhA6Y1gUl5Jwfg0NFN9Rj4VWITt8tUx0IvdGf0ux9-s,222 +werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507 +werkzeug/debug/shared/debugger.js,sha256=FVBBUirz4kKedIbM08QQCYeEoicoSbnm4BnBF4dCYfA,10562 +werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191 
+werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200 +werkzeug/debug/shared/style.css,sha256=-xSxzUEZGw_IqlDR5iZxitNl8LQUjBM-_Y4UAvXVH8g,6078 +werkzeug/debug/tbtools.py,sha256=8Xg7p2JzCC1AMWuse5HYc594OdzC5ToeJbNk49_zZCc,13271 +werkzeug/exceptions.py,sha256=d6VNzGcVgLazIpfwRD8pN_d3yAJNyngBDFvlXQbR-38,26062 +werkzeug/formparser.py,sha256=DZ9BeiHAah3_CuBORNOEipRwE74lHRFX1eK2_3XKcL4,19574 +werkzeug/http.py,sha256=morM5oaClzgpBEjShgUVPlj4asZCvYuZ8WqjVaZzXtQ,48754 +werkzeug/local.py,sha256=Jawgwa7Q7orExtyzVlhbh-4mGWg3v63bspIU5Nnl-DU,22003 +werkzeug/middleware/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +werkzeug/middleware/__pycache__/__init__.cpython-38.pyc,, +werkzeug/middleware/__pycache__/dispatcher.cpython-38.pyc,, +werkzeug/middleware/__pycache__/http_proxy.cpython-38.pyc,, +werkzeug/middleware/__pycache__/lint.cpython-38.pyc,, +werkzeug/middleware/__pycache__/profiler.cpython-38.pyc,, +werkzeug/middleware/__pycache__/proxy_fix.cpython-38.pyc,, +werkzeug/middleware/__pycache__/shared_data.cpython-38.pyc,, +werkzeug/middleware/dispatcher.py,sha256=6ltzPtDsIdLTY_T1GW6kxBJL0KZftbipa_WVdKtpVQ8,2601 +werkzeug/middleware/http_proxy.py,sha256=vsSvt84m656x3mV_Fj78y7O2eYHmurWngErTcjeiz8U,7833 +werkzeug/middleware/lint.py,sha256=6CqcwMWro1p-GRUGPgQ1n21KFnTTqc6-81CGTzpcK74,13916 +werkzeug/middleware/profiler.py,sha256=KKr8nAiF9dr9pNd3G0D3xs7mUba9gvWkyK7X9ceke70,4906 +werkzeug/middleware/proxy_fix.py,sha256=dcOOSjSok2QsSh1VSNsw-a0Vy_Jn5DunlO6PRbXBq0A,6754 +werkzeug/middleware/shared_data.py,sha256=DeM8OouhfhZs8w5T7Wxw-uKuOHXoH0x5RopzxR2RRjI,9513 +werkzeug/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +werkzeug/routing/__init__.py,sha256=HpvahY7WwkLdV4Cq3Bsc3GrqNon4u6t8-vhbb9E5o00,4819 +werkzeug/routing/__pycache__/__init__.cpython-38.pyc,, +werkzeug/routing/__pycache__/converters.cpython-38.pyc,, +werkzeug/routing/__pycache__/exceptions.cpython-38.pyc,, +werkzeug/routing/__pycache__/map.cpython-38.pyc,, +werkzeug/routing/__pycache__/matcher.cpython-38.pyc,, +werkzeug/routing/__pycache__/rules.cpython-38.pyc,, +werkzeug/routing/converters.py,sha256=V8e_wMRop6WG4Kymu4pBIR8OrJl-ZUQUZlinUXfw7WE,7602 +werkzeug/routing/exceptions.py,sha256=yGZ5AUL-buHp-vK8AJbZ0bLIbSckh1UyiGKgRg4ZjaA,4698 +werkzeug/routing/map.py,sha256=2tirw9j5wypzsUT6WBcBNcBTqNp0_iBXnF_1vhY9HjI,37403 +werkzeug/routing/matcher.py,sha256=FyPG45iqR1XwxFujejSqfNEKV7IgbR2td7Jp-ocSASY,7817 +werkzeug/routing/rules.py,sha256=THxBzPRlK87nyf8qoBH96MuHk8G5KiSJkQEay4zZmzY,32058 +werkzeug/sansio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +werkzeug/sansio/__pycache__/__init__.cpython-38.pyc,, +werkzeug/sansio/__pycache__/http.cpython-38.pyc,, +werkzeug/sansio/__pycache__/multipart.cpython-38.pyc,, +werkzeug/sansio/__pycache__/request.cpython-38.pyc,, +werkzeug/sansio/__pycache__/response.cpython-38.pyc,, +werkzeug/sansio/__pycache__/utils.cpython-38.pyc,, +werkzeug/sansio/http.py,sha256=mKTbXo_squCAZKjt9yzfPFV8ZqQbfa6mjdc6XoeLNZ0,6234 +werkzeug/sansio/multipart.py,sha256=XM53Ud4YXicPEqm0HgcS3loMfMCt3NoAx8DmImxaW0g,11113 +werkzeug/sansio/request.py,sha256=wEeVGySwlOfJT5xlgQzjJOe2ksky70CJT75QTzkvfqM,24243 +werkzeug/sansio/response.py,sha256=6DgROSXWG_0XzOrJi_U8PRMNGKX8YV-uHPu0cLrOKsk,29010 +werkzeug/sansio/utils.py,sha256=LYgmrN7yr04ZDVk5flPcUJLo1rDnTzhF04OH3-ujCWQ,4950 +werkzeug/security.py,sha256=gEH8qD5Ykgn6W6PgMx2CQx-iNqJFenXXqOGiWDi_3eE,5814 +werkzeug/serving.py,sha256=Ql_SUZxsmQzN8OZ-hDvKFQ5nRgKh6FEIYwcXVEmD6qU,39224 
+werkzeug/test.py,sha256=xOnp3B6V2MQ0Qn3jL7eMimJna2-zqI04wD_IazDKCto,55733 +werkzeug/testapp.py,sha256=Q7SXVDXeXnnXo7-TWVoAJCTF2GnXxoH-v5_pvjUyTWc,6135 +werkzeug/urls.py,sha256=Uq_cu8TmZFHkQ7t2pp9DNwDvs6wG76jzWPstQIssPVk,45683 +werkzeug/user_agent.py,sha256=lSlLYKCcbzCUSkbdAoO8zPk2UR-8Mdn6iu_iA2kYPBA,1416 +werkzeug/utils.py,sha256=DYkOtfDR_Wc3ro3_peReo9KkUC-6yhOvz27_PUAckbA,24654 +werkzeug/wrappers/__init__.py,sha256=kGyK7rOud3qCxll_jFyW15YarJhj1xtdf3ocx9ZheB8,120 +werkzeug/wrappers/__pycache__/__init__.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/request.cpython-38.pyc,, +werkzeug/wrappers/__pycache__/response.cpython-38.pyc,, +werkzeug/wrappers/request.py,sha256=_PIbgCZ9xfQXC9HEjm-j1R-F4gSPcx5q-QT983mMzbs,24848 +werkzeug/wrappers/response.py,sha256=FfGesquK6cSdPTFZvzV42CM__Ohta2cxNqLBDRkAuKA,32664 +werkzeug/wsgi.py,sha256=PGkhajtHnJj2NqYpYW_T8w17JJbaH8iI0wHHNkPvJKs,29153 diff --git a/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/WHEEL b/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..3b5e64b5e6c4a210201d1676a891fd57b15cda99 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug-2.3.7.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__init__.py b/backend/test/lib/python3.8/site-packages/werkzeug/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d34bfe6391d830f849a2f1c1b21e736bc246bde0 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/__init__.py @@ -0,0 +1,6 @@ +from .serving import run_simple as run_simple +from .test import Client as Client +from .wrappers import Request as Request +from .wrappers import Response as Response + +__version__ = "2.3.7" diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..093604d4f50b81ed212b69b95f809948a3256deb Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/_internal.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/_internal.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eadcea94a26d8e2f1a48935e06a2d9f67b81862f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/_internal.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/_reloader.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/_reloader.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cb6133d99db6f321a4bf26130bba20e55813e291 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/_reloader.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/exceptions.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7783e896e39afad2c0049643c504a6e36276679a Binary files /dev/null and 
b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/exceptions.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/formparser.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/formparser.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1a61ef17d518f440a52dd6928294712200780b4d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/formparser.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/http.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/http.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e42dc10cd039f1d21e4a19a637290d1d4bd2bdb7 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/http.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/local.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/local.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6bffcd7703ddc3c1095d39399f14ef2c3b0b9908 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/local.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/security.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/security.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d8afb524b59085b4ba18c441d3bc684b6ce7db5e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/security.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/serving.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/serving.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0d4c3fd582d32cbfa569e17054141732b56a698c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/serving.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/test.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/test.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a1f1e902b851d9bf480adb41720052443c0a8192 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/test.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/testapp.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/testapp.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..93dba2aeaa03dc8fbc7366db7a409d77b1abbee7 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/testapp.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/urls.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/urls.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1657f94abef7738cdf2dedeb095d48f1c1bd633d Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/urls.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/user_agent.cpython-38.pyc 
b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/user_agent.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a25f86dd06afd0be4f1b35a1af5e01bceabf29fd Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/user_agent.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/utils.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f144aa72891e0e579ac57928eab1c80b3c1384f6 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/utils.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/wsgi.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/wsgi.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b22bd29887972e914435b2edf0e6fc494b35c828 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/__pycache__/wsgi.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/_internal.py b/backend/test/lib/python3.8/site-packages/werkzeug/_internal.py new file mode 100644 index 0000000000000000000000000000000000000000..6ed4d3024bffefc9dcc8a08a25d0176f1e9d40a2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/_internal.py @@ -0,0 +1,330 @@ +from __future__ import annotations + +import logging +import operator +import re +import sys +import typing as t +from datetime import datetime +from datetime import timezone + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIEnvironment + from .wrappers.request import Request + +_logger: logging.Logger | None = None + + +class _Missing: + def __repr__(self) -> str: + return "no value" + + def __reduce__(self) -> str: + return "_missing" + + +_missing = _Missing() + + +@t.overload +def _make_encode_wrapper(reference: str) -> t.Callable[[str], str]: + ... + + +@t.overload +def _make_encode_wrapper(reference: bytes) -> t.Callable[[str], bytes]: + ... + + +def _make_encode_wrapper(reference: t.AnyStr) -> t.Callable[[str], t.AnyStr]: + """Create a function that will be called with a string argument. If + the reference is bytes, values will be encoded to bytes. + """ + if isinstance(reference, str): + return lambda x: x + + return operator.methodcaller("encode", "latin1") + + +def _check_str_tuple(value: tuple[t.AnyStr, ...]) -> None: + """Ensure tuple items are all strings or all bytes.""" + if not value: + return + + item_type = str if isinstance(value[0], str) else bytes + + if any(not isinstance(item, item_type) for item in value): + raise TypeError(f"Cannot mix str and bytes arguments (got {value!r})") + + +_default_encoding = sys.getdefaultencoding() + + +def _to_bytes( + x: str | bytes, charset: str = _default_encoding, errors: str = "strict" +) -> bytes: + if x is None or isinstance(x, bytes): + return x + + if isinstance(x, (bytearray, memoryview)): + return bytes(x) + + if isinstance(x, str): + return x.encode(charset, errors) + + raise TypeError("Expected bytes") + + +@t.overload +def _to_str( # type: ignore + x: None, + charset: str | None = ..., + errors: str = ..., + allow_none_charset: bool = ..., +) -> None: + ... + + +@t.overload +def _to_str( + x: t.Any, + charset: str | None = ..., + errors: str = ..., + allow_none_charset: bool = ..., +) -> str: + ... 
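+# Illustrative behaviour of the implementation below (hypothetical values,
+# assuming the interpreter's default encoding is UTF-8):
+#   _to_str(None)             -> None
+#   _to_str('x')              -> 'x'
+#   _to_str(42)               -> '42'
+#   _to_str(b'caf\xc3\xa9')   -> 'café'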
+ + +def _to_str( + x: t.Any | None, + charset: str | None = _default_encoding, + errors: str = "strict", + allow_none_charset: bool = False, +) -> str | bytes | None: + if x is None or isinstance(x, str): + return x + + if not isinstance(x, (bytes, bytearray)): + return str(x) + + if charset is None: + if allow_none_charset: + return x + + return x.decode(charset, errors) # type: ignore + + +def _wsgi_decoding_dance( + s: str, charset: str = "utf-8", errors: str = "replace" +) -> str: + return s.encode("latin1").decode(charset, errors) + + +def _wsgi_encoding_dance(s: str, charset: str = "utf-8", errors: str = "strict") -> str: + return s.encode(charset).decode("latin1", errors) + + +def _get_environ(obj: WSGIEnvironment | Request) -> WSGIEnvironment: + env = getattr(obj, "environ", obj) + assert isinstance( + env, dict + ), f"{type(obj).__name__!r} is not a WSGI environment (has to be a dict)" + return env + + +def _has_level_handler(logger: logging.Logger) -> bool: + """Check if there is a handler in the logging chain that will handle + the given logger's effective level. + """ + level = logger.getEffectiveLevel() + current = logger + + while current: + if any(handler.level <= level for handler in current.handlers): + return True + + if not current.propagate: + break + + current = current.parent # type: ignore + + return False + + +class _ColorStreamHandler(logging.StreamHandler): + """On Windows, wrap stream with Colorama for ANSI style support.""" + + def __init__(self) -> None: + try: + import colorama + except ImportError: + stream = None + else: + stream = colorama.AnsiToWin32(sys.stderr) + + super().__init__(stream) + + +def _log(type: str, message: str, *args: t.Any, **kwargs: t.Any) -> None: + """Log a message to the 'werkzeug' logger. + + The logger is created the first time it is needed. If there is no + level set, it is set to :data:`logging.INFO`. If there is no handler + for the logger's effective level, a :class:`logging.StreamHandler` + is added. + """ + global _logger + + if _logger is None: + _logger = logging.getLogger("werkzeug") + + if _logger.level == logging.NOTSET: + _logger.setLevel(logging.INFO) + + if not _has_level_handler(_logger): + _logger.addHandler(_ColorStreamHandler()) + + getattr(_logger, type)(message.rstrip(), *args, **kwargs) + + +@t.overload +def _dt_as_utc(dt: None) -> None: + ... + + +@t.overload +def _dt_as_utc(dt: datetime) -> datetime: + ... + + +def _dt_as_utc(dt: datetime | None) -> datetime | None: + if dt is None: + return dt + + if dt.tzinfo is None: + return dt.replace(tzinfo=timezone.utc) + elif dt.tzinfo != timezone.utc: + return dt.astimezone(timezone.utc) + + return dt + + +_TAccessorValue = t.TypeVar("_TAccessorValue") + + +class _DictAccessorProperty(t.Generic[_TAccessorValue]): + """Baseclass for `environ_property` and `header_property`.""" + + read_only = False + + def __init__( + self, + name: str, + default: _TAccessorValue | None = None, + load_func: t.Callable[[str], _TAccessorValue] | None = None, + dump_func: t.Callable[[_TAccessorValue], str] | None = None, + read_only: bool | None = None, + doc: str | None = None, + ) -> None: + self.name = name + self.default = default + self.load_func = load_func + self.dump_func = dump_func + if read_only is not None: + self.read_only = read_only + self.__doc__ = doc + + def lookup(self, instance: t.Any) -> t.MutableMapping[str, t.Any]: + raise NotImplementedError + + @t.overload + def __get__( + self, instance: None, owner: type + ) -> _DictAccessorProperty[_TAccessorValue]: + ... 
+ + @t.overload + def __get__(self, instance: t.Any, owner: type) -> _TAccessorValue: + ... + + def __get__( + self, instance: t.Any | None, owner: type + ) -> _TAccessorValue | _DictAccessorProperty[_TAccessorValue]: + if instance is None: + return self + + storage = self.lookup(instance) + + if self.name not in storage: + return self.default # type: ignore + + value = storage[self.name] + + if self.load_func is not None: + try: + return self.load_func(value) + except (ValueError, TypeError): + return self.default # type: ignore + + return value # type: ignore + + def __set__(self, instance: t.Any, value: _TAccessorValue) -> None: + if self.read_only: + raise AttributeError("read only property") + + if self.dump_func is not None: + self.lookup(instance)[self.name] = self.dump_func(value) + else: + self.lookup(instance)[self.name] = value + + def __delete__(self, instance: t.Any) -> None: + if self.read_only: + raise AttributeError("read only property") + + self.lookup(instance).pop(self.name, None) + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.name}>" + + +def _decode_idna(domain: str) -> str: + try: + data = domain.encode("ascii") + except UnicodeEncodeError: + # If the domain is not ASCII, it's decoded already. + return domain + + try: + # Try decoding in one shot. + return data.decode("idna") + except UnicodeDecodeError: + pass + + # Decode each part separately, leaving invalid parts as punycode. + parts = [] + + for part in data.split(b"."): + try: + parts.append(part.decode("idna")) + except UnicodeDecodeError: + parts.append(part.decode("ascii")) + + return ".".join(parts) + + +_plain_int_re = re.compile(r"-?\d+", re.ASCII) + + +def _plain_int(value: str) -> int: + """Parse an int only if it is only ASCII digits and ``-``. + + This disallows ``+``, ``_``, and non-ASCII digits, which are accepted by ``int`` but + are not allowed in HTTP header values. + + Any leading or trailing whitespace is stripped + """ + value = value.strip() + if _plain_int_re.fullmatch(value) is None: + raise ValueError + + return int(value) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/_reloader.py b/backend/test/lib/python3.8/site-packages/werkzeug/_reloader.py new file mode 100644 index 0000000000000000000000000000000000000000..c8683593f74bda1d1c7e5f479b6e41f54f58d9eb --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/_reloader.py @@ -0,0 +1,458 @@ +from __future__ import annotations + +import fnmatch +import os +import subprocess +import sys +import threading +import time +import typing as t +from itertools import chain +from pathlib import PurePath + +from ._internal import _log + +# The various system prefixes where imports are found. Base values are +# different when running in a virtualenv. All reloaders will ignore the +# base paths (usually the system installation). The stat reloader won't +# scan the virtualenv paths, it will only include modules that are +# already imported. +_ignore_always = tuple({sys.base_prefix, sys.base_exec_prefix}) +prefix = {*_ignore_always, sys.prefix, sys.exec_prefix} + +if hasattr(sys, "real_prefix"): + # virtualenv < 20 + prefix.add(sys.real_prefix) + +_stat_ignore_scan = tuple(prefix) +del prefix +_ignore_common_dirs = { + "__pycache__", + ".git", + ".hg", + ".tox", + ".nox", + ".pytest_cache", + ".mypy_cache", +} + + +def _iter_module_paths() -> t.Iterator[str]: + """Find the filesystem paths associated with imported modules.""" + # List is in case the value is modified by the app while updating. 
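+    # (Iterating over sys.modules directly could raise "RuntimeError:
+    # dictionary changed size during iteration" if another thread imports
+    # something mid-scan; copying the values into a list snapshots them.)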
+ for module in list(sys.modules.values()): + name = getattr(module, "__file__", None) + + if name is None or name.startswith(_ignore_always): + continue + + while not os.path.isfile(name): + # Zip file, find the base file without the module path. + old = name + name = os.path.dirname(name) + + if name == old: # skip if it was all directories somehow + break + else: + yield name + + +def _remove_by_pattern(paths: set[str], exclude_patterns: set[str]) -> None: + for pattern in exclude_patterns: + paths.difference_update(fnmatch.filter(paths, pattern)) + + +def _find_stat_paths( + extra_files: set[str], exclude_patterns: set[str] +) -> t.Iterable[str]: + """Find paths for the stat reloader to watch. Returns imported + module files, Python files under non-system paths. Extra files and + Python files under extra directories can also be scanned. + + System paths have to be excluded for efficiency. Non-system paths, + such as a project root or ``sys.path.insert``, should be the paths + of interest to the user anyway. + """ + paths = set() + + for path in chain(list(sys.path), extra_files): + path = os.path.abspath(path) + + if os.path.isfile(path): + # zip file on sys.path, or extra file + paths.add(path) + continue + + parent_has_py = {os.path.dirname(path): True} + + for root, dirs, files in os.walk(path): + # Optimizations: ignore system prefixes, __pycache__ will + # have a py or pyc module at the import path, ignore some + # common known dirs such as version control and tool caches. + if ( + root.startswith(_stat_ignore_scan) + or os.path.basename(root) in _ignore_common_dirs + ): + dirs.clear() + continue + + has_py = False + + for name in files: + if name.endswith((".py", ".pyc")): + has_py = True + paths.add(os.path.join(root, name)) + + # Optimization: stop scanning a directory if neither it nor + # its parent contained Python files. + if not (has_py or parent_has_py[os.path.dirname(root)]): + dirs.clear() + continue + + parent_has_py[root] = has_py + + paths.update(_iter_module_paths()) + _remove_by_pattern(paths, exclude_patterns) + return paths + + +def _find_watchdog_paths( + extra_files: set[str], exclude_patterns: set[str] +) -> t.Iterable[str]: + """Find paths for the stat reloader to watch. Looks at the same + sources as the stat reloader, but watches everything under + directories instead of individual files. + """ + dirs = set() + + for name in chain(list(sys.path), extra_files): + name = os.path.abspath(name) + + if os.path.isfile(name): + name = os.path.dirname(name) + + dirs.add(name) + + for name in _iter_module_paths(): + dirs.add(os.path.dirname(name)) + + _remove_by_pattern(dirs, exclude_patterns) + return _find_common_roots(dirs) + + +def _find_common_roots(paths: t.Iterable[str]) -> t.Iterable[str]: + root: dict[str, dict] = {} + + for chunks in sorted((PurePath(x).parts for x in paths), key=len, reverse=True): + node = root + + for chunk in chunks: + node = node.setdefault(chunk, {}) + + node.clear() + + rv = set() + + def _walk(node: t.Mapping[str, dict], path: tuple[str, ...]) -> None: + for prefix, child in node.items(): + _walk(child, path + (prefix,)) + + if not node: + rv.add(os.path.join(*path)) + + _walk(root, ()) + return rv + + +def _get_args_for_reloading() -> list[str]: + """Determine how the script was executed, and return the args needed + to execute it again in a new process. + """ + if sys.version_info >= (3, 10): + # sys.orig_argv, added in Python 3.10, contains the exact args used to invoke + # Python. 
Still replace argv[0] with sys.executable for accuracy. + return [sys.executable, *sys.orig_argv[1:]] + + rv = [sys.executable] + py_script = sys.argv[0] + args = sys.argv[1:] + # Need to look at main module to determine how it was executed. + __main__ = sys.modules["__main__"] + + # The value of __package__ indicates how Python was called. It may + # not exist if a setuptools script is installed as an egg. It may be + # set incorrectly for entry points created with pip on Windows. + if getattr(__main__, "__package__", None) is None or ( + os.name == "nt" + and __main__.__package__ == "" + and not os.path.exists(py_script) + and os.path.exists(f"{py_script}.exe") + ): + # Executed a file, like "python app.py". + py_script = os.path.abspath(py_script) + + if os.name == "nt": + # Windows entry points have ".exe" extension and should be + # called directly. + if not os.path.exists(py_script) and os.path.exists(f"{py_script}.exe"): + py_script += ".exe" + + if ( + os.path.splitext(sys.executable)[1] == ".exe" + and os.path.splitext(py_script)[1] == ".exe" + ): + rv.pop(0) + + rv.append(py_script) + else: + # Executed a module, like "python -m werkzeug.serving". + if os.path.isfile(py_script): + # Rewritten by Python from "-m script" to "/path/to/script.py". + py_module = t.cast(str, __main__.__package__) + name = os.path.splitext(os.path.basename(py_script))[0] + + if name != "__main__": + py_module += f".{name}" + else: + # Incorrectly rewritten by pydevd debugger from "-m script" to "script". + py_module = py_script + + rv.extend(("-m", py_module.lstrip("."))) + + rv.extend(args) + return rv + + +class ReloaderLoop: + name = "" + + def __init__( + self, + extra_files: t.Iterable[str] | None = None, + exclude_patterns: t.Iterable[str] | None = None, + interval: int | float = 1, + ) -> None: + self.extra_files: set[str] = {os.path.abspath(x) for x in extra_files or ()} + self.exclude_patterns: set[str] = set(exclude_patterns or ()) + self.interval = interval + + def __enter__(self) -> ReloaderLoop: + """Do any setup, then run one step of the watch to populate the + initial filesystem state. + """ + self.run_step() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): # type: ignore + """Clean up any resources associated with the reloader.""" + pass + + def run(self) -> None: + """Continually run the watch step, sleeping for the configured + interval after each step. + """ + while True: + self.run_step() + time.sleep(self.interval) + + def run_step(self) -> None: + """Run one step for watching the filesystem. Called once to set + up initial state, then repeatedly to update it. + """ + pass + + def restart_with_reloader(self) -> int: + """Spawn a new Python interpreter with the same arguments as the + current one, but running the reloader thread. 
+ """ + while True: + _log("info", f" * Restarting with {self.name}") + args = _get_args_for_reloading() + new_environ = os.environ.copy() + new_environ["WERKZEUG_RUN_MAIN"] = "true" + exit_code = subprocess.call(args, env=new_environ, close_fds=False) + + if exit_code != 3: + return exit_code + + def trigger_reload(self, filename: str) -> None: + self.log_reload(filename) + sys.exit(3) + + def log_reload(self, filename: str) -> None: + filename = os.path.abspath(filename) + _log("info", f" * Detected change in {filename!r}, reloading") + + +class StatReloaderLoop(ReloaderLoop): + name = "stat" + + def __enter__(self) -> ReloaderLoop: + self.mtimes: dict[str, float] = {} + return super().__enter__() + + def run_step(self) -> None: + for name in _find_stat_paths(self.extra_files, self.exclude_patterns): + try: + mtime = os.stat(name).st_mtime + except OSError: + continue + + old_time = self.mtimes.get(name) + + if old_time is None: + self.mtimes[name] = mtime + continue + + if mtime > old_time: + self.trigger_reload(name) + + +class WatchdogReloaderLoop(ReloaderLoop): + def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: + from watchdog.observers import Observer + from watchdog.events import PatternMatchingEventHandler + from watchdog.events import EVENT_TYPE_OPENED + from watchdog.events import FileModifiedEvent + + super().__init__(*args, **kwargs) + trigger_reload = self.trigger_reload + + class EventHandler(PatternMatchingEventHandler): + def on_any_event(self, event: FileModifiedEvent): # type: ignore + if event.event_type == EVENT_TYPE_OPENED: + return + + trigger_reload(event.src_path) + + reloader_name = Observer.__name__.lower() # type: ignore[attr-defined] + + if reloader_name.endswith("observer"): + reloader_name = reloader_name[:-8] + + self.name = f"watchdog ({reloader_name})" + self.observer = Observer() + # Extra patterns can be non-Python files, match them in addition + # to all Python files in default and extra directories. Ignore + # __pycache__ since a change there will always have a change to + # the source file (or initial pyc file) as well. Ignore Git and + # Mercurial internal changes. + extra_patterns = [p for p in self.extra_files if not os.path.isdir(p)] + self.event_handler = EventHandler( + patterns=["*.py", "*.pyc", "*.zip", *extra_patterns], + ignore_patterns=[ + *[f"*/{d}/*" for d in _ignore_common_dirs], + *self.exclude_patterns, + ], + ) + self.should_reload = False + + def trigger_reload(self, filename: str) -> None: + # This is called inside an event handler, which means throwing + # SystemExit has no effect. + # https://github.com/gorakhargosh/watchdog/issues/294 + self.should_reload = True + self.log_reload(filename) + + def __enter__(self) -> ReloaderLoop: + self.watches: dict[str, t.Any] = {} + self.observer.start() + return super().__enter__() + + def __exit__(self, exc_type, exc_val, exc_tb): # type: ignore + self.observer.stop() + self.observer.join() + + def run(self) -> None: + while not self.should_reload: + self.run_step() + time.sleep(self.interval) + + sys.exit(3) + + def run_step(self) -> None: + to_delete = set(self.watches) + + for path in _find_watchdog_paths(self.extra_files, self.exclude_patterns): + if path not in self.watches: + try: + self.watches[path] = self.observer.schedule( + self.event_handler, path, recursive=True + ) + except OSError: + # Clear this path from list of watches We don't want + # the same error message showing again in the next + # iteration. 
+ self.watches[path] = None + + to_delete.discard(path) + + for path in to_delete: + watch = self.watches.pop(path, None) + + if watch is not None: + self.observer.unschedule(watch) + + +reloader_loops: dict[str, type[ReloaderLoop]] = { + "stat": StatReloaderLoop, + "watchdog": WatchdogReloaderLoop, +} + +try: + __import__("watchdog.observers") +except ImportError: + reloader_loops["auto"] = reloader_loops["stat"] +else: + reloader_loops["auto"] = reloader_loops["watchdog"] + + +def ensure_echo_on() -> None: + """Ensure that echo mode is enabled. Some tools such as PDB disable + it which causes usability issues after a reload.""" + # tcgetattr will fail if stdin isn't a tty + if sys.stdin is None or not sys.stdin.isatty(): + return + + try: + import termios + except ImportError: + return + + attributes = termios.tcgetattr(sys.stdin) + + if not attributes[3] & termios.ECHO: + attributes[3] |= termios.ECHO + termios.tcsetattr(sys.stdin, termios.TCSANOW, attributes) + + +def run_with_reloader( + main_func: t.Callable[[], None], + extra_files: t.Iterable[str] | None = None, + exclude_patterns: t.Iterable[str] | None = None, + interval: int | float = 1, + reloader_type: str = "auto", +) -> None: + """Run the given function in an independent Python interpreter.""" + import signal + + signal.signal(signal.SIGTERM, lambda *args: sys.exit(0)) + reloader = reloader_loops[reloader_type]( + extra_files=extra_files, exclude_patterns=exclude_patterns, interval=interval + ) + + try: + if os.environ.get("WERKZEUG_RUN_MAIN") == "true": + ensure_echo_on() + t = threading.Thread(target=main_func, args=()) + t.daemon = True + + # Enter the reloader to set up initial state, then start + # the app thread and reloader update loop. + with reloader: + t.start() + reloader.run() + else: + sys.exit(reloader.restart_with_reloader()) + except KeyboardInterrupt: + pass diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__init__.py b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..846ffce678461d6f87bf1a01b3312f2ed895e440 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__init__.py @@ -0,0 +1,34 @@ +from .accept import Accept as Accept +from .accept import CharsetAccept as CharsetAccept +from .accept import LanguageAccept as LanguageAccept +from .accept import MIMEAccept as MIMEAccept +from .auth import Authorization as Authorization +from .auth import WWWAuthenticate as WWWAuthenticate +from .cache_control import RequestCacheControl as RequestCacheControl +from .cache_control import ResponseCacheControl as ResponseCacheControl +from .csp import ContentSecurityPolicy as ContentSecurityPolicy +from .etag import ETags as ETags +from .file_storage import FileMultiDict as FileMultiDict +from .file_storage import FileStorage as FileStorage +from .headers import EnvironHeaders as EnvironHeaders +from .headers import Headers as Headers +from .mixins import ImmutableDictMixin as ImmutableDictMixin +from .mixins import ImmutableHeadersMixin as ImmutableHeadersMixin +from .mixins import ImmutableListMixin as ImmutableListMixin +from .mixins import ImmutableMultiDictMixin as ImmutableMultiDictMixin +from .mixins import UpdateDictMixin as UpdateDictMixin +from .range import ContentRange as ContentRange +from .range import IfRange as IfRange +from .range import Range as Range +from .structures import CallbackDict as CallbackDict +from .structures import CombinedMultiDict as 
CombinedMultiDict +from .structures import HeaderSet as HeaderSet +from .structures import ImmutableDict as ImmutableDict +from .structures import ImmutableList as ImmutableList +from .structures import ImmutableMultiDict as ImmutableMultiDict +from .structures import ImmutableOrderedMultiDict as ImmutableOrderedMultiDict +from .structures import ImmutableTypeConversionDict as ImmutableTypeConversionDict +from .structures import iter_multi_items as iter_multi_items +from .structures import MultiDict as MultiDict +from .structures import OrderedMultiDict as OrderedMultiDict +from .structures import TypeConversionDict as TypeConversionDict diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0ad323a9cac253d3347d9d6d436f8cf429633231 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/accept.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/accept.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8ad14448cc7cd67bda96b4b147e9e5ffbddefd18 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/accept.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/auth.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/auth.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..605cce70b776fd484cb0305230e1d9d89dcb8f12 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/auth.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/cache_control.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/cache_control.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eef8c47c73a9d95a48438fd5c7bf8de60096256f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/cache_control.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/csp.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/csp.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1778861f945da1c7924c19bf078653149c97b0ef Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/csp.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/etag.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/etag.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e16c64e49282128d10f0cfa77d714e2d0619d5b8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/etag.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/file_storage.cpython-38.pyc 
b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/file_storage.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4cf742444ebecb0b9dd3505cc28315abec59c15d
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/file_storage.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/headers.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/headers.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..05f563bf24cda4358f371c807869d3e10f7a5e5a
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/headers.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/mixins.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/mixins.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..032326e462ae071369e49af58e7377c85a0af480
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/mixins.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/range.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/range.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7fe3b22d0612b2a4aa24290407ec9669c675956a
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/range.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/structures.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/structures.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3ce278faca1eaa47393635cba43d6f2fd9ddada5
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/__pycache__/structures.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/accept.py b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/accept.py
new file mode 100644
index 0000000000000000000000000000000000000000..d80f0bbb850e9b0f967fe7a1dc2b4975527bd350
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/accept.py
@@ -0,0 +1,326 @@
+from __future__ import annotations
+
+import codecs
+import re
+
+from .structures import ImmutableList
+
+
+class Accept(ImmutableList):
+    """An :class:`Accept` object is just a list subclass for lists of
+    ``(value, quality)`` tuples. It is automatically sorted by specificity
+    and quality.
+
+    All :class:`Accept` objects work similar to a list but provide extra
+    functionality for working with the data. Containment checks are
+    normalized to the rules of that header:
+
+    >>> a = CharsetAccept([('ISO-8859-1', 1), ('utf-8', 0.7)])
+    >>> a.best
+    'ISO-8859-1'
+    >>> 'iso-8859-1' in a
+    True
+    >>> 'UTF8' in a
+    True
+    >>> 'utf7' in a
+    False
+
+    To get the quality for an item you can use normal item lookup:
+
+    >>> print(a['utf-8'])
+    0.7
+    >>> a['utf7']
+    0
+
+    .. versionchanged:: 0.5
+       :class:`Accept` objects are forced immutable now.
+
+    ..
versionchanged:: 1.0.0 + :class:`Accept` internal values are no longer ordered + alphabetically for equal quality tags. Instead the initial + order is preserved. + + """ + + def __init__(self, values=()): + if values is None: + list.__init__(self) + self.provided = False + elif isinstance(values, Accept): + self.provided = values.provided + list.__init__(self, values) + else: + self.provided = True + values = sorted( + values, key=lambda x: (self._specificity(x[0]), x[1]), reverse=True + ) + list.__init__(self, values) + + def _specificity(self, value): + """Returns a tuple describing the value's specificity.""" + return (value != "*",) + + def _value_matches(self, value, item): + """Check if a value matches a given accept item.""" + return item == "*" or item.lower() == value.lower() + + def __getitem__(self, key): + """Besides index lookup (getting item n) you can also pass it a string + to get the quality for the item. If the item is not in the list, the + returned quality is ``0``. + """ + if isinstance(key, str): + return self.quality(key) + return list.__getitem__(self, key) + + def quality(self, key): + """Returns the quality of the key. + + .. versionadded:: 0.6 + In previous versions you had to use the item-lookup syntax + (eg: ``obj[key]`` instead of ``obj.quality(key)``) + """ + for item, quality in self: + if self._value_matches(key, item): + return quality + return 0 + + def __contains__(self, value): + for item, _quality in self: + if self._value_matches(value, item): + return True + return False + + def __repr__(self): + pairs_str = ", ".join(f"({x!r}, {y})" for x, y in self) + return f"{type(self).__name__}([{pairs_str}])" + + def index(self, key): + """Get the position of an entry or raise :exc:`ValueError`. + + :param key: The key to be looked up. + + .. versionchanged:: 0.5 + This used to raise :exc:`IndexError`, which was inconsistent + with the list API. + """ + if isinstance(key, str): + for idx, (item, _quality) in enumerate(self): + if self._value_matches(key, item): + return idx + raise ValueError(key) + return list.index(self, key) + + def find(self, key): + """Get the position of an entry or return -1. + + :param key: The key to be looked up. + """ + try: + return self.index(key) + except ValueError: + return -1 + + def values(self): + """Iterate over all values.""" + for item in self: + yield item[0] + + def to_header(self): + """Convert the header set into an HTTP header string.""" + result = [] + for value, quality in self: + if quality != 1: + value = f"{value};q={quality}" + result.append(value) + return ",".join(result) + + def __str__(self): + return self.to_header() + + def _best_single_match(self, match): + for client_item, quality in self: + if self._value_matches(match, client_item): + # self is sorted by specificity descending, we can exit + return client_item, quality + return None + + def best_match(self, matches, default=None): + """Returns the best match from a list of possible matches based + on the specificity and quality of the client. If two items have the + same quality and specificity, the one is returned that comes first. 
+ + :param matches: a list of matches to check for + :param default: the value that is returned if none match + """ + result = default + best_quality = -1 + best_specificity = (-1,) + for server_item in matches: + match = self._best_single_match(server_item) + if not match: + continue + client_item, quality = match + specificity = self._specificity(client_item) + if quality <= 0 or quality < best_quality: + continue + # better quality or same quality but more specific => better match + if quality > best_quality or specificity > best_specificity: + result = server_item + best_quality = quality + best_specificity = specificity + return result + + @property + def best(self): + """The best match as value.""" + if self: + return self[0][0] + + +_mime_split_re = re.compile(r"/|(?:\s*;\s*)") + + +def _normalize_mime(value): + return _mime_split_re.split(value.lower()) + + +class MIMEAccept(Accept): + """Like :class:`Accept` but with special methods and behavior for + mimetypes. + """ + + def _specificity(self, value): + return tuple(x != "*" for x in _mime_split_re.split(value)) + + def _value_matches(self, value, item): + # item comes from the client, can't match if it's invalid. + if "/" not in item: + return False + + # value comes from the application, tell the developer when it + # doesn't look valid. + if "/" not in value: + raise ValueError(f"invalid mimetype {value!r}") + + # Split the match value into type, subtype, and a sorted list of parameters. + normalized_value = _normalize_mime(value) + value_type, value_subtype = normalized_value[:2] + value_params = sorted(normalized_value[2:]) + + # "*/*" is the only valid value that can start with "*". + if value_type == "*" and value_subtype != "*": + raise ValueError(f"invalid mimetype {value!r}") + + # Split the accept item into type, subtype, and parameters. + normalized_item = _normalize_mime(item) + item_type, item_subtype = normalized_item[:2] + item_params = sorted(normalized_item[2:]) + + # "*/not-*" from the client is invalid, can't match. + if item_type == "*" and item_subtype != "*": + return False + + return ( + (item_type == "*" and item_subtype == "*") + or (value_type == "*" and value_subtype == "*") + ) or ( + item_type == value_type + and ( + item_subtype == "*" + or value_subtype == "*" + or (item_subtype == value_subtype and item_params == value_params) + ) + ) + + @property + def accept_html(self): + """True if this object accepts HTML.""" + return ( + "text/html" in self or "application/xhtml+xml" in self or self.accept_xhtml + ) + + @property + def accept_xhtml(self): + """True if this object accepts XHTML.""" + return "application/xhtml+xml" in self or "application/xml" in self + + @property + def accept_json(self): + """True if this object accepts JSON.""" + return "application/json" in self + + +_locale_delim_re = re.compile(r"[_-]") + + +def _normalize_lang(value): + """Process a language tag for matching.""" + return _locale_delim_re.split(value.lower()) + + +class LanguageAccept(Accept): + """Like :class:`Accept` but with normalization for language tags.""" + + def _value_matches(self, value, item): + return item == "*" or _normalize_lang(value) == _normalize_lang(item) + + def best_match(self, matches, default=None): + """Given a list of supported values, finds the best match from + the list of accepted values. + + Language tags are normalized for the purpose of matching, but + are returned unchanged. 
+ + If no exact match is found, this will fall back to matching + the first subtag (primary language only), first with the + accepted values then with the match values. This partial is not + applied to any other language subtags. + + The default is returned if no exact or fallback match is found. + + :param matches: A list of supported languages to find a match. + :param default: The value that is returned if none match. + """ + # Look for an exact match first. If a client accepts "en-US", + # "en-US" is a valid match at this point. + result = super().best_match(matches) + + if result is not None: + return result + + # Fall back to accepting primary tags. If a client accepts + # "en-US", "en" is a valid match at this point. Need to use + # re.split to account for 2 or 3 letter codes. + fallback = Accept( + [(_locale_delim_re.split(item[0], 1)[0], item[1]) for item in self] + ) + result = fallback.best_match(matches) + + if result is not None: + return result + + # Fall back to matching primary tags. If the client accepts + # "en", "en-US" is a valid match at this point. + fallback_matches = [_locale_delim_re.split(item, 1)[0] for item in matches] + result = super().best_match(fallback_matches) + + # Return a value from the original match list. Find the first + # original value that starts with the matched primary tag. + if result is not None: + return next(item for item in matches if item.startswith(result)) + + return default + + +class CharsetAccept(Accept): + """Like :class:`Accept` but with normalization for charsets.""" + + def _value_matches(self, value, item): + def _normalize(name): + try: + return codecs.lookup(name).name + except LookupError: + return name.lower() + + return item == "*" or _normalize(value) == _normalize(item) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/accept.pyi b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/accept.pyi new file mode 100644 index 0000000000000000000000000000000000000000..4b74dd9505d9045f369a061ece565400887fb296 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/accept.pyi @@ -0,0 +1,54 @@ +from collections.abc import Iterable +from collections.abc import Iterator +from typing import overload + +from .structures import ImmutableList + +class Accept(ImmutableList[tuple[str, int]]): + provided: bool + def __init__( + self, values: Accept | Iterable[tuple[str, float]] | None = None + ) -> None: ... + def _specificity(self, value: str) -> tuple[bool, ...]: ... + def _value_matches(self, value: str, item: str) -> bool: ... + @overload # type: ignore + def __getitem__(self, key: str) -> int: ... + @overload + def __getitem__(self, key: int) -> tuple[str, int]: ... + @overload + def __getitem__(self, key: slice) -> Iterable[tuple[str, int]]: ... + def quality(self, key: str) -> int: ... + def __contains__(self, value: str) -> bool: ... # type: ignore + def index(self, key: str) -> int: ... # type: ignore + def find(self, key: str) -> int: ... + def values(self) -> Iterator[str]: ... + def to_header(self) -> str: ... + def _best_single_match(self, match: str) -> tuple[str, int] | None: ... + @overload + def best_match(self, matches: Iterable[str], default: str) -> str: ... + @overload + def best_match( + self, matches: Iterable[str], default: str | None = None + ) -> str | None: ... + @property + def best(self) -> str: ... + +def _normalize_mime(value: str) -> list[str]: ... + +class MIMEAccept(Accept): + def _specificity(self, value: str) -> tuple[bool, ...]: ... 
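+    # Editor's note (illustrative doctest, not part of the stubs): at runtime
+    # MIMEAccept matches wildcard subtypes in either direction, e.g.
+    #
+    #     >>> a = MIMEAccept([("text/html", 1), ("application/*", 0.8)])
+    #     >>> a.best_match(["application/json", "text/plain"])
+    #     'application/json'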
+ def _value_matches(self, value: str, item: str) -> bool: ... + @property + def accept_html(self) -> bool: ... + @property + def accept_xhtml(self) -> bool: ... + @property + def accept_json(self) -> bool: ... + +def _normalize_lang(value: str) -> list[str]: ... + +class LanguageAccept(Accept): + def _value_matches(self, value: str, item: str) -> bool: ... + +class CharsetAccept(Accept): + def _value_matches(self, value: str, item: str) -> bool: ... diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/auth.py b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/auth.py new file mode 100644 index 0000000000000000000000000000000000000000..2f2515020c038a8b8efa6f321169cf792b5bf765 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/auth.py @@ -0,0 +1,510 @@ +from __future__ import annotations + +import base64 +import binascii +import typing as t +import warnings +from functools import wraps + +from ..http import dump_header +from ..http import parse_dict_header +from ..http import parse_set_header +from ..http import quote_header_value +from .structures import CallbackDict +from .structures import HeaderSet + +if t.TYPE_CHECKING: + import typing_extensions as te + + +class Authorization: + """Represents the parts of an ``Authorization`` request header. + + :attr:`.Request.authorization` returns an instance if the header is set. + + An instance can be used with the test :class:`.Client` request methods' ``auth`` + parameter to send the header in test requests. + + Depending on the auth scheme, either :attr:`parameters` or :attr:`token` will be + set. The ``Basic`` scheme's token is decoded into the ``username`` and ``password`` + parameters. + + For convenience, ``auth["key"]`` and ``auth.key`` both access the key in the + :attr:`parameters` dict, along with ``auth.get("key")`` and ``"key" in auth``. + + .. versionchanged:: 2.3 + The ``token`` parameter and attribute was added to support auth schemes that use + a token instead of parameters, such as ``Bearer``. + + .. versionchanged:: 2.3 + The object is no longer a ``dict``. + + .. versionchanged:: 0.5 + The object is an immutable dict. + """ + + def __init__( + self, + auth_type: str, + data: dict[str, str] | None = None, + token: str | None = None, + ) -> None: + self.type = auth_type + """The authorization scheme, like ``basic``, ``digest``, or ``bearer``.""" + + if data is None: + data = {} + + self.parameters = data + """A dict of parameters parsed from the header. Either this or :attr:`token` + will have a value for a given scheme. + """ + + self.token = token + """A token parsed from the header. Either this or :attr:`parameters` will have a + value for a given scheme. + + .. versionadded:: 2.3 + """ + + def __getattr__(self, name: str) -> str | None: + return self.parameters.get(name) + + def __getitem__(self, name: str) -> str | None: + return self.parameters.get(name) + + def get(self, key: str, default: str | None = None) -> str | None: + return self.parameters.get(key, default) + + def __contains__(self, key: str) -> bool: + return key in self.parameters + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Authorization): + return NotImplemented + + return ( + other.type == self.type + and other.token == self.token + and other.parameters == self.parameters + ) + + @classmethod + def from_header(cls, value: str | None) -> te.Self | None: + """Parse an ``Authorization`` header value and return an instance, or ``None`` + if the value is empty. 
+ + :param value: The header value to parse. + + .. versionadded:: 2.3 + """ + if not value: + return None + + scheme, _, rest = value.partition(" ") + scheme = scheme.lower() + rest = rest.strip() + + if scheme == "basic": + try: + username, _, password = base64.b64decode(rest).decode().partition(":") + except (binascii.Error, UnicodeError): + return None + + return cls(scheme, {"username": username, "password": password}) + + if "=" in rest.rstrip("="): + # = that is not trailing, this is parameters. + return cls(scheme, parse_dict_header(rest), None) + + # No = or only trailing =, this is a token. + return cls(scheme, None, rest) + + def to_header(self) -> str: + """Produce an ``Authorization`` header value representing this data. + + .. versionadded:: 2.0 + """ + if self.type == "basic": + value = base64.b64encode( + f"{self.username}:{self.password}".encode() + ).decode("utf8") + return f"Basic {value}" + + if self.token is not None: + return f"{self.type.title()} {self.token}" + + return f"{self.type.title()} {dump_header(self.parameters)}" + + def __str__(self) -> str: + return self.to_header() + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.to_header()}>" + + +def auth_property(name: str, doc: str | None = None) -> property: + """A static helper function for Authentication subclasses to add + extra authentication system properties onto a class:: + + class FooAuthenticate(WWWAuthenticate): + special_realm = auth_property('special_realm') + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. + """ + warnings.warn( + "'auth_property' is deprecated and will be removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + + def _set_value(self, value): # type: ignore[no-untyped-def] + if value is None: + self.pop(name, None) + else: + self[name] = str(value) + + return property(lambda x: x.get(name), _set_value, doc=doc) + + +class WWWAuthenticate: + """Represents the parts of a ``WWW-Authenticate`` response header. + + Set :attr:`.Response.www_authenticate` to an instance of list of instances to set + values for this header in the response. Modifying this instance will modify the + header value. + + Depending on the auth scheme, either :attr:`parameters` or :attr:`token` should be + set. The ``Basic`` scheme will encode ``username`` and ``password`` parameters to a + token. + + For convenience, ``auth["key"]`` and ``auth.key`` both act on the :attr:`parameters` + dict, and can be used to get, set, or delete parameters. ``auth.get("key")`` and + ``"key" in auth`` are also provided. + + .. versionchanged:: 2.3 + The ``token`` parameter and attribute was added to support auth schemes that use + a token instead of parameters, such as ``Bearer``. + + .. versionchanged:: 2.3 + The object is no longer a ``dict``. + + .. versionchanged:: 2.3 + The ``on_update`` parameter was removed. + """ + + def __init__( + self, + auth_type: str | None = None, + values: dict[str, str] | None = None, + token: str | None = None, + ): + if auth_type is None: + warnings.warn( + "An auth type must be given as the first parameter. 
Assuming 'basic' is"
+                " deprecated and will be removed in Werkzeug 3.0.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+            auth_type = "basic"
+
+        self._type = auth_type.lower()
+        self._parameters: dict[str, str] = CallbackDict(  # type: ignore[misc]
+            values, lambda _: self._trigger_on_update()
+        )
+        self._token = token
+        self._on_update: t.Callable[[WWWAuthenticate], None] | None = None
+
+    def _trigger_on_update(self) -> None:
+        if self._on_update is not None:
+            self._on_update(self)
+
+    @property
+    def type(self) -> str:
+        """The authorization scheme, like ``basic``, ``digest``, or ``bearer``."""
+        return self._type
+
+    @type.setter
+    def type(self, value: str) -> None:
+        self._type = value
+        self._trigger_on_update()
+
+    @property
+    def parameters(self) -> dict[str, str]:
+        """A dict of parameters for the header. Only one of this or :attr:`token` should
+        have a value for a given scheme.
+        """
+        return self._parameters
+
+    @parameters.setter
+    def parameters(self, value: dict[str, str]) -> None:
+        self._parameters = CallbackDict(  # type: ignore[misc]
+            value, lambda _: self._trigger_on_update()
+        )
+        self._trigger_on_update()
+
+    @property
+    def token(self) -> str | None:
+        """A token for the header. Only one of this or :attr:`parameters` should
+        have a value for a given scheme.
+
+        .. versionadded:: 2.3
+        """
+        return self._token
+
+    @token.setter
+    def token(self, value: str | None) -> None:
+        self._token = value
+        self._trigger_on_update()
+
+    def set_basic(self, realm: str = "authentication required") -> None:
+        """Clear any existing data and set a ``Basic`` challenge.
+
+        .. deprecated:: 2.3
+            Will be removed in Werkzeug 3.0. Create and assign an instance instead.
+        """
+        warnings.warn(
+            "The 'set_basic' method is deprecated and will be removed in Werkzeug 3.0."
+            " Create and assign an instance instead."
+        )
+        self._type = "basic"
+        dict.clear(self.parameters)  # type: ignore[arg-type]
+        dict.update(
+            self.parameters,  # type: ignore[arg-type]
+            {"realm": realm},  # type: ignore[dict-item]
+        )
+        self._token = None
+        self._trigger_on_update()
+
+    def set_digest(
+        self,
+        realm: str,
+        nonce: str,
+        qop: t.Sequence[str] = ("auth",),
+        opaque: str | None = None,
+        algorithm: str | None = None,
+        stale: bool = False,
+    ) -> None:
+        """Clear any existing data and set a ``Digest`` challenge.
+
+        .. deprecated:: 2.3
+            Will be removed in Werkzeug 3.0. Create and assign an instance instead.
+        """
+        warnings.warn(
+            "The 'set_digest' method is deprecated and will be removed in Werkzeug 3.0."
+            " Create and assign an instance instead."
+ ) + self._type = "digest" + dict.clear(self.parameters) # type: ignore[arg-type] + parameters = { + "realm": realm, + "nonce": nonce, + "qop": ", ".join(qop), + "stale": "TRUE" if stale else "FALSE", + } + + if opaque is not None: + parameters["opaque"] = opaque + + if algorithm is not None: + parameters["algorithm"] = algorithm + + dict.update(self.parameters, parameters) # type: ignore[arg-type] + self._token = None + self._trigger_on_update() + + def __getitem__(self, key: str) -> str | None: + return self.parameters.get(key) + + def __setitem__(self, key: str, value: str | None) -> None: + if value is None: + if key in self.parameters: + del self.parameters[key] + else: + self.parameters[key] = value + + self._trigger_on_update() + + def __delitem__(self, key: str) -> None: + if key in self.parameters: + del self.parameters[key] + self._trigger_on_update() + + def __getattr__(self, name: str) -> str | None: + return self[name] + + def __setattr__(self, name: str, value: str | None) -> None: + if name in {"_type", "_parameters", "_token", "_on_update"}: + super().__setattr__(name, value) + else: + self[name] = value + + def __delattr__(self, name: str) -> None: + del self[name] + + def __contains__(self, key: str) -> bool: + return key in self.parameters + + def __eq__(self, other: object) -> bool: + if not isinstance(other, WWWAuthenticate): + return NotImplemented + + return ( + other.type == self.type + and other.token == self.token + and other.parameters == self.parameters + ) + + def get(self, key: str, default: str | None = None) -> str | None: + return self.parameters.get(key, default) + + @classmethod + def from_header(cls, value: str | None) -> te.Self | None: + """Parse a ``WWW-Authenticate`` header value and return an instance, or ``None`` + if the value is empty. + + :param value: The header value to parse. + + .. versionadded:: 2.3 + """ + if not value: + return None + + scheme, _, rest = value.partition(" ") + scheme = scheme.lower() + rest = rest.strip() + + if "=" in rest.rstrip("="): + # = that is not trailing, this is parameters. + return cls(scheme, parse_dict_header(rest), None) + + # No = or only trailing =, this is a token. + return cls(scheme, None, rest) + + def to_header(self) -> str: + """Produce a ``WWW-Authenticate`` header value representing this data.""" + if self.token is not None: + return f"{self.type.title()} {self.token}" + + if self.type == "digest": + items = [] + + for key, value in self.parameters.items(): + if key in {"realm", "domain", "nonce", "opaque", "qop"}: + value = quote_header_value(value, allow_token=False) + else: + value = quote_header_value(value) + + items.append(f"{key}={value}") + + return f"Digest {', '.join(items)}" + + return f"{self.type.title()} {dump_header(self.parameters)}" + + def __str__(self) -> str: + return self.to_header() + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.to_header()}>" + + @property + def qop(self) -> set[str]: + """The ``qop`` parameter as a set. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. It will become the same as other + parameters, returning a string. + """ + warnings.warn( + "The 'qop' property is deprecated and will be removed in Werkzeug 3.0." 
+ " It will become the same as other parameters, returning a string.", + DeprecationWarning, + stacklevel=2, + ) + + def on_update(value: HeaderSet) -> None: + if not value: + if "qop" in self: + del self["qop"] + + return + + self.parameters["qop"] = value.to_header() + + return parse_set_header(self.parameters.get("qop"), on_update) + + @property + def stale(self) -> bool | None: + """The ``stale`` parameter as a boolean. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. It will become the same as other + parameters, returning a string. + """ + warnings.warn( + "The 'stale' property is deprecated and will be removed in Werkzeug 3.0." + " It will become the same as other parameters, returning a string.", + DeprecationWarning, + stacklevel=2, + ) + + if "stale" in self.parameters: + return self.parameters["stale"].lower() == "true" + + return None + + @stale.setter + def stale(self, value: bool | str | None) -> None: + if value is None: + if "stale" in self.parameters: + del self.parameters["stale"] + + return + + if isinstance(value, bool): + warnings.warn( + "Setting the 'stale' property to a boolean is deprecated and will be" + " removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + self.parameters["stale"] = "TRUE" if value else "FALSE" + else: + self.parameters["stale"] = value + + auth_property = staticmethod(auth_property) + + +def _deprecated_dict_method(f): # type: ignore[no-untyped-def] + @wraps(f) + def wrapper(*args, **kwargs): # type: ignore[no-untyped-def] + warnings.warn( + "Treating 'Authorization' and 'WWWAuthenticate' as a dict is deprecated and" + " will be removed in Werkzeug 3.0. Use the 'parameters' attribute instead.", + DeprecationWarning, + stacklevel=2, + ) + return f(*args, **kwargs) + + return wrapper + + +for name in ( + "__iter__", + "clear", + "copy", + "items", + "keys", + "pop", + "popitem", + "setdefault", + "update", + "values", +): + f = _deprecated_dict_method(getattr(dict, name)) + setattr(Authorization, name, f) + setattr(WWWAuthenticate, name, f) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/cache_control.py b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/cache_control.py new file mode 100644 index 0000000000000000000000000000000000000000..bff4c18bbd5eaaf21c61561e6067ebba8f57b2d5 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/cache_control.py @@ -0,0 +1,175 @@ +from __future__ import annotations + +from .mixins import ImmutableDictMixin +from .mixins import UpdateDictMixin + + +def cache_control_property(key, empty, type): + """Return a new property object for a cache header. Useful if you + want to add support for a cache extension in a subclass. + + .. versionchanged:: 2.0 + Renamed from ``cache_property``. + """ + return property( + lambda x: x._get_cache_value(key, empty, type), + lambda x, v: x._set_cache_value(key, v, type), + lambda x: x._del_cache_value(key), + f"accessor for {key!r}", + ) + + +class _CacheControl(UpdateDictMixin, dict): + """Subclass of a dict that stores values for a Cache-Control header. It + has accessors for all the cache-control directives specified in RFC 2616. + The class does not differentiate between request and response directives. + + Because the cache-control directives in the HTTP header use dashes the + python descriptors use underscores for that. + + To get a header of the :class:`CacheControl` object again you can convert + the object into a string or call the :meth:`to_header` method. 
If you plan + to subclass it and add your own items have a look at the sourcecode for + that class. + + .. versionchanged:: 2.1.0 + Setting int properties such as ``max_age`` will convert the + value to an int. + + .. versionchanged:: 0.4 + + Setting `no_cache` or `private` to boolean `True` will set the implicit + none-value which is ``*``: + + >>> cc = ResponseCacheControl() + >>> cc.no_cache = True + >>> cc + <ResponseCacheControl 'no-cache'> + >>> cc.no_cache + '*' + >>> cc.no_cache = None + >>> cc + <ResponseCacheControl ''> + + In versions before 0.5 the behavior documented here affected the now + no longer existing `CacheControl` class. + """ + + no_cache = cache_control_property("no-cache", "*", None) + no_store = cache_control_property("no-store", None, bool) + max_age = cache_control_property("max-age", -1, int) + no_transform = cache_control_property("no-transform", None, None) + + def __init__(self, values=(), on_update=None): + dict.__init__(self, values or ()) + self.on_update = on_update + self.provided = values is not None + + def _get_cache_value(self, key, empty, type): + """Used internally by the accessor properties.""" + if type is bool: + return key in self + if key in self: + value = self[key] + if value is None: + return empty + elif type is not None: + try: + value = type(value) + except ValueError: + pass + return value + return None + + def _set_cache_value(self, key, value, type): + """Used internally by the accessor properties.""" + if type is bool: + if value: + self[key] = None + else: + self.pop(key, None) + else: + if value is None: + self.pop(key, None) + elif value is True: + self[key] = None + else: + if type is not None: + self[key] = type(value) + else: + self[key] = value + + def _del_cache_value(self, key): + """Used internally by the accessor properties.""" + if key in self: + del self[key] + + def to_header(self): + """Convert the stored values into a cache control header.""" + return http.dump_header(self) + + def __str__(self): + return self.to_header() + + def __repr__(self): + kv_str = " ".join(f"{k}={v!r}" for k, v in sorted(self.items())) + return f"<{type(self).__name__} {kv_str}>" + + cache_property = staticmethod(cache_control_property) + + +class RequestCacheControl(ImmutableDictMixin, _CacheControl): + """A cache control for requests. This is immutable and gives access + to all the request-relevant cache control headers. + + To get a header of the :class:`RequestCacheControl` object again you can + convert the object into a string or call the :meth:`to_header` method. If + you plan to subclass it and add your own items have a look at the sourcecode + for that class. + + .. versionchanged:: 2.1.0 + Setting int properties such as ``max_age`` will convert the + value to an int. + + .. versionadded:: 0.5 + In previous versions a `CacheControl` class existed that was used + both for request and response. + """ + + max_stale = cache_control_property("max-stale", "*", int) + min_fresh = cache_control_property("min-fresh", "*", int) + only_if_cached = cache_control_property("only-if-cached", None, bool) + + +class ResponseCacheControl(_CacheControl): + """A cache control for responses. Unlike :class:`RequestCacheControl` + this is mutable and gives access to response-relevant cache control + headers. + + To get a header of the :class:`ResponseCacheControl` object again you can + convert the object into a string or call the :meth:`to_header` method. If + you plan to subclass it and add your own items have a look at the sourcecode + for that class. 
+ + .. versionchanged:: 2.1.1 + ``s_maxage`` converts the value to an int. + + .. versionchanged:: 2.1.0 + Setting int properties such as ``max_age`` will convert the + value to an int. + + .. versionadded:: 0.5 + In previous versions a `CacheControl` class existed that was used + both for request and response. + """ + + public = cache_control_property("public", None, bool) + private = cache_control_property("private", "*", None) + must_revalidate = cache_control_property("must-revalidate", None, bool) + proxy_revalidate = cache_control_property("proxy-revalidate", None, bool) + s_maxage = cache_control_property("s-maxage", None, int) + immutable = cache_control_property("immutable", None, bool) + + +# circular dependencies +from .. import http diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/cache_control.pyi b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/cache_control.pyi new file mode 100644 index 0000000000000000000000000000000000000000..06fe667a24d8aad8533462f790fb6193d7540973 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/cache_control.pyi @@ -0,0 +1,109 @@ +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Mapping +from typing import TypeVar + +from .mixins import ImmutableDictMixin +from .mixins import UpdateDictMixin + +T = TypeVar("T") +_CPT = TypeVar("_CPT", str, int, bool) +_OptCPT = _CPT | None + +def cache_control_property(key: str, empty: _OptCPT, type: type[_CPT]) -> property: ... + +class _CacheControl(UpdateDictMixin[str, _OptCPT], dict[str, _OptCPT]): + provided: bool + def __init__( + self, + values: Mapping[str, _OptCPT] | Iterable[tuple[str, _OptCPT]] = (), + on_update: Callable[[_CacheControl], None] | None = None, + ) -> None: ... + @property + def no_cache(self) -> bool | None: ... + @no_cache.setter + def no_cache(self, value: bool | None) -> None: ... + @no_cache.deleter + def no_cache(self) -> None: ... + @property + def no_store(self) -> bool | None: ... + @no_store.setter + def no_store(self, value: bool | None) -> None: ... + @no_store.deleter + def no_store(self) -> None: ... + @property + def max_age(self) -> int | None: ... + @max_age.setter + def max_age(self, value: int | None) -> None: ... + @max_age.deleter + def max_age(self) -> None: ... + @property + def no_transform(self) -> bool | None: ... + @no_transform.setter + def no_transform(self, value: bool | None) -> None: ... + @no_transform.deleter + def no_transform(self) -> None: ... + def _get_cache_value(self, key: str, empty: T | None, type: type[T]) -> T: ... + def _set_cache_value(self, key: str, value: T | None, type: type[T]) -> None: ... + def _del_cache_value(self, key: str) -> None: ... + def to_header(self) -> str: ... + @staticmethod + def cache_property(key: str, empty: _OptCPT, type: type[_CPT]) -> property: ... + +class RequestCacheControl(ImmutableDictMixin[str, _OptCPT], _CacheControl): + @property + def max_stale(self) -> int | None: ... + @max_stale.setter + def max_stale(self, value: int | None) -> None: ... + @max_stale.deleter + def max_stale(self) -> None: ... + @property + def min_fresh(self) -> int | None: ... + @min_fresh.setter + def min_fresh(self, value: int | None) -> None: ... + @min_fresh.deleter + def min_fresh(self) -> None: ... + @property + def only_if_cached(self) -> bool | None: ... + @only_if_cached.setter + def only_if_cached(self, value: bool | None) -> None: ... 
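+    # Editor's note (illustrative, assumes the runtime class): directives whose
+    # declared `empty` marker is "*" report that marker when present without a
+    # value, e.g.
+    #
+    #     >>> cc = RequestCacheControl([("max-stale", None)])
+    #     >>> cc.max_stale
+    #     '*'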
+ @only_if_cached.deleter + def only_if_cached(self) -> None: ... + +class ResponseCacheControl(_CacheControl): + @property + def public(self) -> bool | None: ... + @public.setter + def public(self, value: bool | None) -> None: ... + @public.deleter + def public(self) -> None: ... + @property + def private(self) -> bool | None: ... + @private.setter + def private(self, value: bool | None) -> None: ... + @private.deleter + def private(self) -> None: ... + @property + def must_revalidate(self) -> bool | None: ... + @must_revalidate.setter + def must_revalidate(self, value: bool | None) -> None: ... + @must_revalidate.deleter + def must_revalidate(self) -> None: ... + @property + def proxy_revalidate(self) -> bool | None: ... + @proxy_revalidate.setter + def proxy_revalidate(self, value: bool | None) -> None: ... + @proxy_revalidate.deleter + def proxy_revalidate(self) -> None: ... + @property + def s_maxage(self) -> int | None: ... + @s_maxage.setter + def s_maxage(self, value: int | None) -> None: ... + @s_maxage.deleter + def s_maxage(self) -> None: ... + @property + def immutable(self) -> bool | None: ... + @immutable.setter + def immutable(self, value: bool | None) -> None: ... + @immutable.deleter + def immutable(self) -> None: ... diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/csp.py b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/csp.py new file mode 100644 index 0000000000000000000000000000000000000000..dde9414951ce7ea6847ddeb6c252fc510dd49240 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/csp.py @@ -0,0 +1,94 @@ +from __future__ import annotations + +from .mixins import UpdateDictMixin + + +def csp_property(key): + """Return a new property object for a content security policy header. + Useful if you want to add support for a csp extension in a + subclass. + """ + return property( + lambda x: x._get_value(key), + lambda x, v: x._set_value(key, v), + lambda x: x._del_value(key), + f"accessor for {key!r}", + ) + + +class ContentSecurityPolicy(UpdateDictMixin, dict): + """Subclass of a dict that stores values for a Content Security Policy + header. It has accessors for all the level 3 policies. + + Because the csp directives in the HTTP header use dashes the + python descriptors use underscores for that. + + To get a header of the :class:`ContentSecuirtyPolicy` object again + you can convert the object into a string or call the + :meth:`to_header` method. If you plan to subclass it and add your + own items have a look at the sourcecode for that class. + + .. versionadded:: 1.0.0 + Support for Content Security Policy headers was added. 
+ + """ + + base_uri = csp_property("base-uri") + child_src = csp_property("child-src") + connect_src = csp_property("connect-src") + default_src = csp_property("default-src") + font_src = csp_property("font-src") + form_action = csp_property("form-action") + frame_ancestors = csp_property("frame-ancestors") + frame_src = csp_property("frame-src") + img_src = csp_property("img-src") + manifest_src = csp_property("manifest-src") + media_src = csp_property("media-src") + navigate_to = csp_property("navigate-to") + object_src = csp_property("object-src") + prefetch_src = csp_property("prefetch-src") + plugin_types = csp_property("plugin-types") + report_to = csp_property("report-to") + report_uri = csp_property("report-uri") + sandbox = csp_property("sandbox") + script_src = csp_property("script-src") + script_src_attr = csp_property("script-src-attr") + script_src_elem = csp_property("script-src-elem") + style_src = csp_property("style-src") + style_src_attr = csp_property("style-src-attr") + style_src_elem = csp_property("style-src-elem") + worker_src = csp_property("worker-src") + + def __init__(self, values=(), on_update=None): + dict.__init__(self, values or ()) + self.on_update = on_update + self.provided = values is not None + + def _get_value(self, key): + """Used internally by the accessor properties.""" + return self.get(key) + + def _set_value(self, key, value): + """Used internally by the accessor properties.""" + if value is None: + self.pop(key, None) + else: + self[key] = value + + def _del_value(self, key): + """Used internally by the accessor properties.""" + if key in self: + del self[key] + + def to_header(self): + """Convert the stored values into a cache control header.""" + from ..http import dump_csp_header + + return dump_csp_header(self) + + def __str__(self): + return self.to_header() + + def __repr__(self): + kv_str = " ".join(f"{k}={v!r}" for k, v in sorted(self.items())) + return f"<{type(self).__name__} {kv_str}>" diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/csp.pyi b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/csp.pyi new file mode 100644 index 0000000000000000000000000000000000000000..f9e2ac0f463713aa15abce736ca11df54105c0dc --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/csp.pyi @@ -0,0 +1,169 @@ +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Mapping + +from .mixins import UpdateDictMixin + +def csp_property(key: str) -> property: ... + +class ContentSecurityPolicy(UpdateDictMixin[str, str], dict[str, str]): + @property + def base_uri(self) -> str | None: ... + @base_uri.setter + def base_uri(self, value: str | None) -> None: ... + @base_uri.deleter + def base_uri(self) -> None: ... + @property + def child_src(self) -> str | None: ... + @child_src.setter + def child_src(self, value: str | None) -> None: ... + @child_src.deleter + def child_src(self) -> None: ... + @property + def connect_src(self) -> str | None: ... + @connect_src.setter + def connect_src(self, value: str | None) -> None: ... + @connect_src.deleter + def connect_src(self) -> None: ... + @property + def default_src(self) -> str | None: ... + @default_src.setter + def default_src(self, value: str | None) -> None: ... + @default_src.deleter + def default_src(self) -> None: ... + @property + def font_src(self) -> str | None: ... + @font_src.setter + def font_src(self, value: str | None) -> None: ... 
+ @font_src.deleter + def font_src(self) -> None: ... + @property + def form_action(self) -> str | None: ... + @form_action.setter + def form_action(self, value: str | None) -> None: ... + @form_action.deleter + def form_action(self) -> None: ... + @property + def frame_ancestors(self) -> str | None: ... + @frame_ancestors.setter + def frame_ancestors(self, value: str | None) -> None: ... + @frame_ancestors.deleter + def frame_ancestors(self) -> None: ... + @property + def frame_src(self) -> str | None: ... + @frame_src.setter + def frame_src(self, value: str | None) -> None: ... + @frame_src.deleter + def frame_src(self) -> None: ... + @property + def img_src(self) -> str | None: ... + @img_src.setter + def img_src(self, value: str | None) -> None: ... + @img_src.deleter + def img_src(self) -> None: ... + @property + def manifest_src(self) -> str | None: ... + @manifest_src.setter + def manifest_src(self, value: str | None) -> None: ... + @manifest_src.deleter + def manifest_src(self) -> None: ... + @property + def media_src(self) -> str | None: ... + @media_src.setter + def media_src(self, value: str | None) -> None: ... + @media_src.deleter + def media_src(self) -> None: ... + @property + def navigate_to(self) -> str | None: ... + @navigate_to.setter + def navigate_to(self, value: str | None) -> None: ... + @navigate_to.deleter + def navigate_to(self) -> None: ... + @property + def object_src(self) -> str | None: ... + @object_src.setter + def object_src(self, value: str | None) -> None: ... + @object_src.deleter + def object_src(self) -> None: ... + @property + def prefetch_src(self) -> str | None: ... + @prefetch_src.setter + def prefetch_src(self, value: str | None) -> None: ... + @prefetch_src.deleter + def prefetch_src(self) -> None: ... + @property + def plugin_types(self) -> str | None: ... + @plugin_types.setter + def plugin_types(self, value: str | None) -> None: ... + @plugin_types.deleter + def plugin_types(self) -> None: ... + @property + def report_to(self) -> str | None: ... + @report_to.setter + def report_to(self, value: str | None) -> None: ... + @report_to.deleter + def report_to(self) -> None: ... + @property + def report_uri(self) -> str | None: ... + @report_uri.setter + def report_uri(self, value: str | None) -> None: ... + @report_uri.deleter + def report_uri(self) -> None: ... + @property + def sandbox(self) -> str | None: ... + @sandbox.setter + def sandbox(self, value: str | None) -> None: ... + @sandbox.deleter + def sandbox(self) -> None: ... + @property + def script_src(self) -> str | None: ... + @script_src.setter + def script_src(self, value: str | None) -> None: ... + @script_src.deleter + def script_src(self) -> None: ... + @property + def script_src_attr(self) -> str | None: ... + @script_src_attr.setter + def script_src_attr(self, value: str | None) -> None: ... + @script_src_attr.deleter + def script_src_attr(self) -> None: ... + @property + def script_src_elem(self) -> str | None: ... + @script_src_elem.setter + def script_src_elem(self, value: str | None) -> None: ... + @script_src_elem.deleter + def script_src_elem(self) -> None: ... + @property + def style_src(self) -> str | None: ... + @style_src.setter + def style_src(self, value: str | None) -> None: ... + @style_src.deleter + def style_src(self) -> None: ... + @property + def style_src_attr(self) -> str | None: ... + @style_src_attr.setter + def style_src_attr(self, value: str | None) -> None: ... + @style_src_attr.deleter + def style_src_attr(self) -> None: ... 
+    @style_src_attr.deleter
+    def style_src_attr(self) -> None: ...
+    @property
+    def style_src_elem(self) -> str | None: ...
+    @style_src_elem.setter
+    def style_src_elem(self, value: str | None) -> None: ...
+    @style_src_elem.deleter
+    def style_src_elem(self) -> None: ...
+    @property
+    def worker_src(self) -> str | None: ...
+    @worker_src.setter
+    def worker_src(self, value: str | None) -> None: ...
+    @worker_src.deleter
+    def worker_src(self) -> None: ...
+    provided: bool
+    def __init__(
+        self,
+        values: Mapping[str, str] | Iterable[tuple[str, str]] = (),
+        on_update: Callable[[ContentSecurityPolicy], None] | None = None,
+    ) -> None: ...
+    def _get_value(self, key: str) -> str | None: ...
+    def _set_value(self, key: str, value: str) -> None: ...
+    def _del_value(self, key: str) -> None: ...
+    def to_header(self) -> str: ...
diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/etag.py b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/etag.py
new file mode 100644
index 0000000000000000000000000000000000000000..747d9966ddc7c8ea034c07aacb7dcb3499eaa1f8
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/etag.py
@@ -0,0 +1,95 @@
+from __future__ import annotations
+
+from collections.abc import Collection
+
+
+class ETags(Collection):
+    """A set that can be used to check if one etag is present in a collection
+    of etags.
+    """
+
+    def __init__(self, strong_etags=None, weak_etags=None, star_tag=False):
+        if not star_tag and strong_etags:
+            self._strong = frozenset(strong_etags)
+        else:
+            self._strong = frozenset()
+
+        self._weak = frozenset(weak_etags or ())
+        self.star_tag = star_tag
+
+    def as_set(self, include_weak=False):
+        """Convert the `ETags` object into a Python set. By default, the
+        weak etags are not part of this set."""
+        rv = set(self._strong)
+        if include_weak:
+            rv.update(self._weak)
+        return rv
+
+    def is_weak(self, etag):
+        """Check if an etag is weak."""
+        return etag in self._weak
+
+    def is_strong(self, etag):
+        """Check if an etag is strong."""
+        return etag in self._strong
+
+    def contains_weak(self, etag):
+        """Check if an etag is part of the set including weak and strong tags."""
+        return self.is_weak(etag) or self.contains(etag)
+
+    def contains(self, etag):
+        """Check if an etag is part of the set ignoring weak tags.
+        It is also possible to use the ``in`` operator.
+        """
+        if self.star_tag:
+            return True
+        return self.is_strong(etag)
+
+    def contains_raw(self, etag):
+        """When passed a quoted tag it will check if this tag is part of the
+        set. If the tag is weak it is checked against weak and strong tags,
+        otherwise strong only."""
+        from ..http import unquote_etag
+
+        etag, weak = unquote_etag(etag)
+        if weak:
+            return self.contains_weak(etag)
+        return self.contains(etag)
+
+    def to_header(self):
+        """Convert the etags set into an HTTP header string."""
+        if self.star_tag:
+            return "*"
+        return ", ".join(
+            [f'"{x}"' for x in self._strong] + [f'W/"{x}"' for x in self._weak]
+        )
+
+    def __call__(self, etag=None, data=None, include_weak=False):
+        if [etag, data].count(None) != 1:
+            raise TypeError("exactly one of etag or data must be provided")
+        if etag is None:
+            from ..http import generate_etag
+
+            etag = generate_etag(data)
+        if include_weak:
+            if etag in self._weak:
+                return True
+        return etag in self._strong
+
+    def __bool__(self):
+        return bool(self.star_tag or self._strong or self._weak)
+
+    def __str__(self):
+        return self.to_header()
+
+    def __len__(self):
+        return len(self._strong)
+
+    def __iter__(self):
+        return iter(self._strong)
+
+    def __contains__(self, etag):
+        return self.contains(etag)
+
+    def __repr__(self):
+        return f"<{type(self).__name__} {str(self)!r}>"
diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/etag.pyi b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/etag.pyi
new file mode 100644
index 0000000000000000000000000000000000000000..88e54f1548b993cc7bfdffec8216942fb5510dfd
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/etag.pyi
@@ -0,0 +1,30 @@
+from collections.abc import Collection
+from collections.abc import Iterable
+from collections.abc import Iterator
+
+class ETags(Collection[str]):
+    _strong: frozenset[str]
+    _weak: frozenset[str]
+    star_tag: bool
+    def __init__(
+        self,
+        strong_etags: Iterable[str] | None = None,
+        weak_etags: Iterable[str] | None = None,
+        star_tag: bool = False,
+    ) -> None: ...
+    def as_set(self, include_weak: bool = False) -> set[str]: ...
+    def is_weak(self, etag: str) -> bool: ...
+    def is_strong(self, etag: str) -> bool: ...
+    def contains_weak(self, etag: str) -> bool: ...
+    def contains(self, etag: str) -> bool: ...
+    def contains_raw(self, etag: str) -> bool: ...
+    def to_header(self) -> str: ...
+    def __call__(
+        self,
+        etag: str | None = None,
+        data: bytes | None = None,
+        include_weak: bool = False,
+    ) -> bool: ...
+    def __len__(self) -> int: ...
+    def __iter__(self) -> Iterator[str]: ...
+    def __contains__(self, item: str) -> bool: ...  # type: ignore
diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/file_storage.py b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/file_storage.py
new file mode 100644
index 0000000000000000000000000000000000000000..e878a56d4f435e471d550dd77192c7bdc3a125ba
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/file_storage.py
@@ -0,0 +1,196 @@
+from __future__ import annotations
+
+import mimetypes
+from io import BytesIO
+from os import fsdecode
+from os import fspath
+
+from .._internal import _plain_int
+from .structures import MultiDict
+
+
+class FileStorage:
+    """The :class:`FileStorage` class is a thin wrapper over incoming files.
+    It is used by the request object to represent uploaded files. All the
+    attributes of the wrapper stream are proxied by the file storage so
+    it's possible to do ``storage.read()`` instead of the long form
+    ``storage.stream.read()``.
+ """ + + def __init__( + self, + stream=None, + filename=None, + name=None, + content_type=None, + content_length=None, + headers=None, + ): + self.name = name + self.stream = stream or BytesIO() + + # If no filename is provided, attempt to get the filename from + # the stream object. Python names special streams like + # ``<stderr>`` with angular brackets, skip these streams. + if filename is None: + filename = getattr(stream, "name", None) + + if filename is not None: + filename = fsdecode(filename) + + if filename and filename[0] == "<" and filename[-1] == ">": + filename = None + else: + filename = fsdecode(filename) + + self.filename = filename + + if headers is None: + from .headers import Headers + + headers = Headers() + self.headers = headers + if content_type is not None: + headers["Content-Type"] = content_type + if content_length is not None: + headers["Content-Length"] = str(content_length) + + def _parse_content_type(self): + if not hasattr(self, "_parsed_content_type"): + self._parsed_content_type = http.parse_options_header(self.content_type) + + @property + def content_type(self): + """The content-type sent in the header. Usually not available""" + return self.headers.get("content-type") + + @property + def content_length(self): + """The content-length sent in the header. Usually not available""" + if "content-length" in self.headers: + try: + return _plain_int(self.headers["content-length"]) + except ValueError: + pass + + return 0 + + @property + def mimetype(self): + """Like :attr:`content_type`, but without parameters (eg, without + charset, type etc.) and always lowercase. For example if the content + type is ``text/HTML; charset=utf-8`` the mimetype would be + ``'text/html'``. + + .. versionadded:: 0.7 + """ + self._parse_content_type() + return self._parsed_content_type[0].lower() + + @property + def mimetype_params(self): + """The mimetype parameters as dict. For example if the content + type is ``text/html; charset=utf-8`` the params would be + ``{'charset': 'utf-8'}``. + + .. versionadded:: 0.7 + """ + self._parse_content_type() + return self._parsed_content_type[1] + + def save(self, dst, buffer_size=16384): + """Save the file to a destination path or file object. If the + destination is a file object you have to close it yourself after the + call. The buffer size is the number of bytes held in memory during + the copy process. It defaults to 16KB. + + For secure file saving also have a look at :func:`secure_filename`. + + :param dst: a filename, :class:`os.PathLike`, or open file + object to write to. + :param buffer_size: Passed as the ``length`` parameter of + :func:`shutil.copyfileobj`. + + .. versionchanged:: 1.0 + Supports :mod:`pathlib`. + """ + from shutil import copyfileobj + + close_dst = False + + if hasattr(dst, "__fspath__"): + dst = fspath(dst) + + if isinstance(dst, str): + dst = open(dst, "wb") + close_dst = True + + try: + copyfileobj(self.stream, dst, buffer_size) + finally: + if close_dst: + dst.close() + + def close(self): + """Close the underlying file if possible.""" + try: + self.stream.close() + except Exception: + pass + + def __bool__(self): + return bool(self.filename) + + def __getattr__(self, name): + try: + return getattr(self.stream, name) + except AttributeError: + # SpooledTemporaryFile doesn't implement IOBase, get the + # attribute from its backing file instead. 
+ # https://github.com/python/cpython/pull/3249 + if hasattr(self.stream, "_file"): + return getattr(self.stream._file, name) + raise + + def __iter__(self): + return iter(self.stream) + + def __repr__(self): + return f"<{type(self).__name__}: {self.filename!r} ({self.content_type!r})>" + + +class FileMultiDict(MultiDict): + """A special :class:`MultiDict` that has convenience methods to add + files to it. This is used for :class:`EnvironBuilder` and generally + useful for unittesting. + + .. versionadded:: 0.5 + """ + + def add_file(self, name, file, filename=None, content_type=None): + """Adds a new file to the dict. `file` can be a file name or + a :class:`file`-like or a :class:`FileStorage` object. + + :param name: the name of the field. + :param file: a filename or :class:`file`-like object + :param filename: an optional filename + :param content_type: an optional content type + """ + if isinstance(file, FileStorage): + value = file + else: + if isinstance(file, str): + if filename is None: + filename = file + file = open(file, "rb") + if filename and content_type is None: + content_type = ( + mimetypes.guess_type(filename)[0] or "application/octet-stream" + ) + value = FileStorage(file, filename, name, content_type) + + self.add(name, value) + + +# circular dependencies +from .. import http diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/file_storage.pyi b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/file_storage.pyi new file mode 100644 index 0000000000000000000000000000000000000000..730789e3549115e37b6c03c2d3532fc5e2fb4239 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/file_storage.pyi @@ -0,0 +1,47 @@ +from collections.abc import Iterator +from os import PathLike +from typing import Any +from typing import IO + +from .headers import Headers +from .structures import MultiDict + +class FileStorage: + name: str | None + stream: IO[bytes] + filename: str | None + headers: Headers + _parsed_content_type: tuple[str, dict[str, str]] + def __init__( + self, + stream: IO[bytes] | None = None, + filename: str | PathLike | None = None, + name: str | None = None, + content_type: str | None = None, + content_length: int | None = None, + headers: Headers | None = None, + ) -> None: ... + def _parse_content_type(self) -> None: ... + @property + def content_type(self) -> str: ... + @property + def content_length(self) -> int: ... + @property + def mimetype(self) -> str: ... + @property + def mimetype_params(self) -> dict[str, str]: ... + def save(self, dst: str | PathLike | IO[bytes], buffer_size: int = ...) -> None: ... + def close(self) -> None: ... + def __bool__(self) -> bool: ... + def __getattr__(self, name: str) -> Any: ... + def __iter__(self) -> Iterator[bytes]: ... + def __repr__(self) -> str: ... + +class FileMultiDict(MultiDict[str, FileStorage]): + def add_file( + self, + name: str, + file: FileStorage | str | IO[bytes], + filename: str | None = None, + content_type: str | None = None, + ) -> None: ... 
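Note: the `FileStorage` wrapper and `FileMultiDict.add_file` added above are easiest to see in a short usage sketch. The following example is illustrative commentary, not part of the diff; it assumes only the vendored werkzeug package, and the field name `"upload"` and the `/tmp/copy.bin` path are made up for the demonstration.

from io import BytesIO
from werkzeug.datastructures import FileMultiDict, FileStorage

# Wrap an in-memory stream the way a request would wrap an uploaded file.
fs = FileStorage(
    stream=BytesIO(b"hello"),
    filename="hello.txt",
    content_type="text/plain; charset=utf-8",
)
print(fs.mimetype)         # 'text/plain' (lowercased, parameters stripped)
print(fs.mimetype_params)  # {'charset': 'utf-8'}
fs.save("/tmp/copy.bin")   # copies the stream via shutil.copyfileobj

# FileMultiDict.add_file guesses the content type from the filename
# when none is given.
files = FileMultiDict()
files.add_file("upload", BytesIO(b"data"), filename="report.pdf")
print(files["upload"].content_type)  # 'application/pdf'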
diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/headers.py b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/headers.py new file mode 100644 index 0000000000000000000000000000000000000000..dc060c41e30c1284705e08272a6eb306a1494e5c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/headers.py @@ -0,0 +1,566 @@ +from __future__ import annotations + +import re +import typing as t +import warnings + +from .._internal import _missing +from ..exceptions import BadRequestKeyError +from .mixins import ImmutableHeadersMixin +from .structures import iter_multi_items +from .structures import MultiDict + + +class Headers: + """An object that stores some headers. It has a dict-like interface, + but is ordered, can store the same key multiple times, and iterating + yields ``(key, value)`` pairs instead of only keys. + + This data structure is useful if you want a nicer way to handle WSGI + headers which are stored as tuples in a list. + + From Werkzeug 0.3 onwards, the :exc:`KeyError` raised by this class is + also a subclass of the :class:`~exceptions.BadRequest` HTTP exception + and will render a page for a ``400 BAD REQUEST`` if caught in a + catch-all for HTTP exceptions. + + Headers is mostly compatible with the Python :class:`wsgiref.headers.Headers` + class, with the exception of `__getitem__`. :mod:`wsgiref` will return + `None` for ``headers['missing']``, whereas :class:`Headers` will raise + a :class:`KeyError`. + + To create a new ``Headers`` object, pass it a list, dict, or + other ``Headers`` object with default values. These values are + validated the same way values added later are. + + :param defaults: The list of default values for the :class:`Headers`. + + .. versionchanged:: 2.1.0 + Default values are validated the same as values added later. + + .. versionchanged:: 0.9 + This data structure now stores unicode values similar to how the + multi dicts do it. The main difference is that bytes can be set as + well which will automatically be latin1 decoded. + + .. versionchanged:: 0.9 + The :meth:`linked` function was removed without replacement as it + was an API that does not support the changes to the encoding model. + """ + + def __init__(self, defaults=None): + self._list = [] + if defaults is not None: + self.extend(defaults) + + def __getitem__(self, key, _get_mode=False): + if not _get_mode: + if isinstance(key, int): + return self._list[key] + elif isinstance(key, slice): + return self.__class__(self._list[key]) + if not isinstance(key, str): + raise BadRequestKeyError(key) + ikey = key.lower() + for k, v in self._list: + if k.lower() == ikey: + return v + # micro optimization: if we are in get mode we will catch that + # exception one stack level down so we can raise a standard + # key error instead of our special one. + if _get_mode: + raise KeyError() + raise BadRequestKeyError(key) + + def __eq__(self, other): + def lowered(item): + return (item[0].lower(),) + item[1:] + + return other.__class__ is self.__class__ and set( + map(lowered, other._list) + ) == set(map(lowered, self._list)) + + __hash__ = None + + def get(self, key, default=None, type=None, as_bytes=None): + """Return the default value if the requested data doesn't exist. + If `type` is provided and is a callable it should convert the value, + return it or raise a :exc:`ValueError` if that is not possible. 
In + this case the function will return the default as if the value was not + found: + + >>> d = Headers([('Content-Length', '42')]) + >>> d.get('Content-Length', type=int) + 42 + + :param key: The key to be looked up. + :param default: The default value to be returned if the key can't + be looked up. If not further specified `None` is + returned. + :param type: A callable that is used to cast the value in the + :class:`Headers`. If a :exc:`ValueError` is raised + by this callable the default value is returned. + + .. versionchanged:: 2.3 + The ``as_bytes`` parameter is deprecated and will be removed + in Werkzeug 3.0. + + .. versionchanged:: 0.9 + The ``as_bytes`` parameter was added. + """ + if as_bytes is not None: + warnings.warn( + "The 'as_bytes' parameter is deprecated and will be" + " removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + + try: + rv = self.__getitem__(key, _get_mode=True) + except KeyError: + return default + if as_bytes: + rv = rv.encode("latin1") + if type is None: + return rv + try: + return type(rv) + except ValueError: + return default + + def getlist(self, key, type=None, as_bytes=None): + """Return the list of items for a given key. If that key is not in the + :class:`Headers`, the return value will be an empty list. Just like + :meth:`get`, :meth:`getlist` accepts a `type` parameter. All items will + be converted with the callable defined there. + + :param key: The key to be looked up. + :param type: A callable that is used to cast the value in the + :class:`Headers`. If a :exc:`ValueError` is raised + by this callable the value will be removed from the list. + :return: a :class:`list` of all the values for the key. + + .. versionchanged:: 2.3 + The ``as_bytes`` parameter is deprecated and will be removed + in Werkzeug 3.0. + + .. versionchanged:: 0.9 + The ``as_bytes`` parameter was added. + """ + if as_bytes is not None: + warnings.warn( + "The 'as_bytes' parameter is deprecated and will be" + " removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + + ikey = key.lower() + result = [] + for k, v in self: + if k.lower() == ikey: + if as_bytes: + v = v.encode("latin1") + if type is not None: + try: + v = type(v) + except ValueError: + continue + result.append(v) + return result + + def get_all(self, name): + """Return a list of all the values for the named field. + + This method is compatible with the :mod:`wsgiref` + :meth:`~wsgiref.headers.Headers.get_all` method. + """ + return self.getlist(name) + + def items(self, lower=False): + for key, value in self: + if lower: + key = key.lower() + yield key, value + + def keys(self, lower=False): + for key, _ in self.items(lower): + yield key + + def values(self): + for _, value in self.items(): + yield value + + def extend(self, *args, **kwargs): + """Extend headers in this object with items from another object + containing header items as well as keyword arguments. + + To replace existing keys instead of extending, use + :meth:`update` instead. + + If provided, the first argument can be another :class:`Headers` + object, a :class:`MultiDict`, :class:`dict`, or iterable of + pairs. + + .. versionchanged:: 1.0 + Support :class:`MultiDict`. Allow passing ``kwargs``. 
+
+        """
+        if len(args) > 1:
+            raise TypeError(f"extend expected at most 1 argument, got {len(args)}")
+
+        if args:
+            for key, value in iter_multi_items(args[0]):
+                self.add(key, value)
+
+        for key, value in iter_multi_items(kwargs):
+            self.add(key, value)
+
+    def __delitem__(self, key, _index_operation=True):
+        if _index_operation and isinstance(key, (int, slice)):
+            del self._list[key]
+            return
+        key = key.lower()
+        new = []
+        for k, v in self._list:
+            if k.lower() != key:
+                new.append((k, v))
+        self._list[:] = new
+
+    def remove(self, key):
+        """Remove a key.
+
+        :param key: The key to be removed.
+        """
+        return self.__delitem__(key, _index_operation=False)
+
+    def pop(self, key=None, default=_missing):
+        """Removes and returns a key or index.
+
+        :param key: The key to be popped. If this is an integer the item at
+                    that position is removed, if it's a string the value for
+                    that key is removed. If the key is omitted or `None` the
+                    last item is removed.
+        :return: an item.
+        """
+        if key is None:
+            return self._list.pop()
+        if isinstance(key, int):
+            return self._list.pop(key)
+        try:
+            rv = self[key]
+            self.remove(key)
+        except KeyError:
+            if default is not _missing:
+                return default
+            raise
+        return rv
+
+    def popitem(self):
+        """Removes a key or index and returns a (key, value) item."""
+        return self.pop()
+
+    def __contains__(self, key):
+        """Check if a key is present."""
+        try:
+            self.__getitem__(key, _get_mode=True)
+        except KeyError:
+            return False
+        return True
+
+    def __iter__(self):
+        """Yield ``(key, value)`` tuples."""
+        return iter(self._list)
+
+    def __len__(self):
+        return len(self._list)
+
+    def add(self, _key, _value, **kw):
+        """Add a new header tuple to the list.
+
+        Keyword arguments can specify additional parameters for the header
+        value, with underscores converted to dashes::
+
+        >>> d = Headers()
+        >>> d.add('Content-Type', 'text/plain')
+        >>> d.add('Content-Disposition', 'attachment', filename='foo.png')
+
+        The keyword argument dumping uses :func:`dump_options_header`
+        behind the scenes.
+
+        .. versionadded:: 0.4.1
+            keyword arguments were added for :mod:`wsgiref` compatibility.
+        """
+        if kw:
+            _value = _options_header_vkw(_value, kw)
+        _key = _str_header_key(_key)
+        _value = _str_header_value(_value)
+        self._list.append((_key, _value))
+
+    def add_header(self, _key, _value, **_kw):
+        """Add a new header tuple to the list.
+
+        An alias for :meth:`add` for compatibility with the :mod:`wsgiref`
+        :meth:`~wsgiref.headers.Headers.add_header` method.
+        """
+        self.add(_key, _value, **_kw)
+
+    def clear(self):
+        """Clears all headers."""
+        del self._list[:]
+
+    def set(self, _key, _value, **kw):
+        """Remove all header tuples for `key` and add a new one. The newly
+        added key either appears at the end of the list if there was no
+        entry or replaces the first one.
+
+        Keyword arguments can specify additional parameters for the header
+        value, with underscores converted to dashes. See :meth:`add` for
+        more information.
+
+        .. versionchanged:: 0.6.1
+           :meth:`set` now accepts the same arguments as :meth:`add`.
+
+        :param key: The key to be inserted.
+        :param value: The value to be inserted.
+        """
+        if kw:
+            _value = _options_header_vkw(_value, kw)
+        _key = _str_header_key(_key)
+        _value = _str_header_value(_value)
+        if not self._list:
+            self._list.append((_key, _value))
+            return
+        listiter = iter(self._list)
+        ikey = _key.lower()
+        for idx, (old_key, _old_value) in enumerate(listiter):
+            if old_key.lower() == ikey:
+                # replace first occurrence
+                self._list[idx] = (_key, _value)
+                break
+        else:
+            self._list.append((_key, _value))
+            return
+        self._list[idx + 1 :] = [t for t in listiter if t[0].lower() != ikey]
+
+    def setlist(self, key, values):
+        """Remove any existing values for a header and add new ones.
+
+        :param key: The header key to set.
+        :param values: An iterable of values to set for the key.
+
+        .. versionadded:: 1.0
+        """
+        if values:
+            values_iter = iter(values)
+            self.set(key, next(values_iter))
+
+            for value in values_iter:
+                self.add(key, value)
+        else:
+            self.remove(key)
+
+    def setdefault(self, key, default):
+        """Return the first value for the key if it is in the headers,
+        otherwise set the header to the value given by ``default`` and
+        return that.
+
+        :param key: The header key to get.
+        :param default: The value to set for the key if it is not in the
+            headers.
+        """
+        if key in self:
+            return self[key]
+
+        self.set(key, default)
+        return default
+
+    def setlistdefault(self, key, default):
+        """Return the list of values for the key if it is in the
+        headers, otherwise set the header to the list of values given
+        by ``default`` and return that.
+
+        Unlike :meth:`MultiDict.setlistdefault`, modifying the returned
+        list will not affect the headers.
+
+        :param key: The header key to get.
+        :param default: An iterable of values to set for the key if it
+            is not in the headers.
+
+        .. versionadded:: 1.0
+        """
+        if key not in self:
+            self.setlist(key, default)
+
+        return self.getlist(key)
+
+    def __setitem__(self, key, value):
+        """Like :meth:`set` but also supports index/slice based setting."""
+        if isinstance(key, (slice, int)):
+            if isinstance(key, int):
+                value = [value]
+            value = [(_str_header_key(k), _str_header_value(v)) for (k, v) in value]
+            if isinstance(key, int):
+                self._list[key] = value[0]
+            else:
+                self._list[key] = value
+        else:
+            self.set(key, value)
+
+    def update(self, *args, **kwargs):
+        """Replace headers in this object with items from another
+        headers object and keyword arguments.
+
+        To extend existing keys instead of replacing, use :meth:`extend`
+        instead.
+
+        If provided, the first argument can be another :class:`Headers`
+        object, a :class:`MultiDict`, :class:`dict`, or iterable of
+        pairs.
+
+        .. versionadded:: 1.0
+        """
+        if len(args) > 1:
+            raise TypeError(f"update expected at most 1 argument, got {len(args)}")
+
+        if args:
+            mapping = args[0]
+
+            if isinstance(mapping, (Headers, MultiDict)):
+                for key in mapping.keys():
+                    self.setlist(key, mapping.getlist(key))
+            elif isinstance(mapping, dict):
+                for key, value in mapping.items():
+                    if isinstance(value, (list, tuple)):
+                        self.setlist(key, value)
+                    else:
+                        self.set(key, value)
+            else:
+                for key, value in mapping:
+                    self.set(key, value)
+
+        for key, value in kwargs.items():
+            if isinstance(value, (list, tuple)):
+                self.setlist(key, value)
+            else:
+                self.set(key, value)
+
+    def to_wsgi_list(self):
+        """Convert the headers into a list suitable for WSGI.
+ + :return: list + """ + return list(self) + + def copy(self): + return self.__class__(self._list) + + def __copy__(self): + return self.copy() + + def __str__(self): + """Returns formatted headers suitable for HTTP transmission.""" + strs = [] + for key, value in self.to_wsgi_list(): + strs.append(f"{key}: {value}") + strs.append("\r\n") + return "\r\n".join(strs) + + def __repr__(self): + return f"{type(self).__name__}({list(self)!r})" + + +def _options_header_vkw(value: str, kw: dict[str, t.Any]): + return http.dump_options_header( + value, {k.replace("_", "-"): v for k, v in kw.items()} + ) + + +def _str_header_key(key: t.Any) -> str: + if not isinstance(key, str): + warnings.warn( + "Header keys must be strings. Passing other types is deprecated and will" + " not be supported in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + + if isinstance(key, bytes): + key = key.decode("latin-1") + else: + key = str(key) + + return key + + +_newline_re = re.compile(r"[\r\n]") + + +def _str_header_value(value: t.Any) -> str: + if isinstance(value, bytes): + warnings.warn( + "Passing bytes as a header value is deprecated and will not be supported in" + " Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + value = value.decode("latin-1") + + if not isinstance(value, str): + value = str(value) + + if _newline_re.search(value) is not None: + raise ValueError("Header values must not contain newline characters.") + + return value + + +class EnvironHeaders(ImmutableHeadersMixin, Headers): + """Read only version of the headers from a WSGI environment. This + provides the same interface as `Headers` and is constructed from + a WSGI environment. + From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a + subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will + render a page for a ``400 BAD REQUEST`` if caught in a catch-all for + HTTP exceptions. + """ + + def __init__(self, environ): + self.environ = environ + + def __eq__(self, other): + return self.environ is other.environ + + __hash__ = None + + def __getitem__(self, key, _get_mode=False): + # _get_mode is a no-op for this class as there is no index but + # used because get() calls it. + if not isinstance(key, str): + raise KeyError(key) + key = key.upper().replace("-", "_") + if key in {"CONTENT_TYPE", "CONTENT_LENGTH"}: + return self.environ[key] + return self.environ[f"HTTP_{key}"] + + def __len__(self): + # the iter is necessary because otherwise list calls our + # len which would call list again and so forth. + return len(list(iter(self))) + + def __iter__(self): + for key, value in self.environ.items(): + if key.startswith("HTTP_") and key not in { + "HTTP_CONTENT_TYPE", + "HTTP_CONTENT_LENGTH", + }: + yield key[5:].replace("_", "-").title(), value + elif key in {"CONTENT_TYPE", "CONTENT_LENGTH"} and value: + yield key.replace("_", "-").title(), value + + def copy(self): + raise TypeError(f"cannot create {type(self).__name__!r} copies") + + +# circular dependencies +from .. 
import http diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/headers.pyi b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/headers.pyi new file mode 100644 index 0000000000000000000000000000000000000000..86502221ae86b415278bb369bcc542f29e76642f --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/headers.pyi @@ -0,0 +1,109 @@ +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from typing import Literal +from typing import NoReturn +from typing import overload +from typing import TypeVar + +from _typeshed import SupportsKeysAndGetItem +from _typeshed.wsgi import WSGIEnvironment + +from .mixins import ImmutableHeadersMixin + +D = TypeVar("D") +T = TypeVar("T") + +class Headers(dict[str, str]): + _list: list[tuple[str, str]] + def __init__( + self, + defaults: Mapping[str, str | Iterable[str]] + | Iterable[tuple[str, str]] + | None = None, + ) -> None: ... + @overload + def __getitem__(self, key: str) -> str: ... + @overload + def __getitem__(self, key: int) -> tuple[str, str]: ... + @overload + def __getitem__(self, key: slice) -> Headers: ... + @overload + def __getitem__(self, key: str, _get_mode: Literal[True] = ...) -> str: ... + def __eq__(self, other: object) -> bool: ... + @overload # type: ignore + def get(self, key: str, default: str) -> str: ... + @overload + def get(self, key: str, default: str | None = None) -> str | None: ... + @overload + def get( + self, key: str, default: T | None = None, type: Callable[[str], T] = ... + ) -> T | None: ... + @overload + def getlist(self, key: str) -> list[str]: ... + @overload + def getlist(self, key: str, type: Callable[[str], T]) -> list[T]: ... + def get_all(self, name: str) -> list[str]: ... + def items( # type: ignore + self, lower: bool = False + ) -> Iterator[tuple[str, str]]: ... + def keys(self, lower: bool = False) -> Iterator[str]: ... # type: ignore + def values(self) -> Iterator[str]: ... # type: ignore + def extend( + self, + *args: Mapping[str, str | Iterable[str]] | Iterable[tuple[str, str]], + **kwargs: str | Iterable[str], + ) -> None: ... + @overload + def __delitem__(self, key: str | int | slice) -> None: ... + @overload + def __delitem__(self, key: str, _index_operation: Literal[False]) -> None: ... + def remove(self, key: str) -> None: ... + @overload # type: ignore + def pop(self, key: str, default: str | None = None) -> str: ... + @overload + def pop( + self, key: int | None = None, default: tuple[str, str] | None = None + ) -> tuple[str, str]: ... + def popitem(self) -> tuple[str, str]: ... + def __contains__(self, key: str) -> bool: ... # type: ignore + def has_key(self, key: str) -> bool: ... + def __iter__(self) -> Iterator[tuple[str, str]]: ... # type: ignore + def add(self, _key: str, _value: str, **kw: str) -> None: ... + def _validate_value(self, value: str) -> None: ... + def add_header(self, _key: str, _value: str, **_kw: str) -> None: ... + def clear(self) -> None: ... + def set(self, _key: str, _value: str, **kw: str) -> None: ... + def setlist(self, key: str, values: Iterable[str]) -> None: ... + def setdefault(self, key: str, default: str) -> str: ... + def setlistdefault(self, key: str, default: Iterable[str]) -> None: ... + @overload + def __setitem__(self, key: str, value: str) -> None: ... + @overload + def __setitem__(self, key: int, value: tuple[str, str]) -> None: ... 
+ @overload + def __setitem__(self, key: slice, value: Iterable[tuple[str, str]]) -> None: ... + @overload + def update( + self, __m: SupportsKeysAndGetItem[str, str], **kwargs: str | Iterable[str] + ) -> None: ... + @overload + def update( + self, __m: Iterable[tuple[str, str]], **kwargs: str | Iterable[str] + ) -> None: ... + @overload + def update(self, **kwargs: str | Iterable[str]) -> None: ... + def to_wsgi_list(self) -> list[tuple[str, str]]: ... + def copy(self) -> Headers: ... + def __copy__(self) -> Headers: ... + +class EnvironHeaders(ImmutableHeadersMixin, Headers): + environ: WSGIEnvironment + def __init__(self, environ: WSGIEnvironment) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __getitem__( # type: ignore + self, key: str, _get_mode: Literal[False] = False + ) -> str: ... + def __iter__(self) -> Iterator[tuple[str, str]]: ... # type: ignore + def copy(self) -> NoReturn: ... diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/mixins.py b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/mixins.py new file mode 100644 index 0000000000000000000000000000000000000000..2c84ca8f23d8a87d990b848b2f0472049022ac9a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/mixins.py @@ -0,0 +1,242 @@ +from __future__ import annotations + +from itertools import repeat + +from .._internal import _missing + + +def is_immutable(self): + raise TypeError(f"{type(self).__name__!r} objects are immutable") + + +class ImmutableListMixin: + """Makes a :class:`list` immutable. + + .. versionadded:: 0.5 + + :private: + """ + + _hash_cache = None + + def __hash__(self): + if self._hash_cache is not None: + return self._hash_cache + rv = self._hash_cache = hash(tuple(self)) + return rv + + def __reduce_ex__(self, protocol): + return type(self), (list(self),) + + def __delitem__(self, key): + is_immutable(self) + + def __iadd__(self, other): + is_immutable(self) + + def __imul__(self, other): + is_immutable(self) + + def __setitem__(self, key, value): + is_immutable(self) + + def append(self, item): + is_immutable(self) + + def remove(self, item): + is_immutable(self) + + def extend(self, iterable): + is_immutable(self) + + def insert(self, pos, value): + is_immutable(self) + + def pop(self, index=-1): + is_immutable(self) + + def reverse(self): + is_immutable(self) + + def sort(self, key=None, reverse=False): + is_immutable(self) + + +class ImmutableDictMixin: + """Makes a :class:`dict` immutable. + + .. versionadded:: 0.5 + + :private: + """ + + _hash_cache = None + + @classmethod + def fromkeys(cls, keys, value=None): + instance = super().__new__(cls) + instance.__init__(zip(keys, repeat(value))) + return instance + + def __reduce_ex__(self, protocol): + return type(self), (dict(self),) + + def _iter_hashitems(self): + return self.items() + + def __hash__(self): + if self._hash_cache is not None: + return self._hash_cache + rv = self._hash_cache = hash(frozenset(self._iter_hashitems())) + return rv + + def setdefault(self, key, default=None): + is_immutable(self) + + def update(self, *args, **kwargs): + is_immutable(self) + + def pop(self, key, default=None): + is_immutable(self) + + def popitem(self): + is_immutable(self) + + def __setitem__(self, key, value): + is_immutable(self) + + def __delitem__(self, key): + is_immutable(self) + + def clear(self): + is_immutable(self) + + +class ImmutableMultiDictMixin(ImmutableDictMixin): + """Makes a :class:`MultiDict` immutable. + + .. 
versionadded:: 0.5 + + :private: + """ + + def __reduce_ex__(self, protocol): + return type(self), (list(self.items(multi=True)),) + + def _iter_hashitems(self): + return self.items(multi=True) + + def add(self, key, value): + is_immutable(self) + + def popitemlist(self): + is_immutable(self) + + def poplist(self, key): + is_immutable(self) + + def setlist(self, key, new_list): + is_immutable(self) + + def setlistdefault(self, key, default_list=None): + is_immutable(self) + + +class ImmutableHeadersMixin: + """Makes a :class:`Headers` immutable. We do not mark them as + hashable though since the only usecase for this datastructure + in Werkzeug is a view on a mutable structure. + + .. versionadded:: 0.5 + + :private: + """ + + def __delitem__(self, key, **kwargs): + is_immutable(self) + + def __setitem__(self, key, value): + is_immutable(self) + + def set(self, _key, _value, **kwargs): + is_immutable(self) + + def setlist(self, key, values): + is_immutable(self) + + def add(self, _key, _value, **kwargs): + is_immutable(self) + + def add_header(self, _key, _value, **_kwargs): + is_immutable(self) + + def remove(self, key): + is_immutable(self) + + def extend(self, *args, **kwargs): + is_immutable(self) + + def update(self, *args, **kwargs): + is_immutable(self) + + def insert(self, pos, value): + is_immutable(self) + + def pop(self, key=None, default=_missing): + is_immutable(self) + + def popitem(self): + is_immutable(self) + + def setdefault(self, key, default): + is_immutable(self) + + def setlistdefault(self, key, default): + is_immutable(self) + + +def _calls_update(name): + def oncall(self, *args, **kw): + rv = getattr(super(UpdateDictMixin, self), name)(*args, **kw) + + if self.on_update is not None: + self.on_update(self) + + return rv + + oncall.__name__ = name + return oncall + + +class UpdateDictMixin(dict): + """Makes dicts call `self.on_update` on modifications. + + .. versionadded:: 0.5 + + :private: + """ + + on_update = None + + def setdefault(self, key, default=None): + modified = key not in self + rv = super().setdefault(key, default) + if modified and self.on_update is not None: + self.on_update(self) + return rv + + def pop(self, key, default=_missing): + modified = key in self + if default is _missing: + rv = super().pop(key) + else: + rv = super().pop(key, default) + if modified and self.on_update is not None: + self.on_update(self) + return rv + + __setitem__ = _calls_update("__setitem__") + __delitem__ = _calls_update("__delitem__") + clear = _calls_update("clear") + popitem = _calls_update("popitem") + update = _calls_update("update") diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/mixins.pyi b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/mixins.pyi new file mode 100644 index 0000000000000000000000000000000000000000..74ed4b81e2e83d3d4f2e1e7ba239c5c7e3a76f1b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/mixins.pyi @@ -0,0 +1,97 @@ +from collections.abc import Callable +from collections.abc import Hashable +from collections.abc import Iterable +from typing import Any +from typing import NoReturn +from typing import overload +from typing import SupportsIndex +from typing import TypeVar + +from _typeshed import SupportsKeysAndGetItem + +from .headers import Headers + +K = TypeVar("K") +T = TypeVar("T") +V = TypeVar("V") + +def is_immutable(self: object) -> NoReturn: ... + +class ImmutableListMixin(list[V]): + _hash_cache: int | None + def __hash__(self) -> int: ... 
# type: ignore
+    def __delitem__(self, key: SupportsIndex | slice) -> NoReturn: ...
+    def __iadd__(self, other: Any) -> NoReturn: ...  # type: ignore
+    def __imul__(self, other: SupportsIndex) -> NoReturn: ...
+    def __setitem__(self, key: int | slice, value: V) -> NoReturn: ...  # type: ignore
+    def append(self, value: V) -> NoReturn: ...
+    def remove(self, value: V) -> NoReturn: ...
+    def extend(self, values: Iterable[V]) -> NoReturn: ...
+    def insert(self, pos: SupportsIndex, value: V) -> NoReturn: ...
+    def pop(self, index: SupportsIndex = -1) -> NoReturn: ...
+    def reverse(self) -> NoReturn: ...
+    def sort(
+        self, key: Callable[[V], Any] | None = None, reverse: bool = False
+    ) -> NoReturn: ...
+
+class ImmutableDictMixin(dict[K, V]):
+    _hash_cache: int | None
+    @classmethod
+    def fromkeys(  # type: ignore
+        cls, keys: Iterable[K], value: V | None = None
+    ) -> ImmutableDictMixin[K, V]: ...
+    def _iter_hashitems(self) -> Iterable[Hashable]: ...
+    def __hash__(self) -> int: ...  # type: ignore
+    def setdefault(self, key: K, default: V | None = None) -> NoReturn: ...
+    def update(self, *args: Any, **kwargs: V) -> NoReturn: ...
+    def pop(self, key: K, default: V | None = None) -> NoReturn: ...  # type: ignore
+    def popitem(self) -> NoReturn: ...
+    def __setitem__(self, key: K, value: V) -> NoReturn: ...
+    def __delitem__(self, key: K) -> NoReturn: ...
+    def clear(self) -> NoReturn: ...
+
+class ImmutableMultiDictMixin(ImmutableDictMixin[K, V]):
+    def _iter_hashitems(self) -> Iterable[Hashable]: ...
+    def add(self, key: K, value: V) -> NoReturn: ...
+    def popitemlist(self) -> NoReturn: ...
+    def poplist(self, key: K) -> NoReturn: ...
+    def setlist(self, key: K, new_list: Iterable[V]) -> NoReturn: ...
+    def setlistdefault(
+        self, key: K, default_list: Iterable[V] | None = None
+    ) -> NoReturn: ...
+
+class ImmutableHeadersMixin(Headers):
+    def __delitem__(self, key: Any, _index_operation: bool = True) -> NoReturn: ...
+    def __setitem__(self, key: Any, value: Any) -> NoReturn: ...
+    def set(self, _key: Any, _value: Any, **kw: Any) -> NoReturn: ...
+    def setlist(self, key: Any, values: Any) -> NoReturn: ...
+    def add(self, _key: Any, _value: Any, **kw: Any) -> NoReturn: ...
+    def add_header(self, _key: Any, _value: Any, **_kw: Any) -> NoReturn: ...
+    def remove(self, key: Any) -> NoReturn: ...
+    def extend(self, *args: Any, **kwargs: Any) -> NoReturn: ...
+    def update(self, *args: Any, **kwargs: Any) -> NoReturn: ...
+    def insert(self, pos: Any, value: Any) -> NoReturn: ...
+    def pop(self, key: Any = None, default: Any = ...) -> NoReturn: ...
+    def popitem(self) -> NoReturn: ...
+    def setdefault(self, key: Any, default: Any) -> NoReturn: ...
+    def setlistdefault(self, key: Any, default: Any) -> NoReturn: ...
+
+def _calls_update(name: str) -> Callable[[UpdateDictMixin[K, V]], Any]: ...
+
+class UpdateDictMixin(dict[K, V]):
+    on_update: Callable[[UpdateDictMixin[K, V]], None] | None
+    def setdefault(self, key: K, default: V | None = None) -> V: ...
+    @overload
+    def pop(self, key: K) -> V: ...
+    @overload
+    def pop(self, key: K, default: V | T = ...) -> V | T: ...
+    def __setitem__(self, key: K, value: V) -> None: ...
+    def __delitem__(self, key: K) -> None: ...
+    def clear(self) -> None: ...
+    def popitem(self) -> tuple[K, V]: ...
+    @overload
+    def update(self, __m: SupportsKeysAndGetItem[K, V], **kwargs: V) -> None: ...
+    @overload
+    def update(self, __m: Iterable[tuple[K, V]], **kwargs: V) -> None: ...
+    @overload
+    def update(self, **kwargs: V) -> None: ...
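Note: the `UpdateDictMixin` stubs above describe the `on_update` callback machinery that `ContentSecurityPolicy` (and the cache-control classes) build on. A minimal sketch of the observable behaviour follows; it is illustrative commentary rather than part of the diff, the `changes` list is a made-up name, and the printed header is the rough output of `dump_csp_header`.

from werkzeug.datastructures import ContentSecurityPolicy

changes = []
csp = ContentSecurityPolicy(on_update=changes.append)

# Each mutating dict operation triggers on_update exactly once.
csp["default-src"] = "'self'"  # __setitem__ is wrapped by _calls_update
csp.script_src = "'self'"      # accessor property funnels into __setitem__
del csp["default-src"]         # __delitem__ is wrapped as well

print(len(changes))     # 3, each entry being the policy object itself
print(csp.to_header())  # roughly: script-src 'self'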
diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/range.py b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/range.py
new file mode 100644
index 0000000000000000000000000000000000000000..7011ea4ae33a17984d45138be3fabd1529f591b1
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/range.py
@@ -0,0 +1,180 @@
+from __future__ import annotations
+
+
+class IfRange:
+    """Very simple object that represents the `If-Range` header in parsed
+    form. It will have either an etag or a date, or neither of the two,
+    but never both.
+
+    .. versionadded:: 0.7
+    """
+
+    def __init__(self, etag=None, date=None):
+        #: The etag parsed and unquoted. Ranges always operate on strong
+        #: etags so the weakness information is not necessary.
+        self.etag = etag
+        #: The date in parsed format or `None`.
+        self.date = date
+
+    def to_header(self):
+        """Converts the object back into an HTTP header."""
+        if self.date is not None:
+            return http.http_date(self.date)
+        if self.etag is not None:
+            return http.quote_etag(self.etag)
+        return ""
+
+    def __str__(self):
+        return self.to_header()
+
+    def __repr__(self):
+        return f"<{type(self).__name__} {str(self)!r}>"
+
+
+class Range:
+    """Represents a ``Range`` header. All methods support only bytes as
+    the unit. Stores a list of ranges if given, but the methods only
+    work if exactly one range is provided.
+
+    :raise ValueError: If the ranges provided are invalid.
+
+    .. versionchanged:: 0.15
+        The ranges passed in are validated.
+
+    .. versionadded:: 0.7
+    """
+
+    def __init__(self, units, ranges):
+        #: The units of this range. Usually "bytes".
+        self.units = units
+        #: A list of ``(begin, end)`` tuples for the range header provided.
+        #: The ranges are non-inclusive.
+        self.ranges = ranges
+
+        for start, end in ranges:
+            if start is None or (end is not None and (start < 0 or start >= end)):
+                raise ValueError(f"{(start, end)} is not a valid range.")
+
+    def range_for_length(self, length):
+        """If the range is for bytes, the length is not `None`, there is
+        exactly one range, and it is satisfiable, it returns a
+        ``(start, stop)`` tuple; otherwise `None`.
+        """
+        if self.units != "bytes" or length is None or len(self.ranges) != 1:
+            return None
+        start, end = self.ranges[0]
+        if end is None:
+            end = length
+            if start < 0:
+                start += length
+        if http.is_byte_range_valid(start, end, length):
+            return start, min(end, length)
+        return None
+
+    def make_content_range(self, length):
+        """Creates a :class:`~werkzeug.datastructures.ContentRange` object
+        from the current range and given content length.
+ """ + rng = self.range_for_length(length) + if rng is not None: + return ContentRange(self.units, rng[0], rng[1], length) + return None + + def to_header(self): + """Converts the object back into an HTTP header.""" + ranges = [] + for begin, end in self.ranges: + if end is None: + ranges.append(f"{begin}-" if begin >= 0 else str(begin)) + else: + ranges.append(f"{begin}-{end - 1}") + return f"{self.units}={','.join(ranges)}" + + def to_content_range_header(self, length): + """Converts the object into `Content-Range` HTTP header, + based on given length + """ + range = self.range_for_length(length) + if range is not None: + return f"{self.units} {range[0]}-{range[1] - 1}/{length}" + return None + + def __str__(self): + return self.to_header() + + def __repr__(self): + return f"<{type(self).__name__} {str(self)!r}>" + + +def _callback_property(name): + def fget(self): + return getattr(self, name) + + def fset(self, value): + setattr(self, name, value) + if self.on_update is not None: + self.on_update(self) + + return property(fget, fset) + + +class ContentRange: + """Represents the content range header. + + .. versionadded:: 0.7 + """ + + def __init__(self, units, start, stop, length=None, on_update=None): + assert http.is_byte_range_valid(start, stop, length), "Bad range provided" + self.on_update = on_update + self.set(start, stop, length, units) + + #: The units to use, usually "bytes" + units = _callback_property("_units") + #: The start point of the range or `None`. + start = _callback_property("_start") + #: The stop point of the range (non-inclusive) or `None`. Can only be + #: `None` if also start is `None`. + stop = _callback_property("_stop") + #: The length of the range or `None`. + length = _callback_property("_length") + + def set(self, start, stop, length=None, units="bytes"): + """Simple method to update the ranges.""" + assert http.is_byte_range_valid(start, stop, length), "Bad range provided" + self._units = units + self._start = start + self._stop = stop + self._length = length + if self.on_update is not None: + self.on_update(self) + + def unset(self): + """Sets the units to `None` which indicates that the header should + no longer be used. + """ + self.set(None, None, units=None) + + def to_header(self): + if self.units is None: + return "" + if self.length is None: + length = "*" + else: + length = self.length + if self.start is None: + return f"{self.units} */{length}" + return f"{self.units} {self.start}-{self.stop - 1}/{length}" + + def __bool__(self): + return self.units is not None + + def __str__(self): + return self.to_header() + + def __repr__(self): + return f"<{type(self).__name__} {str(self)!r}>" + + +# circular dependencies +from .. import http diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/range.pyi b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/range.pyi new file mode 100644 index 0000000000000000000000000000000000000000..f38ad69ef107582660501eebf969311b748e681c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/range.pyi @@ -0,0 +1,57 @@ +from collections.abc import Callable +from datetime import datetime + +class IfRange: + etag: str | None + date: datetime | None + def __init__( + self, etag: str | None = None, date: datetime | None = None + ) -> None: ... + def to_header(self) -> str: ... + +class Range: + units: str + ranges: list[tuple[int, int | None]] + def __init__(self, units: str, ranges: list[tuple[int, int | None]]) -> None: ... 
+ def range_for_length(self, length: int | None) -> tuple[int, int] | None: ... + def make_content_range(self, length: int | None) -> ContentRange | None: ... + def to_header(self) -> str: ... + def to_content_range_header(self, length: int | None) -> str | None: ... + +def _callback_property(name: str) -> property: ... + +class ContentRange: + on_update: Callable[[ContentRange], None] | None + def __init__( + self, + units: str | None, + start: int | None, + stop: int | None, + length: int | None = None, + on_update: Callable[[ContentRange], None] | None = None, + ) -> None: ... + @property + def units(self) -> str | None: ... + @units.setter + def units(self, value: str | None) -> None: ... + @property + def start(self) -> int | None: ... + @start.setter + def start(self, value: int | None) -> None: ... + @property + def stop(self) -> int | None: ... + @stop.setter + def stop(self, value: int | None) -> None: ... + @property + def length(self) -> int | None: ... + @length.setter + def length(self, value: int | None) -> None: ... + def set( + self, + start: int | None, + stop: int | None, + length: int | None = None, + units: str | None = "bytes", + ) -> None: ... + def unset(self) -> None: ... + def to_header(self) -> str: ... diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/structures.py b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/structures.py new file mode 100644 index 0000000000000000000000000000000000000000..7ea7bee283fbf2241fbef614d184f79d3ec2eabf --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/structures.py @@ -0,0 +1,1006 @@ +from __future__ import annotations + +from collections.abc import MutableSet +from copy import deepcopy + +from .. import exceptions +from .._internal import _missing +from .mixins import ImmutableDictMixin +from .mixins import ImmutableListMixin +from .mixins import ImmutableMultiDictMixin +from .mixins import UpdateDictMixin + + +def is_immutable(self): + raise TypeError(f"{type(self).__name__!r} objects are immutable") + + +def iter_multi_items(mapping): + """Iterates over the items of a mapping yielding keys and values + without dropping any from more complex structures. + """ + if isinstance(mapping, MultiDict): + yield from mapping.items(multi=True) + elif isinstance(mapping, dict): + for key, value in mapping.items(): + if isinstance(value, (tuple, list)): + for v in value: + yield key, v + else: + yield key, value + else: + yield from mapping + + +class ImmutableList(ImmutableListMixin, list): + """An immutable :class:`list`. + + .. versionadded:: 0.5 + + :private: + """ + + def __repr__(self): + return f"{type(self).__name__}({list.__repr__(self)})" + + +class TypeConversionDict(dict): + """Works like a regular dict but the :meth:`get` method can perform + type conversions. :class:`MultiDict` and :class:`CombinedMultiDict` + are subclasses of this class and provide the same feature. + + .. versionadded:: 0.5 + """ + + def get(self, key, default=None, type=None): + """Return the default value if the requested data doesn't exist. + If `type` is provided and is a callable it should convert the value, + return it or raise a :exc:`ValueError` if that is not possible. In + this case the function will return the default as if the value was not + found: + + >>> d = TypeConversionDict(foo='42', bar='blub') + >>> d.get('foo', type=int) + 42 + >>> d.get('bar', -1, type=int) + -1 + + :param key: The key to be looked up. 
+ :param default: The default value to be returned if the key can't + be looked up. If not further specified `None` is + returned. + :param type: A callable that is used to cast the value in the + :class:`MultiDict`. If a :exc:`ValueError` is raised + by this callable the default value is returned. + """ + try: + rv = self[key] + except KeyError: + return default + if type is not None: + try: + rv = type(rv) + except ValueError: + rv = default + return rv + + +class ImmutableTypeConversionDict(ImmutableDictMixin, TypeConversionDict): + """Works like a :class:`TypeConversionDict` but does not support + modifications. + + .. versionadded:: 0.5 + """ + + def copy(self): + """Return a shallow mutable copy of this object. Keep in mind that + the standard library's :func:`copy` function is a no-op for this class + like for any other python immutable type (eg: :class:`tuple`). + """ + return TypeConversionDict(self) + + def __copy__(self): + return self + + +class MultiDict(TypeConversionDict): + """A :class:`MultiDict` is a dictionary subclass customized to deal with + multiple values for the same key which is for example used by the parsing + functions in the wrappers. This is necessary because some HTML form + elements pass multiple values for the same key. + + :class:`MultiDict` implements all standard dictionary methods. + Internally, it saves all values for a key as a list, but the standard dict + access methods will only return the first value for a key. If you want to + gain access to the other values, too, you have to use the `list` methods as + explained below. + + Basic Usage: + + >>> d = MultiDict([('a', 'b'), ('a', 'c')]) + >>> d + MultiDict([('a', 'b'), ('a', 'c')]) + >>> d['a'] + 'b' + >>> d.getlist('a') + ['b', 'c'] + >>> 'a' in d + True + + It behaves like a normal dict thus all dict functions will only return the + first value when multiple values for one key are found. + + From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a + subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will + render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP + exceptions. + + A :class:`MultiDict` can be constructed from an iterable of + ``(key, value)`` tuples, a dict, a :class:`MultiDict` or from Werkzeug 0.2 + onwards some keyword parameters. + + :param mapping: the initial value for the :class:`MultiDict`. Either a + regular dict, an iterable of ``(key, value)`` tuples + or `None`. + """ + + def __init__(self, mapping=None): + if isinstance(mapping, MultiDict): + dict.__init__(self, ((k, l[:]) for k, l in mapping.lists())) + elif isinstance(mapping, dict): + tmp = {} + for key, value in mapping.items(): + if isinstance(value, (tuple, list)): + if len(value) == 0: + continue + value = list(value) + else: + value = [value] + tmp[key] = value + dict.__init__(self, tmp) + else: + tmp = {} + for key, value in mapping or (): + tmp.setdefault(key, []).append(value) + dict.__init__(self, tmp) + + def __getstate__(self): + return dict(self.lists()) + + def __setstate__(self, value): + dict.clear(self) + dict.update(self, value) + + def __iter__(self): + # Work around https://bugs.python.org/issue43246. + # (`return super().__iter__()` also works here, which makes this look + # even more like it should be a no-op, yet it isn't.) + return dict.__iter__(self) + + def __getitem__(self, key): + """Return the first data value for this key; + raises KeyError if not found. + + :param key: The key to be looked up. + :raise KeyError: if the key does not exist. 
+ """ + + if key in self: + lst = dict.__getitem__(self, key) + if len(lst) > 0: + return lst[0] + raise exceptions.BadRequestKeyError(key) + + def __setitem__(self, key, value): + """Like :meth:`add` but removes an existing key first. + + :param key: the key for the value. + :param value: the value to set. + """ + dict.__setitem__(self, key, [value]) + + def add(self, key, value): + """Adds a new value for the key. + + .. versionadded:: 0.6 + + :param key: the key for the value. + :param value: the value to add. + """ + dict.setdefault(self, key, []).append(value) + + def getlist(self, key, type=None): + """Return the list of items for a given key. If that key is not in the + `MultiDict`, the return value will be an empty list. Just like `get`, + `getlist` accepts a `type` parameter. All items will be converted + with the callable defined there. + + :param key: The key to be looked up. + :param type: A callable that is used to cast the value in the + :class:`MultiDict`. If a :exc:`ValueError` is raised + by this callable the value will be removed from the list. + :return: a :class:`list` of all the values for the key. + """ + try: + rv = dict.__getitem__(self, key) + except KeyError: + return [] + if type is None: + return list(rv) + result = [] + for item in rv: + try: + result.append(type(item)) + except ValueError: + pass + return result + + def setlist(self, key, new_list): + """Remove the old values for a key and add new ones. Note that the list + you pass the values in will be shallow-copied before it is inserted in + the dictionary. + + >>> d = MultiDict() + >>> d.setlist('foo', ['1', '2']) + >>> d['foo'] + '1' + >>> d.getlist('foo') + ['1', '2'] + + :param key: The key for which the values are set. + :param new_list: An iterable with the new values for the key. Old values + are removed first. + """ + dict.__setitem__(self, key, list(new_list)) + + def setdefault(self, key, default=None): + """Returns the value for the key if it is in the dict, otherwise it + returns `default` and sets that value for `key`. + + :param key: The key to be looked up. + :param default: The default value to be returned if the key is not + in the dict. If not further specified it's `None`. + """ + if key not in self: + self[key] = default + else: + default = self[key] + return default + + def setlistdefault(self, key, default_list=None): + """Like `setdefault` but sets multiple values. The list returned + is not a copy, but the list that is actually used internally. This + means that you can put new values into the dict by appending items + to the list: + + >>> d = MultiDict({"foo": 1}) + >>> d.setlistdefault("foo").extend([2, 3]) + >>> d.getlist("foo") + [1, 2, 3] + + :param key: The key to be looked up. + :param default_list: An iterable of default values. It is either copied + (in case it was a list) or converted into a list + before returned. + :return: a :class:`list` + """ + if key not in self: + default_list = list(default_list or ()) + dict.__setitem__(self, key, default_list) + else: + default_list = dict.__getitem__(self, key) + return default_list + + def items(self, multi=False): + """Return an iterator of ``(key, value)`` pairs. + + :param multi: If set to `True` the iterator returned will have a pair + for each value of each key. Otherwise it will only + contain pairs for the first value of each key. 
+ """ + for key, values in dict.items(self): + if multi: + for value in values: + yield key, value + else: + yield key, values[0] + + def lists(self): + """Return a iterator of ``(key, values)`` pairs, where values is the list + of all values associated with the key.""" + for key, values in dict.items(self): + yield key, list(values) + + def values(self): + """Returns an iterator of the first value on every key's value list.""" + for values in dict.values(self): + yield values[0] + + def listvalues(self): + """Return an iterator of all values associated with a key. Zipping + :meth:`keys` and this is the same as calling :meth:`lists`: + + >>> d = MultiDict({"foo": [1, 2, 3]}) + >>> zip(d.keys(), d.listvalues()) == d.lists() + True + """ + return dict.values(self) + + def copy(self): + """Return a shallow copy of this object.""" + return self.__class__(self) + + def deepcopy(self, memo=None): + """Return a deep copy of this object.""" + return self.__class__(deepcopy(self.to_dict(flat=False), memo)) + + def to_dict(self, flat=True): + """Return the contents as regular dict. If `flat` is `True` the + returned dict will only have the first item present, if `flat` is + `False` all values will be returned as lists. + + :param flat: If set to `False` the dict returned will have lists + with all the values in it. Otherwise it will only + contain the first value for each key. + :return: a :class:`dict` + """ + if flat: + return dict(self.items()) + return dict(self.lists()) + + def update(self, mapping): + """update() extends rather than replaces existing key lists: + + >>> a = MultiDict({'x': 1}) + >>> b = MultiDict({'x': 2, 'y': 3}) + >>> a.update(b) + >>> a + MultiDict([('y', 3), ('x', 1), ('x', 2)]) + + If the value list for a key in ``other_dict`` is empty, no new values + will be added to the dict and the key will not be created: + + >>> x = {'empty_list': []} + >>> y = MultiDict() + >>> y.update(x) + >>> y + MultiDict([]) + """ + for key, value in iter_multi_items(mapping): + MultiDict.add(self, key, value) + + def pop(self, key, default=_missing): + """Pop the first item for a list on the dict. Afterwards the + key is removed from the dict, so additional values are discarded: + + >>> d = MultiDict({"foo": [1, 2, 3]}) + >>> d.pop("foo") + 1 + >>> "foo" in d + False + + :param key: the key to pop. + :param default: if provided the value to return if the key was + not in the dictionary. + """ + try: + lst = dict.pop(self, key) + + if len(lst) == 0: + raise exceptions.BadRequestKeyError(key) + + return lst[0] + except KeyError: + if default is not _missing: + return default + + raise exceptions.BadRequestKeyError(key) from None + + def popitem(self): + """Pop an item from the dict.""" + try: + item = dict.popitem(self) + + if len(item[1]) == 0: + raise exceptions.BadRequestKeyError(item[0]) + + return (item[0], item[1][0]) + except KeyError as e: + raise exceptions.BadRequestKeyError(e.args[0]) from None + + def poplist(self, key): + """Pop the list for a key from the dict. If the key is not in the dict + an empty list is returned. + + .. versionchanged:: 0.5 + If the key does no longer exist a list is returned instead of + raising an error. 
+ """ + return dict.pop(self, key, []) + + def popitemlist(self): + """Pop a ``(key, list)`` tuple from the dict.""" + try: + return dict.popitem(self) + except KeyError as e: + raise exceptions.BadRequestKeyError(e.args[0]) from None + + def __copy__(self): + return self.copy() + + def __deepcopy__(self, memo): + return self.deepcopy(memo=memo) + + def __repr__(self): + return f"{type(self).__name__}({list(self.items(multi=True))!r})" + + +class _omd_bucket: + """Wraps values in the :class:`OrderedMultiDict`. This makes it + possible to keep an order over multiple different keys. It requires + a lot of extra memory and slows down access a lot, but makes it + possible to access elements in O(1) and iterate in O(n). + """ + + __slots__ = ("prev", "key", "value", "next") + + def __init__(self, omd, key, value): + self.prev = omd._last_bucket + self.key = key + self.value = value + self.next = None + + if omd._first_bucket is None: + omd._first_bucket = self + if omd._last_bucket is not None: + omd._last_bucket.next = self + omd._last_bucket = self + + def unlink(self, omd): + if self.prev: + self.prev.next = self.next + if self.next: + self.next.prev = self.prev + if omd._first_bucket is self: + omd._first_bucket = self.next + if omd._last_bucket is self: + omd._last_bucket = self.prev + + +class OrderedMultiDict(MultiDict): + """Works like a regular :class:`MultiDict` but preserves the + order of the fields. To convert the ordered multi dict into a + list you can use the :meth:`items` method and pass it ``multi=True``. + + In general an :class:`OrderedMultiDict` is an order of magnitude + slower than a :class:`MultiDict`. + + .. admonition:: note + + Due to a limitation in Python you cannot convert an ordered + multi dict into a regular dict by using ``dict(multidict)``. + Instead you have to use the :meth:`to_dict` method, otherwise + the internal bucket objects are exposed. 
+ """ + + def __init__(self, mapping=None): + dict.__init__(self) + self._first_bucket = self._last_bucket = None + if mapping is not None: + OrderedMultiDict.update(self, mapping) + + def __eq__(self, other): + if not isinstance(other, MultiDict): + return NotImplemented + if isinstance(other, OrderedMultiDict): + iter1 = iter(self.items(multi=True)) + iter2 = iter(other.items(multi=True)) + try: + for k1, v1 in iter1: + k2, v2 = next(iter2) + if k1 != k2 or v1 != v2: + return False + except StopIteration: + return False + try: + next(iter2) + except StopIteration: + return True + return False + if len(self) != len(other): + return False + for key, values in self.lists(): + if other.getlist(key) != values: + return False + return True + + __hash__ = None + + def __reduce_ex__(self, protocol): + return type(self), (list(self.items(multi=True)),) + + def __getstate__(self): + return list(self.items(multi=True)) + + def __setstate__(self, values): + dict.clear(self) + for key, value in values: + self.add(key, value) + + def __getitem__(self, key): + if key in self: + return dict.__getitem__(self, key)[0].value + raise exceptions.BadRequestKeyError(key) + + def __setitem__(self, key, value): + self.poplist(key) + self.add(key, value) + + def __delitem__(self, key): + self.pop(key) + + def keys(self): + return (key for key, value in self.items()) + + def __iter__(self): + return iter(self.keys()) + + def values(self): + return (value for key, value in self.items()) + + def items(self, multi=False): + ptr = self._first_bucket + if multi: + while ptr is not None: + yield ptr.key, ptr.value + ptr = ptr.next + else: + returned_keys = set() + while ptr is not None: + if ptr.key not in returned_keys: + returned_keys.add(ptr.key) + yield ptr.key, ptr.value + ptr = ptr.next + + def lists(self): + returned_keys = set() + ptr = self._first_bucket + while ptr is not None: + if ptr.key not in returned_keys: + yield ptr.key, self.getlist(ptr.key) + returned_keys.add(ptr.key) + ptr = ptr.next + + def listvalues(self): + for _key, values in self.lists(): + yield values + + def add(self, key, value): + dict.setdefault(self, key, []).append(_omd_bucket(self, key, value)) + + def getlist(self, key, type=None): + try: + rv = dict.__getitem__(self, key) + except KeyError: + return [] + if type is None: + return [x.value for x in rv] + result = [] + for item in rv: + try: + result.append(type(item.value)) + except ValueError: + pass + return result + + def setlist(self, key, new_list): + self.poplist(key) + for value in new_list: + self.add(key, value) + + def setlistdefault(self, key, default_list=None): + raise TypeError("setlistdefault is unsupported for ordered multi dicts") + + def update(self, mapping): + for key, value in iter_multi_items(mapping): + OrderedMultiDict.add(self, key, value) + + def poplist(self, key): + buckets = dict.pop(self, key, ()) + for bucket in buckets: + bucket.unlink(self) + return [x.value for x in buckets] + + def pop(self, key, default=_missing): + try: + buckets = dict.pop(self, key) + except KeyError: + if default is not _missing: + return default + + raise exceptions.BadRequestKeyError(key) from None + + for bucket in buckets: + bucket.unlink(self) + + return buckets[0].value + + def popitem(self): + try: + key, buckets = dict.popitem(self) + except KeyError as e: + raise exceptions.BadRequestKeyError(e.args[0]) from None + + for bucket in buckets: + bucket.unlink(self) + + return key, buckets[0].value + + def popitemlist(self): + try: + key, buckets = dict.popitem(self) + 
+        except KeyError as e:
+            raise exceptions.BadRequestKeyError(e.args[0]) from None
+
+        for bucket in buckets:
+            bucket.unlink(self)
+
+        return key, [x.value for x in buckets]
+
+
+class CombinedMultiDict(ImmutableMultiDictMixin, MultiDict):
+    """A read-only :class:`MultiDict` that combines a sequence of
+    :class:`MultiDict` instances and merges the return values of all
+    wrapped dicts:
+
+    >>> from werkzeug.datastructures import CombinedMultiDict, MultiDict
+    >>> post = MultiDict([('foo', 'bar')])
+    >>> get = MultiDict([('blub', 'blah')])
+    >>> combined = CombinedMultiDict([get, post])
+    >>> combined['foo']
+    'bar'
+    >>> combined['blub']
+    'blah'
+
+    This works for all read operations. Methods that would normally modify
+    the data raise a `TypeError` instead, since modification is not
+    possible.
+
+    From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
+    subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
+    render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP
+    exceptions.
+    """
+
+    def __reduce_ex__(self, protocol):
+        return type(self), (self.dicts,)
+
+    def __init__(self, dicts=None):
+        # Guard against the documented ``None`` default explicitly;
+        # ``list(None)`` would raise a TypeError.
+        self.dicts = list(dicts) if dicts is not None else []
+
+    @classmethod
+    def fromkeys(cls, keys, value=None):
+        raise TypeError(f"cannot create {cls.__name__!r} instances by fromkeys")
+
+    def __getitem__(self, key):
+        for d in self.dicts:
+            if key in d:
+                return d[key]
+        raise exceptions.BadRequestKeyError(key)
+
+    def get(self, key, default=None, type=None):
+        for d in self.dicts:
+            if key in d:
+                if type is not None:
+                    try:
+                        return type(d[key])
+                    except ValueError:
+                        continue
+                return d[key]
+        return default
+
+    def getlist(self, key, type=None):
+        rv = []
+        for d in self.dicts:
+            rv.extend(d.getlist(key, type))
+        return rv
+
+    def _keys_impl(self):
+        """This function exists so __len__ can be implemented more efficiently,
+        saving one list creation from an iterator.
+        """
+        rv = set()
+        rv.update(*self.dicts)
+        return rv
+
+    def keys(self):
+        return self._keys_impl()
+
+    def __iter__(self):
+        return iter(self.keys())
+
+    def items(self, multi=False):
+        found = set()
+        for d in self.dicts:
+            for key, value in d.items(multi):
+                if multi:
+                    yield key, value
+                elif key not in found:
+                    found.add(key)
+                    yield key, value
+
+    def values(self):
+        for _key, value in self.items():
+            yield value
+
+    def lists(self):
+        rv = {}
+        for d in self.dicts:
+            for key, values in d.lists():
+                rv.setdefault(key, []).extend(values)
+        return list(rv.items())
+
+    def listvalues(self):
+        return (x[1] for x in self.lists())
+
+    def copy(self):
+        """Return a shallow mutable copy of this object.
+
+        This returns a :class:`MultiDict` representing the data at the
+        time of copying. The copy will no longer reflect changes to the
+        wrapped dicts.
+
+        .. versionchanged:: 0.15
+            Return a mutable :class:`MultiDict`.
+        """
+        return MultiDict(self)
+
+    def to_dict(self, flat=True):
+        """Return the contents as regular dict. If `flat` is `True` the
+        returned dict will only have the first item present, if `flat` is
+        `False` all values will be returned as lists.
+
+        :param flat: If set to `False` the dict returned will have lists
+                     with all the values in it. Otherwise it will only
+                     contain the first item for each key.
+        :return: a :class:`dict`
+        """
+        if flat:
+            return dict(self.items())
+
+        return dict(self.lists())
+
+    def __len__(self):
+        return len(self._keys_impl())
+
+    def __contains__(self, key):
+        for d in self.dicts:
+            if key in d:
+                return True
+        return False
+
+    def __repr__(self):
+        return f"{type(self).__name__}({self.dicts!r})"
+
+
+class ImmutableDict(ImmutableDictMixin, dict):
+    """An immutable :class:`dict`.
+
+    .. versionadded:: 0.5
+    """
+
+    def __repr__(self):
+        return f"{type(self).__name__}({dict.__repr__(self)})"
+
+    def copy(self):
+        """Return a shallow mutable copy of this object. Keep in mind that
+        the standard library's :func:`copy` function is a no-op for this class
+        like for any other python immutable type (eg: :class:`tuple`).
+        """
+        return dict(self)
+
+    def __copy__(self):
+        return self
+
+
+class ImmutableMultiDict(ImmutableMultiDictMixin, MultiDict):
+    """An immutable :class:`MultiDict`.
+
+    .. versionadded:: 0.5
+    """
+
+    def copy(self):
+        """Return a shallow mutable copy of this object. Keep in mind that
+        the standard library's :func:`copy` function is a no-op for this class
+        like for any other python immutable type (eg: :class:`tuple`).
+        """
+        return MultiDict(self)
+
+    def __copy__(self):
+        return self
+
+
+class ImmutableOrderedMultiDict(ImmutableMultiDictMixin, OrderedMultiDict):
+    """An immutable :class:`OrderedMultiDict`.
+
+    .. versionadded:: 0.6
+    """
+
+    def _iter_hashitems(self):
+        return enumerate(self.items(multi=True))
+
+    def copy(self):
+        """Return a shallow mutable copy of this object. Keep in mind that
+        the standard library's :func:`copy` function is a no-op for this class
+        like for any other python immutable type (eg: :class:`tuple`).
+        """
+        return OrderedMultiDict(self)
+
+    def __copy__(self):
+        return self
+
+
+class CallbackDict(UpdateDictMixin, dict):
+    """A dict that calls a function passed every time something is changed.
+    The function is passed the dict instance.
+    """
+
+    def __init__(self, initial=None, on_update=None):
+        dict.__init__(self, initial or ())
+        self.on_update = on_update
+
+    def __repr__(self):
+        return f"<{type(self).__name__} {dict.__repr__(self)}>"
+
+
+class HeaderSet(MutableSet):
+    """Similar to the :class:`ETags` class this implements a set-like structure.
+    Unlike :class:`ETags` this is case-insensitive and used for vary, allow, and
+    content-language headers.
+
+    If not constructed using the :func:`parse_set_header` function the
+    instantiation works like this:
+
+    >>> hs = HeaderSet(['foo', 'bar', 'baz'])
+    >>> hs
+    HeaderSet(['foo', 'bar', 'baz'])
+    """
+
+    def __init__(self, headers=None, on_update=None):
+        self._headers = list(headers or ())
+        self._set = {x.lower() for x in self._headers}
+        self.on_update = on_update
+
+    def add(self, header):
+        """Add a new header to the set."""
+        self.update((header,))
+
+    def remove(self, header):
+        """Remove a header from the set. This raises a :exc:`KeyError` if the
+        header is not in the set.
+
+        .. versionchanged:: 0.5
+            In older versions an :exc:`IndexError` was raised instead of a
+            :exc:`KeyError` if the object was missing.
+
+        :param header: the header to be removed.
+        """
+        key = header.lower()
+        if key not in self._set:
+            raise KeyError(header)
+        self._set.remove(key)
+        for idx, item in enumerate(self._headers):
+            # Compare against the lowered header so the stored entry is
+            # removed regardless of its original casing.
+            if item.lower() == key:
+                del self._headers[idx]
+                break
+        if self.on_update is not None:
+            self.on_update(self)
+
+    def update(self, iterable):
+        """Add all the headers from the iterable to the set.
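+
+        Added sketch: matching is case-insensitive, so only genuinely new
+        headers are appended:
+
+        >>> hs = HeaderSet(['GET'])
+        >>> hs.update(['get', 'POST'])
+        >>> hs
+        HeaderSet(['GET', 'POST'])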
+ + :param iterable: updates the set with the items from the iterable. + """ + inserted_any = False + for header in iterable: + key = header.lower() + if key not in self._set: + self._headers.append(header) + self._set.add(key) + inserted_any = True + if inserted_any and self.on_update is not None: + self.on_update(self) + + def discard(self, header): + """Like :meth:`remove` but ignores errors. + + :param header: the header to be discarded. + """ + try: + self.remove(header) + except KeyError: + pass + + def find(self, header): + """Return the index of the header in the set or return -1 if not found. + + :param header: the header to be looked up. + """ + header = header.lower() + for idx, item in enumerate(self._headers): + if item.lower() == header: + return idx + return -1 + + def index(self, header): + """Return the index of the header in the set or raise an + :exc:`IndexError`. + + :param header: the header to be looked up. + """ + rv = self.find(header) + if rv < 0: + raise IndexError(header) + return rv + + def clear(self): + """Clear the set.""" + self._set.clear() + del self._headers[:] + if self.on_update is not None: + self.on_update(self) + + def as_set(self, preserve_casing=False): + """Return the set as real python set type. When calling this, all + the items are converted to lowercase and the ordering is lost. + + :param preserve_casing: if set to `True` the items in the set returned + will have the original case like in the + :class:`HeaderSet`, otherwise they will + be lowercase. + """ + if preserve_casing: + return set(self._headers) + return set(self._set) + + def to_header(self): + """Convert the header set into an HTTP header string.""" + return ", ".join(map(http.quote_header_value, self._headers)) + + def __getitem__(self, idx): + return self._headers[idx] + + def __delitem__(self, idx): + rv = self._headers.pop(idx) + self._set.remove(rv.lower()) + if self.on_update is not None: + self.on_update(self) + + def __setitem__(self, idx, value): + old = self._headers[idx] + self._set.remove(old.lower()) + self._headers[idx] = value + self._set.add(value.lower()) + if self.on_update is not None: + self.on_update(self) + + def __contains__(self, header): + return header.lower() in self._set + + def __len__(self): + return len(self._set) + + def __iter__(self): + return iter(self._headers) + + def __bool__(self): + return bool(self._set) + + def __str__(self): + return self.to_header() + + def __repr__(self): + return f"{type(self).__name__}({self._headers!r})" + + +# circular dependencies +from .. 
import http diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/structures.pyi b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/structures.pyi new file mode 100644 index 0000000000000000000000000000000000000000..2e7af35bec19e8dcccc544dbd6eb909a98819ac1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/datastructures/structures.pyi @@ -0,0 +1,208 @@ +from collections.abc import Callable +from collections.abc import Iterable +from collections.abc import Iterator +from collections.abc import Mapping +from typing import Any +from typing import Generic +from typing import Literal +from typing import NoReturn +from typing import overload +from typing import TypeVar + +from .mixins import ( + ImmutableDictMixin, + ImmutableListMixin, + ImmutableMultiDictMixin, + UpdateDictMixin, +) + +D = TypeVar("D") +K = TypeVar("K") +T = TypeVar("T") +V = TypeVar("V") +_CD = TypeVar("_CD", bound="CallbackDict") + +def is_immutable(self: object) -> NoReturn: ... +def iter_multi_items( + mapping: Mapping[K, V | Iterable[V]] | Iterable[tuple[K, V]] +) -> Iterator[tuple[K, V]]: ... + +class ImmutableList(ImmutableListMixin[V]): ... + +class TypeConversionDict(dict[K, V]): + @overload + def get(self, key: K, default: None = ..., type: None = ...) -> V | None: ... + @overload + def get(self, key: K, default: D, type: None = ...) -> D | V: ... + @overload + def get(self, key: K, default: D, type: Callable[[V], T]) -> D | T: ... + @overload + def get(self, key: K, type: Callable[[V], T]) -> T | None: ... + +class ImmutableTypeConversionDict(ImmutableDictMixin[K, V], TypeConversionDict[K, V]): + def copy(self) -> TypeConversionDict[K, V]: ... + def __copy__(self) -> ImmutableTypeConversionDict: ... + +class MultiDict(TypeConversionDict[K, V]): + def __init__( + self, + mapping: Mapping[K, Iterable[V] | V] | Iterable[tuple[K, V]] | None = None, + ) -> None: ... + def __getitem__(self, item: K) -> V: ... + def __setitem__(self, key: K, value: V) -> None: ... + def add(self, key: K, value: V) -> None: ... + @overload + def getlist(self, key: K) -> list[V]: ... + @overload + def getlist(self, key: K, type: Callable[[V], T] = ...) -> list[T]: ... + def setlist(self, key: K, new_list: Iterable[V]) -> None: ... + def setdefault(self, key: K, default: V | None = None) -> V: ... + def setlistdefault( + self, key: K, default_list: Iterable[V] | None = None + ) -> list[V]: ... + def items(self, multi: bool = False) -> Iterator[tuple[K, V]]: ... # type: ignore + def lists(self) -> Iterator[tuple[K, list[V]]]: ... + def values(self) -> Iterator[V]: ... # type: ignore + def listvalues(self) -> Iterator[list[V]]: ... + def copy(self) -> MultiDict[K, V]: ... + def deepcopy(self, memo: Any = None) -> MultiDict[K, V]: ... + @overload + def to_dict(self) -> dict[K, V]: ... + @overload + def to_dict(self, flat: Literal[False]) -> dict[K, list[V]]: ... + def update( # type: ignore + self, mapping: Mapping[K, Iterable[V] | V] | Iterable[tuple[K, V]] + ) -> None: ... + @overload + def pop(self, key: K) -> V: ... + @overload + def pop(self, key: K, default: V | T = ...) -> V | T: ... + def popitem(self) -> tuple[K, V]: ... + def poplist(self, key: K) -> list[V]: ... + def popitemlist(self) -> tuple[K, list[V]]: ... + def __copy__(self) -> MultiDict[K, V]: ... + def __deepcopy__(self, memo: Any) -> MultiDict[K, V]: ... 
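+
+# Added illustrative note (not part of the upstream stub): with the
+# overloads above a type checker narrows the return type, e.g.::
+#
+#     d: MultiDict[str, str] = MultiDict([("a", "1")])
+#     n = d.get("a", type=int)  # inferred as int | None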
+ +class _omd_bucket(Generic[K, V]): + prev: _omd_bucket | None + next: _omd_bucket | None + key: K + value: V + def __init__(self, omd: OrderedMultiDict, key: K, value: V) -> None: ... + def unlink(self, omd: OrderedMultiDict) -> None: ... + +class OrderedMultiDict(MultiDict[K, V]): + _first_bucket: _omd_bucket | None + _last_bucket: _omd_bucket | None + def __init__(self, mapping: Mapping[K, V] | None = None) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __getitem__(self, key: K) -> V: ... + def __setitem__(self, key: K, value: V) -> None: ... + def __delitem__(self, key: K) -> None: ... + def keys(self) -> Iterator[K]: ... # type: ignore + def __iter__(self) -> Iterator[K]: ... + def values(self) -> Iterator[V]: ... # type: ignore + def items(self, multi: bool = False) -> Iterator[tuple[K, V]]: ... # type: ignore + def lists(self) -> Iterator[tuple[K, list[V]]]: ... + def listvalues(self) -> Iterator[list[V]]: ... + def add(self, key: K, value: V) -> None: ... + @overload + def getlist(self, key: K) -> list[V]: ... + @overload + def getlist(self, key: K, type: Callable[[V], T] = ...) -> list[T]: ... + def setlist(self, key: K, new_list: Iterable[V]) -> None: ... + def setlistdefault( + self, key: K, default_list: Iterable[V] | None = None + ) -> list[V]: ... + def update( # type: ignore + self, mapping: Mapping[K, V] | Iterable[tuple[K, V]] + ) -> None: ... + def poplist(self, key: K) -> list[V]: ... + @overload + def pop(self, key: K) -> V: ... + @overload + def pop(self, key: K, default: V | T = ...) -> V | T: ... + def popitem(self) -> tuple[K, V]: ... + def popitemlist(self) -> tuple[K, list[V]]: ... + +class CombinedMultiDict(ImmutableMultiDictMixin[K, V], MultiDict[K, V]): # type: ignore + dicts: list[MultiDict[K, V]] + def __init__(self, dicts: Iterable[MultiDict[K, V]] | None) -> None: ... + @classmethod + def fromkeys(cls, keys: Any, value: Any = None) -> NoReturn: ... + def __getitem__(self, key: K) -> V: ... + @overload # type: ignore + def get(self, key: K) -> V | None: ... + @overload + def get(self, key: K, default: V | T = ...) -> V | T: ... + @overload + def get( + self, key: K, default: T | None = None, type: Callable[[V], T] = ... + ) -> T | None: ... + @overload + def getlist(self, key: K) -> list[V]: ... + @overload + def getlist(self, key: K, type: Callable[[V], T] = ...) -> list[T]: ... + def _keys_impl(self) -> set[K]: ... + def keys(self) -> set[K]: ... # type: ignore + def __iter__(self) -> set[K]: ... # type: ignore + def items(self, multi: bool = False) -> Iterator[tuple[K, V]]: ... # type: ignore + def values(self) -> Iterator[V]: ... # type: ignore + def lists(self) -> Iterator[tuple[K, list[V]]]: ... + def listvalues(self) -> Iterator[list[V]]: ... + def copy(self) -> MultiDict[K, V]: ... + @overload + def to_dict(self) -> dict[K, V]: ... + @overload + def to_dict(self, flat: Literal[False]) -> dict[K, list[V]]: ... + def __contains__(self, key: K) -> bool: ... # type: ignore + def has_key(self, key: K) -> bool: ... + +class ImmutableDict(ImmutableDictMixin[K, V], dict[K, V]): + def copy(self) -> dict[K, V]: ... + def __copy__(self) -> ImmutableDict[K, V]: ... + +class ImmutableMultiDict( # type: ignore + ImmutableMultiDictMixin[K, V], MultiDict[K, V] +): + def copy(self) -> MultiDict[K, V]: ... + def __copy__(self) -> ImmutableMultiDict[K, V]: ... + +class ImmutableOrderedMultiDict( # type: ignore + ImmutableMultiDictMixin[K, V], OrderedMultiDict[K, V] +): + def _iter_hashitems(self) -> Iterator[tuple[int, tuple[K, V]]]: ... 
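+    # Added note: ``_iter_hashitems`` appears to feed the immutability
+    # mixin's hashing; enumerating ``(index, (key, value))`` pairs makes the
+    # item order significant, matching ordered equality semantics.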
+ def copy(self) -> OrderedMultiDict[K, V]: ... + def __copy__(self) -> ImmutableOrderedMultiDict[K, V]: ... + +class CallbackDict(UpdateDictMixin[K, V], dict[K, V]): + def __init__( + self, + initial: Mapping[K, V] | Iterable[tuple[K, V]] | None = None, + on_update: Callable[[_CD], None] | None = None, + ) -> None: ... + +class HeaderSet(set[str]): + _headers: list[str] + _set: set[str] + on_update: Callable[[HeaderSet], None] | None + def __init__( + self, + headers: Iterable[str] | None = None, + on_update: Callable[[HeaderSet], None] | None = None, + ) -> None: ... + def add(self, header: str) -> None: ... + def remove(self, header: str) -> None: ... + def update(self, iterable: Iterable[str]) -> None: ... # type: ignore + def discard(self, header: str) -> None: ... + def find(self, header: str) -> int: ... + def index(self, header: str) -> int: ... + def clear(self) -> None: ... + def as_set(self, preserve_casing: bool = False) -> set[str]: ... + def to_header(self) -> str: ... + def __getitem__(self, idx: int) -> str: ... + def __delitem__(self, idx: int) -> None: ... + def __setitem__(self, idx: int, value: str) -> None: ... + def __contains__(self, header: str) -> bool: ... # type: ignore + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[str]: ... diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/debug/__init__.py b/backend/test/lib/python3.8/site-packages/werkzeug/debug/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3b04b534ecc4209e4038beb08905ef65a6f1644d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/debug/__init__.py @@ -0,0 +1,534 @@ +from __future__ import annotations + +import getpass +import hashlib +import json +import os +import pkgutil +import re +import sys +import time +import typing as t +import uuid +from contextlib import ExitStack +from io import BytesIO +from itertools import chain +from os.path import basename +from os.path import join +from zlib import adler32 + +from .._internal import _log +from ..exceptions import NotFound +from ..http import parse_cookie +from ..security import gen_salt +from ..utils import send_file +from ..wrappers.request import Request +from ..wrappers.response import Response +from .console import Console +from .tbtools import DebugFrameSummary +from .tbtools import DebugTraceback +from .tbtools import render_console_html + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + +# A week +PIN_TIME = 60 * 60 * 24 * 7 + + +def hash_pin(pin: str) -> str: + return hashlib.sha1(f"{pin} added salt".encode("utf-8", "replace")).hexdigest()[:12] + + +_machine_id: str | bytes | None = None + + +def get_machine_id() -> str | bytes | None: + global _machine_id + + if _machine_id is not None: + return _machine_id + + def _generate() -> str | bytes | None: + linux = b"" + + # machine-id is stable across boots, boot_id is not. + for filename in "/etc/machine-id", "/proc/sys/kernel/random/boot_id": + try: + with open(filename, "rb") as f: + value = f.readline().strip() + except OSError: + continue + + if value: + linux += value + break + + # Containers share the same machine id, add some cgroup + # information. This is used outside containers too but should be + # relatively stable across boots. 
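+        # Added note: under Docker the last path component of the first
+        # cgroup line is typically the container id.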
+ try: + with open("/proc/self/cgroup", "rb") as f: + linux += f.readline().strip().rpartition(b"/")[2] + except OSError: + pass + + if linux: + return linux + + # On OS X, use ioreg to get the computer's serial number. + try: + # subprocess may not be available, e.g. Google App Engine + # https://github.com/pallets/werkzeug/issues/925 + from subprocess import Popen, PIPE + + dump = Popen( + ["ioreg", "-c", "IOPlatformExpertDevice", "-d", "2"], stdout=PIPE + ).communicate()[0] + match = re.search(b'"serial-number" = <([^>]+)', dump) + + if match is not None: + return match.group(1) + except (OSError, ImportError): + pass + + # On Windows, use winreg to get the machine guid. + if sys.platform == "win32": + import winreg + + try: + with winreg.OpenKey( + winreg.HKEY_LOCAL_MACHINE, + "SOFTWARE\\Microsoft\\Cryptography", + 0, + winreg.KEY_READ | winreg.KEY_WOW64_64KEY, + ) as rk: + guid: str | bytes + guid_type: int + guid, guid_type = winreg.QueryValueEx(rk, "MachineGuid") + + if guid_type == winreg.REG_SZ: + return guid.encode("utf-8") + + return guid + except OSError: + pass + + return None + + _machine_id = _generate() + return _machine_id + + +class _ConsoleFrame: + """Helper class so that we can reuse the frame console code for the + standalone console. + """ + + def __init__(self, namespace: dict[str, t.Any]): + self.console = Console(namespace) + self.id = 0 + + def eval(self, code: str) -> t.Any: + return self.console.eval(code) + + +def get_pin_and_cookie_name( + app: WSGIApplication, +) -> tuple[str, str] | tuple[None, None]: + """Given an application object this returns a semi-stable 9 digit pin + code and a random key. The hope is that this is stable between + restarts to not make debugging particularly frustrating. If the pin + was forcefully disabled this returns `None`. + + Second item in the resulting tuple is the cookie name for remembering. + """ + pin = os.environ.get("WERKZEUG_DEBUG_PIN") + rv = None + num = None + + # Pin was explicitly disabled + if pin == "off": + return None, None + + # Pin was provided explicitly + if pin is not None and pin.replace("-", "").isdecimal(): + # If there are separators in the pin, return it directly + if "-" in pin: + rv = pin + else: + num = pin + + modname = getattr(app, "__module__", t.cast(object, app).__class__.__module__) + username: str | None + + try: + # getuser imports the pwd module, which does not exist in Google + # App Engine. It may also raise a KeyError if the UID does not + # have a username, such as in Docker. + username = getpass.getuser() + except (ImportError, KeyError): + username = None + + mod = sys.modules.get(modname) + + # This information only exists to make the cookie unique on the + # computer, not as a security feature. + probably_public_bits = [ + username, + modname, + getattr(app, "__name__", type(app).__name__), + getattr(mod, "__file__", None), + ] + + # This information is here to make it harder for an attacker to + # guess the cookie name. They are unlikely to be contained anywhere + # within the unauthenticated debug page. 
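+    # Added note: ``uuid.getnode()`` is the hardware (MAC) address, so
+    # together with the machine id the PIN is tied to this specific host.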
+ private_bits = [str(uuid.getnode()), get_machine_id()] + + h = hashlib.sha1() + for bit in chain(probably_public_bits, private_bits): + if not bit: + continue + if isinstance(bit, str): + bit = bit.encode("utf-8") + h.update(bit) + h.update(b"cookiesalt") + + cookie_name = f"__wzd{h.hexdigest()[:20]}" + + # If we need to generate a pin we salt it a bit more so that we don't + # end up with the same value and generate out 9 digits + if num is None: + h.update(b"pinsalt") + num = f"{int(h.hexdigest(), 16):09d}"[:9] + + # Format the pincode in groups of digits for easier remembering if + # we don't have a result yet. + if rv is None: + for group_size in 5, 4, 3: + if len(num) % group_size == 0: + rv = "-".join( + num[x : x + group_size].rjust(group_size, "0") + for x in range(0, len(num), group_size) + ) + break + else: + rv = num + + return rv, cookie_name + + +class DebuggedApplication: + """Enables debugging support for a given application:: + + from werkzeug.debug import DebuggedApplication + from myapp import app + app = DebuggedApplication(app, evalex=True) + + The ``evalex`` argument allows evaluating expressions in any frame + of a traceback. This works by preserving each frame with its local + state. Some state, such as context globals, cannot be restored with + the frame by default. When ``evalex`` is enabled, + ``environ["werkzeug.debug.preserve_context"]`` will be a callable + that takes a context manager, and can be called multiple times. + Each context manager will be entered before evaluating code in the + frame, then exited again, so they can perform setup and cleanup for + each call. + + :param app: the WSGI application to run debugged. + :param evalex: enable exception evaluation feature (interactive + debugging). This requires a non-forking server. + :param request_key: The key that points to the request object in this + environment. This parameter is ignored in current + versions. + :param console_path: the URL for a general purpose console. + :param console_init_func: the function that is executed before starting + the general purpose console. The return value + is used as initial namespace. + :param show_hidden_frames: by default hidden traceback frames are skipped. + You can show them by setting this parameter + to `True`. + :param pin_security: can be used to disable the pin based security system. + :param pin_logging: enables the logging of the pin system. + + .. versionchanged:: 2.2 + Added the ``werkzeug.debug.preserve_context`` environ key. + """ + + _pin: str + _pin_cookie: str + + def __init__( + self, + app: WSGIApplication, + evalex: bool = False, + request_key: str = "werkzeug.request", + console_path: str = "/console", + console_init_func: t.Callable[[], dict[str, t.Any]] | None = None, + show_hidden_frames: bool = False, + pin_security: bool = True, + pin_logging: bool = True, + ) -> None: + if not console_init_func: + console_init_func = None + self.app = app + self.evalex = evalex + self.frames: dict[int, DebugFrameSummary | _ConsoleFrame] = {} + self.frame_contexts: dict[int, list[t.ContextManager[None]]] = {} + self.request_key = request_key + self.console_path = console_path + self.console_init_func = console_init_func + self.show_hidden_frames = show_hidden_frames + self.secret = gen_salt(20) + self._failed_pin_auth = 0 + + self.pin_logging = pin_logging + if pin_security: + # Print out the pin for the debugger on standard out. 
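+            # Added note: the reloader sets WERKZEUG_RUN_MAIN in the child
+            # process that actually serves requests, so the PIN is logged
+            # once rather than by the file-watching parent as well.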
+ if os.environ.get("WERKZEUG_RUN_MAIN") == "true" and pin_logging: + _log("warning", " * Debugger is active!") + if self.pin is None: + _log("warning", " * Debugger PIN disabled. DEBUGGER UNSECURED!") + else: + _log("info", " * Debugger PIN: %s", self.pin) + else: + self.pin = None + + @property + def pin(self) -> str | None: + if not hasattr(self, "_pin"): + pin_cookie = get_pin_and_cookie_name(self.app) + self._pin, self._pin_cookie = pin_cookie # type: ignore + return self._pin + + @pin.setter + def pin(self, value: str) -> None: + self._pin = value + + @property + def pin_cookie_name(self) -> str: + """The name of the pin cookie.""" + if not hasattr(self, "_pin_cookie"): + pin_cookie = get_pin_and_cookie_name(self.app) + self._pin, self._pin_cookie = pin_cookie # type: ignore + return self._pin_cookie + + def debug_application( + self, environ: WSGIEnvironment, start_response: StartResponse + ) -> t.Iterator[bytes]: + """Run the application and conserve the traceback frames.""" + contexts: list[t.ContextManager[t.Any]] = [] + + if self.evalex: + environ["werkzeug.debug.preserve_context"] = contexts.append + + app_iter = None + try: + app_iter = self.app(environ, start_response) + yield from app_iter + if hasattr(app_iter, "close"): + app_iter.close() + except Exception as e: + if hasattr(app_iter, "close"): + app_iter.close() # type: ignore + + tb = DebugTraceback(e, skip=1, hide=not self.show_hidden_frames) + + for frame in tb.all_frames: + self.frames[id(frame)] = frame + self.frame_contexts[id(frame)] = contexts + + is_trusted = bool(self.check_pin_trust(environ)) + html = tb.render_debugger_html( + evalex=self.evalex, + secret=self.secret, + evalex_trusted=is_trusted, + ) + response = Response(html, status=500, mimetype="text/html") + + try: + yield from response(environ, start_response) + except Exception: + # if we end up here there has been output but an error + # occurred. in that situation we can do nothing fancy any + # more, better log something into the error log and fall + # back gracefully. 
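+                # Added note: once start_response has sent the status line
+                # and headers, a fresh 500 page can no longer be delivered;
+                # wsgi.errors is the only channel left for the traceback.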
+ environ["wsgi.errors"].write( + "Debugging middleware caught exception in streamed " + "response at a point where response headers were already " + "sent.\n" + ) + + environ["wsgi.errors"].write("".join(tb.render_traceback_text())) + + def execute_command( # type: ignore[return] + self, + request: Request, + command: str, + frame: DebugFrameSummary | _ConsoleFrame, + ) -> Response: + """Execute a command in a console.""" + contexts = self.frame_contexts.get(id(frame), []) + + with ExitStack() as exit_stack: + for cm in contexts: + exit_stack.enter_context(cm) + + return Response(frame.eval(command), mimetype="text/html") + + def display_console(self, request: Request) -> Response: + """Display a standalone shell.""" + if 0 not in self.frames: + if self.console_init_func is None: + ns = {} + else: + ns = dict(self.console_init_func()) + ns.setdefault("app", self.app) + self.frames[0] = _ConsoleFrame(ns) + is_trusted = bool(self.check_pin_trust(request.environ)) + return Response( + render_console_html(secret=self.secret, evalex_trusted=is_trusted), + mimetype="text/html", + ) + + def get_resource(self, request: Request, filename: str) -> Response: + """Return a static resource from the shared folder.""" + path = join("shared", basename(filename)) + + try: + data = pkgutil.get_data(__package__, path) + except OSError: + return NotFound() # type: ignore[return-value] + else: + if data is None: + return NotFound() # type: ignore[return-value] + + etag = str(adler32(data) & 0xFFFFFFFF) + return send_file( + BytesIO(data), request.environ, download_name=filename, etag=etag + ) + + def check_pin_trust(self, environ: WSGIEnvironment) -> bool | None: + """Checks if the request passed the pin test. This returns `True` if the + request is trusted on a pin/cookie basis and returns `False` if not. + Additionally if the cookie's stored pin hash is wrong it will return + `None` so that appropriate action can be taken. + """ + if self.pin is None: + return True + val = parse_cookie(environ).get(self.pin_cookie_name) + if not val or "|" not in val: + return False + ts_str, pin_hash = val.split("|", 1) + + try: + ts = int(ts_str) + except ValueError: + return False + + if pin_hash != hash_pin(self.pin): + return None + return (time.time() - PIN_TIME) < ts + + def _fail_pin_auth(self) -> None: + time.sleep(5.0 if self._failed_pin_auth > 5 else 0.5) + self._failed_pin_auth += 1 + + def pin_auth(self, request: Request) -> Response: + """Authenticates with the pin.""" + exhausted = False + auth = False + trust = self.check_pin_trust(request.environ) + pin = t.cast(str, self.pin) + + # If the trust return value is `None` it means that the cookie is + # set but the stored pin hash value is bad. This means that the + # pin was changed. In this case we count a bad auth and unset the + # cookie. This way it becomes harder to guess the cookie name + # instead of the pin as we still count up failures. + bad_cookie = False + if trust is None: + self._fail_pin_auth() + bad_cookie = True + + # If we're trusted, we're authenticated. + elif trust: + auth = True + + # If we failed too many times, then we're locked out. 
+ elif self._failed_pin_auth > 10: + exhausted = True + + # Otherwise go through pin based authentication + else: + entered_pin = request.args["pin"] + + if entered_pin.strip().replace("-", "") == pin.replace("-", ""): + self._failed_pin_auth = 0 + auth = True + else: + self._fail_pin_auth() + + rv = Response( + json.dumps({"auth": auth, "exhausted": exhausted}), + mimetype="application/json", + ) + if auth: + rv.set_cookie( + self.pin_cookie_name, + f"{int(time.time())}|{hash_pin(pin)}", + httponly=True, + samesite="Strict", + secure=request.is_secure, + ) + elif bad_cookie: + rv.delete_cookie(self.pin_cookie_name) + return rv + + def log_pin_request(self) -> Response: + """Log the pin if needed.""" + if self.pin_logging and self.pin is not None: + _log( + "info", " * To enable the debugger you need to enter the security pin:" + ) + _log("info", " * Debugger pin code: %s", self.pin) + return Response("") + + def __call__( + self, environ: WSGIEnvironment, start_response: StartResponse + ) -> t.Iterable[bytes]: + """Dispatch the requests.""" + # important: don't ever access a function here that reads the incoming + # form data! Otherwise the application won't have access to that data + # any more! + request = Request(environ) + response = self.debug_application + if request.args.get("__debugger__") == "yes": + cmd = request.args.get("cmd") + arg = request.args.get("f") + secret = request.args.get("s") + frame = self.frames.get(request.args.get("frm", type=int)) # type: ignore + if cmd == "resource" and arg: + response = self.get_resource(request, arg) # type: ignore + elif cmd == "pinauth" and secret == self.secret: + response = self.pin_auth(request) # type: ignore + elif cmd == "printpin" and secret == self.secret: + response = self.log_pin_request() # type: ignore + elif ( + self.evalex + and cmd is not None + and frame is not None + and self.secret == secret + and self.check_pin_trust(environ) + ): + response = self.execute_command(request, cmd, frame) # type: ignore + elif ( + self.evalex + and self.console_path is not None + and request.path == self.console_path + ): + response = self.display_console(request) # type: ignore + return response(environ, start_response) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/debug/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/debug/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..45fb93df1f6f52a1eb1d3ba623702f977066a500 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/debug/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/debug/__pycache__/console.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/debug/__pycache__/console.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f60bfbea0b1d10923219d64cecdc9747d00ed18f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/debug/__pycache__/console.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/debug/__pycache__/repr.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/debug/__pycache__/repr.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..89af6f31348927c7db8afa32d357427e264b9c66 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/debug/__pycache__/repr.cpython-38.pyc differ diff --git 
a/backend/test/lib/python3.8/site-packages/werkzeug/debug/__pycache__/tbtools.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/debug/__pycache__/tbtools.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7316d9cd2400565b5ecf56ef04a0848cd1b25fa6 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/debug/__pycache__/tbtools.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/debug/console.py b/backend/test/lib/python3.8/site-packages/werkzeug/debug/console.py new file mode 100644 index 0000000000000000000000000000000000000000..03ddc07f2817e77d20e9dab578542d73a56f9730 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/debug/console.py @@ -0,0 +1,219 @@ +from __future__ import annotations + +import code +import sys +import typing as t +from contextvars import ContextVar +from types import CodeType + +from markupsafe import escape + +from .repr import debug_repr +from .repr import dump +from .repr import helper + +_stream: ContextVar[HTMLStringO] = ContextVar("werkzeug.debug.console.stream") +_ipy: ContextVar = ContextVar("werkzeug.debug.console.ipy") + + +class HTMLStringO: + """A StringO version that HTML escapes on write.""" + + def __init__(self) -> None: + self._buffer: list[str] = [] + + def isatty(self) -> bool: + return False + + def close(self) -> None: + pass + + def flush(self) -> None: + pass + + def seek(self, n: int, mode: int = 0) -> None: + pass + + def readline(self) -> str: + if len(self._buffer) == 0: + return "" + ret = self._buffer[0] + del self._buffer[0] + return ret + + def reset(self) -> str: + val = "".join(self._buffer) + del self._buffer[:] + return val + + def _write(self, x: str) -> None: + self._buffer.append(x) + + def write(self, x: str) -> None: + self._write(escape(x)) + + def writelines(self, x: t.Iterable[str]) -> None: + self._write(escape("".join(x))) + + +class ThreadedStream: + """Thread-local wrapper for sys.stdout for the interactive console.""" + + @staticmethod + def push() -> None: + if not isinstance(sys.stdout, ThreadedStream): + sys.stdout = t.cast(t.TextIO, ThreadedStream()) + + _stream.set(HTMLStringO()) + + @staticmethod + def fetch() -> str: + try: + stream = _stream.get() + except LookupError: + return "" + + return stream.reset() + + @staticmethod + def displayhook(obj: object) -> None: + try: + stream = _stream.get() + except LookupError: + return _displayhook(obj) # type: ignore + + # stream._write bypasses escaping as debug_repr is + # already generating HTML for us. + if obj is not None: + _ipy.get().locals["_"] = obj + stream._write(debug_repr(obj)) + + def __setattr__(self, name: str, value: t.Any) -> None: + raise AttributeError(f"read only attribute {name}") + + def __dir__(self) -> list[str]: + return dir(sys.__stdout__) + + def __getattribute__(self, name: str) -> t.Any: + try: + stream = _stream.get() + except LookupError: + stream = sys.__stdout__ # type: ignore[assignment] + + return getattr(stream, name) + + def __repr__(self) -> str: + return repr(sys.__stdout__) + + +# add the threaded stream as display hook +_displayhook = sys.displayhook +sys.displayhook = ThreadedStream.displayhook + + +class _ConsoleLoader: + def __init__(self) -> None: + self._storage: dict[int, str] = {} + + def register(self, code: CodeType, source: str) -> None: + self._storage[id(code)] = source + # register code objects of wrapped functions too. 
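+        # Added note: nested function and lambda bodies show up as CodeType
+        # entries in co_consts, so their frames resolve to the same source.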
+ for var in code.co_consts: + if isinstance(var, CodeType): + self._storage[id(var)] = source + + def get_source_by_code(self, code: CodeType) -> str | None: + try: + return self._storage[id(code)] + except KeyError: + return None + + +class _InteractiveConsole(code.InteractiveInterpreter): + locals: dict[str, t.Any] + + def __init__(self, globals: dict[str, t.Any], locals: dict[str, t.Any]) -> None: + self.loader = _ConsoleLoader() + locals = { + **globals, + **locals, + "dump": dump, + "help": helper, + "__loader__": self.loader, + } + super().__init__(locals) + original_compile = self.compile + + def compile(source: str, filename: str, symbol: str) -> CodeType | None: + code = original_compile(source, filename, symbol) + + if code is not None: + self.loader.register(code, source) + + return code + + self.compile = compile # type: ignore[assignment] + self.more = False + self.buffer: list[str] = [] + + def runsource(self, source: str, **kwargs: t.Any) -> str: # type: ignore + source = f"{source.rstrip()}\n" + ThreadedStream.push() + prompt = "... " if self.more else ">>> " + try: + source_to_eval = "".join(self.buffer + [source]) + if super().runsource(source_to_eval, "<debugger>", "single"): + self.more = True + self.buffer.append(source) + else: + self.more = False + del self.buffer[:] + finally: + output = ThreadedStream.fetch() + return f"{prompt}{escape(source)}{output}" + + def runcode(self, code: CodeType) -> None: + try: + exec(code, self.locals) + except Exception: + self.showtraceback() + + def showtraceback(self) -> None: + from .tbtools import DebugTraceback + + exc = t.cast(BaseException, sys.exc_info()[1]) + te = DebugTraceback(exc, skip=1) + sys.stdout._write(te.render_traceback_html()) # type: ignore + + def showsyntaxerror(self, filename: str | None = None) -> None: + from .tbtools import DebugTraceback + + exc = t.cast(BaseException, sys.exc_info()[1]) + te = DebugTraceback(exc, skip=4) + sys.stdout._write(te.render_traceback_html()) # type: ignore + + def write(self, data: str) -> None: + sys.stdout.write(data) + + +class Console: + """An interactive console.""" + + def __init__( + self, + globals: dict[str, t.Any] | None = None, + locals: dict[str, t.Any] | None = None, + ) -> None: + if locals is None: + locals = {} + if globals is None: + globals = {} + self._ipy = _InteractiveConsole(globals, locals) + + def eval(self, code: str) -> str: + _ipy.set(self._ipy) + old_sys_stdout = sys.stdout + try: + return self._ipy.runsource(code) + finally: + sys.stdout = old_sys_stdout diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/debug/repr.py b/backend/test/lib/python3.8/site-packages/werkzeug/debug/repr.py new file mode 100644 index 0000000000000000000000000000000000000000..3bf15a77a195891e8bd90d92970b828a5e04bbe4 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/debug/repr.py @@ -0,0 +1,283 @@ +"""Object representations for debugging purposes. Unlike the default +repr, these expose more information and produce HTML instead of ASCII. + +Together with the CSS and JavaScript of the debugger this gives a +colorful and more compact output. 
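+
+An added, illustrative example of the output shape (assumed, for
+illustration only): calling ``debug_repr([1, "two"])`` yields HTML along
+the lines of
+``[<span class="number">1</span>, <span class="string">'two'</span>]``.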
+""" +from __future__ import annotations + +import codecs +import re +import sys +import typing as t +from collections import deque +from traceback import format_exception_only + +from markupsafe import escape + +missing = object() +_paragraph_re = re.compile(r"(?:\r\n|\r|\n){2,}") +RegexType = type(_paragraph_re) + +HELP_HTML = """\ +<div class=box> + <h3>%(title)s</h3> + <pre class=help>%(text)s</pre> +</div>\ +""" +OBJECT_DUMP_HTML = """\ +<div class=box> + <h3>%(title)s</h3> + %(repr)s + <table>%(items)s</table> +</div>\ +""" + + +def debug_repr(obj: object) -> str: + """Creates a debug repr of an object as HTML string.""" + return DebugReprGenerator().repr(obj) + + +def dump(obj: object = missing) -> None: + """Print the object details to stdout._write (for the interactive + console of the web debugger. + """ + gen = DebugReprGenerator() + if obj is missing: + rv = gen.dump_locals(sys._getframe(1).f_locals) + else: + rv = gen.dump_object(obj) + sys.stdout._write(rv) # type: ignore + + +class _Helper: + """Displays an HTML version of the normal help, for the interactive + debugger only because it requires a patched sys.stdout. + """ + + def __repr__(self) -> str: + return "Type help(object) for help about object." + + def __call__(self, topic: t.Any | None = None) -> None: + if topic is None: + sys.stdout._write(f"<span class=help>{self!r}</span>") # type: ignore + return + import pydoc + + pydoc.help(topic) + rv = sys.stdout.reset() # type: ignore + paragraphs = _paragraph_re.split(rv) + if len(paragraphs) > 1: + title = paragraphs[0] + text = "\n\n".join(paragraphs[1:]) + else: + title = "Help" + text = paragraphs[0] + sys.stdout._write(HELP_HTML % {"title": title, "text": text}) # type: ignore + + +helper = _Helper() + + +def _add_subclass_info( + inner: str, obj: object, base: t.Type | tuple[t.Type, ...] 
+) -> str: + if isinstance(base, tuple): + for cls in base: + if type(obj) is cls: + return inner + elif type(obj) is base: + return inner + module = "" + if obj.__class__.__module__ not in ("__builtin__", "exceptions"): + module = f'<span class="module">{obj.__class__.__module__}.</span>' + return f"{module}{type(obj).__name__}({inner})" + + +def _sequence_repr_maker( + left: str, right: str, base: t.Type, limit: int = 8 +) -> t.Callable[[DebugReprGenerator, t.Iterable, bool], str]: + def proxy(self: DebugReprGenerator, obj: t.Iterable, recursive: bool) -> str: + if recursive: + return _add_subclass_info(f"{left}...{right}", obj, base) + buf = [left] + have_extended_section = False + for idx, item in enumerate(obj): + if idx: + buf.append(", ") + if idx == limit: + buf.append('<span class="extended">') + have_extended_section = True + buf.append(self.repr(item)) + if have_extended_section: + buf.append("</span>") + buf.append(right) + return _add_subclass_info("".join(buf), obj, base) + + return proxy + + +class DebugReprGenerator: + def __init__(self) -> None: + self._stack: list[t.Any] = [] + + list_repr = _sequence_repr_maker("[", "]", list) + tuple_repr = _sequence_repr_maker("(", ")", tuple) + set_repr = _sequence_repr_maker("set([", "])", set) + frozenset_repr = _sequence_repr_maker("frozenset([", "])", frozenset) + deque_repr = _sequence_repr_maker( + '<span class="module">collections.</span>deque([', "])", deque + ) + + def regex_repr(self, obj: t.Pattern) -> str: + pattern = repr(obj.pattern) + pattern = codecs.decode(pattern, "unicode-escape", "ignore") + pattern = f"r{pattern}" + return f're.compile(<span class="string regex">{pattern}</span>)' + + def string_repr(self, obj: str | bytes, limit: int = 70) -> str: + buf = ['<span class="string">'] + r = repr(obj) + + # shorten the repr when the hidden part would be at least 3 chars + if len(r) - limit > 2: + buf.extend( + ( + escape(r[:limit]), + '<span class="extended">', + escape(r[limit:]), + "</span>", + ) + ) + else: + buf.append(escape(r)) + + buf.append("</span>") + out = "".join(buf) + + # if the repr looks like a standard string, add subclass info if needed + if r[0] in "'\"" or (r[0] == "b" and r[1] in "'\""): + return _add_subclass_info(out, obj, (bytes, str)) + + # otherwise, assume the repr distinguishes the subclass already + return out + + def dict_repr( + self, + d: dict[int, None] | dict[str, int] | dict[str | int, int], + recursive: bool, + limit: int = 5, + ) -> str: + if recursive: + return _add_subclass_info("{...}", d, dict) + buf = ["{"] + have_extended_section = False + for idx, (key, value) in enumerate(d.items()): + if idx: + buf.append(", ") + if idx == limit - 1: + buf.append('<span class="extended">') + have_extended_section = True + buf.append( + f'<span class="pair"><span class="key">{self.repr(key)}</span>:' + f' <span class="value">{self.repr(value)}</span></span>' + ) + if have_extended_section: + buf.append("</span>") + buf.append("}") + return _add_subclass_info("".join(buf), d, dict) + + def object_repr(self, obj: type[dict] | t.Callable | type[list] | None) -> str: + r = repr(obj) + return f'<span class="object">{escape(r)}</span>' + + def dispatch_repr(self, obj: t.Any, recursive: bool) -> str: + if obj is helper: + return f'<span class="help">{helper!r}</span>' + if isinstance(obj, (int, float, complex)): + return f'<span class="number">{obj!r}</span>' + if isinstance(obj, str) or isinstance(obj, bytes): + return self.string_repr(obj) + if isinstance(obj, RegexType): + return 
self.regex_repr(obj) + if isinstance(obj, list): + return self.list_repr(obj, recursive) + if isinstance(obj, tuple): + return self.tuple_repr(obj, recursive) + if isinstance(obj, set): + return self.set_repr(obj, recursive) + if isinstance(obj, frozenset): + return self.frozenset_repr(obj, recursive) + if isinstance(obj, dict): + return self.dict_repr(obj, recursive) + if isinstance(obj, deque): + return self.deque_repr(obj, recursive) + return self.object_repr(obj) + + def fallback_repr(self) -> str: + try: + info = "".join(format_exception_only(*sys.exc_info()[:2])) + except Exception: + info = "?" + return ( + '<span class="brokenrepr">' + f"<broken repr ({escape(info.strip())})></span>" + ) + + def repr(self, obj: object) -> str: + recursive = False + for item in self._stack: + if item is obj: + recursive = True + break + self._stack.append(obj) + try: + try: + return self.dispatch_repr(obj, recursive) + except Exception: + return self.fallback_repr() + finally: + self._stack.pop() + + def dump_object(self, obj: object) -> str: + repr = None + items: list[tuple[str, str]] | None = None + + if isinstance(obj, dict): + title = "Contents of" + items = [] + for key, value in obj.items(): + if not isinstance(key, str): + items = None + break + items.append((key, self.repr(value))) + if items is None: + items = [] + repr = self.repr(obj) + for key in dir(obj): + try: + items.append((key, self.repr(getattr(obj, key)))) + except Exception: + pass + title = "Details for" + title += f" {object.__repr__(obj)[1:-1]}" + return self.render_object_dump(items, title, repr) + + def dump_locals(self, d: dict[str, t.Any]) -> str: + items = [(key, self.repr(value)) for key, value in d.items()] + return self.render_object_dump(items, "Local variables in frame") + + def render_object_dump( + self, items: list[tuple[str, str]], title: str, repr: str | None = None + ) -> str: + html_items = [] + for key, value in items: + html_items.append(f"<tr><th>{escape(key)}<td><pre class=repr>{value}</pre>") + if not html_items: + html_items.append("<tr><td><em>Nothing</em>") + return OBJECT_DUMP_HTML % { + "title": escape(title), + "repr": f"<pre class=repr>{repr if repr else ''}</pre>", + "items": "\n".join(html_items), + } diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/ICON_LICENSE.md b/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/ICON_LICENSE.md new file mode 100644 index 0000000000000000000000000000000000000000..3bdbfc739ea9c4d80d02f1a495cb0ee2f7be9b97 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/ICON_LICENSE.md @@ -0,0 +1,6 @@ +Silk icon set 1.3 by Mark James <mjames@gmail.com> + +http://www.famfamfam.com/lab/icons/silk/ + +License: [CC-BY-2.5](https://creativecommons.org/licenses/by/2.5/) +or [CC-BY-3.0](https://creativecommons.org/licenses/by/3.0/) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/console.png b/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/console.png new file mode 100644 index 0000000000000000000000000000000000000000..c28dd63812d80e416682f835652f8e5824bdccb2 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/console.png differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/debugger.js b/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/debugger.js new file mode 100644 index 0000000000000000000000000000000000000000..f463e9c77e16202e35c0a6bf7e8c60c29abaa807 --- /dev/null +++ 
b/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/debugger.js @@ -0,0 +1,360 @@ +docReady(() => { + if (!EVALEX_TRUSTED) { + initPinBox(); + } + // if we are in console mode, show the console. + if (CONSOLE_MODE && EVALEX) { + createInteractiveConsole(); + } + + const frames = document.querySelectorAll("div.traceback div.frame"); + if (EVALEX) { + addConsoleIconToFrames(frames); + } + addEventListenersToElements(document.querySelectorAll("div.detail"), "click", () => + document.querySelector("div.traceback").scrollIntoView(false) + ); + addToggleFrameTraceback(frames); + addToggleTraceTypesOnClick(document.querySelectorAll("h2.traceback")); + addInfoPrompt(document.querySelectorAll("span.nojavascript")); + wrapPlainTraceback(); +}); + +function addToggleFrameTraceback(frames) { + frames.forEach((frame) => { + frame.addEventListener("click", () => { + frame.getElementsByTagName("pre")[0].parentElement.classList.toggle("expanded"); + }); + }) +} + + +function wrapPlainTraceback() { + const plainTraceback = document.querySelector("div.plain textarea"); + const wrapper = document.createElement("pre"); + const textNode = document.createTextNode(plainTraceback.textContent); + wrapper.appendChild(textNode); + plainTraceback.replaceWith(wrapper); +} + +function initPinBox() { + document.querySelector(".pin-prompt form").addEventListener( + "submit", + function (event) { + event.preventDefault(); + const pin = encodeURIComponent(this.pin.value); + const encodedSecret = encodeURIComponent(SECRET); + const btn = this.btn; + btn.disabled = true; + + fetch( + `${document.location.pathname}?__debugger__=yes&cmd=pinauth&pin=${pin}&s=${encodedSecret}` + ) + .then((res) => res.json()) + .then(({auth, exhausted}) => { + if (auth) { + EVALEX_TRUSTED = true; + fadeOut(document.getElementsByClassName("pin-prompt")[0]); + } else { + alert( + `Error: ${ + exhausted + ? "too many attempts. Restart server to retry." + : "incorrect pin" + }` + ); + } + }) + .catch((err) => { + alert("Error: Could not verify PIN. 
Network error?"); + console.error(err); + }) + .finally(() => (btn.disabled = false)); + }, + false + ); +} + +function promptForPin() { + if (!EVALEX_TRUSTED) { + const encodedSecret = encodeURIComponent(SECRET); + fetch( + `${document.location.pathname}?__debugger__=yes&cmd=printpin&s=${encodedSecret}` + ); + const pinPrompt = document.getElementsByClassName("pin-prompt")[0]; + fadeIn(pinPrompt); + document.querySelector('.pin-prompt input[name="pin"]').focus(); + } +} + +/** + * Helper function for shell initialization + */ +function openShell(consoleNode, target, frameID) { + promptForPin(); + if (consoleNode) { + slideToggle(consoleNode); + return consoleNode; + } + let historyPos = 0; + const history = [""]; + const consoleElement = createConsole(); + const output = createConsoleOutput(); + const form = createConsoleInputForm(); + const command = createConsoleInput(); + + target.parentNode.appendChild(consoleElement); + consoleElement.append(output); + consoleElement.append(form); + form.append(command); + command.focus(); + slideToggle(consoleElement); + + form.addEventListener("submit", (e) => { + handleConsoleSubmit(e, command, frameID).then((consoleOutput) => { + output.append(consoleOutput); + command.focus(); + consoleElement.scrollTo(0, consoleElement.scrollHeight); + const old = history.pop(); + history.push(command.value); + if (typeof old !== "undefined") { + history.push(old); + } + historyPos = history.length - 1; + command.value = ""; + }); + }); + + command.addEventListener("keydown", (e) => { + if (e.key === "l" && e.ctrlKey) { + output.innerText = "--- screen cleared ---"; + } else if (e.key === "ArrowUp" || e.key === "ArrowDown") { + // Handle up arrow and down arrow. + if (e.key === "ArrowUp" && historyPos > 0) { + e.preventDefault(); + historyPos--; + } else if (e.key === "ArrowDown" && historyPos < history.length - 1) { + historyPos++; + } + command.value = history[historyPos]; + } + return false; + }); + + return consoleElement; +} + +function addEventListenersToElements(elements, event, listener) { + elements.forEach((el) => el.addEventListener(event, listener)); +} + +/** + * Add extra info + */ +function addInfoPrompt(elements) { + for (let i = 0; i < elements.length; i++) { + elements[i].innerHTML = + "<p>To switch between the interactive traceback and the plaintext " + + 'one, you can click on the "Traceback" headline. From the text ' + + "traceback you can also create a paste of it. " + + (!EVALEX + ? "" + : "For code execution mouse-over the frame you want to debug and " + + "click on the console icon on the right side." 
+ + "<p>You can execute arbitrary Python code in the stack frames and " + + "there are some extra helpers available for introspection:" + + "<ul><li><code>dump()</code> shows all variables in the frame" + + "<li><code>dump(obj)</code> dumps all that's known about the object</ul>"); + elements[i].classList.remove("nojavascript"); + } +} + +function addConsoleIconToFrames(frames) { + for (let i = 0; i < frames.length; i++) { + let consoleNode = null; + const target = frames[i]; + const frameID = frames[i].id.substring(6); + + for (let j = 0; j < target.getElementsByTagName("pre").length; j++) { + const img = createIconForConsole(); + img.addEventListener("click", (e) => { + e.stopPropagation(); + consoleNode = openShell(consoleNode, target, frameID); + return false; + }); + target.getElementsByTagName("pre")[j].append(img); + } + } +} + +function slideToggle(target) { + target.classList.toggle("active"); +} + +/** + * toggle traceback types on click. + */ +function addToggleTraceTypesOnClick(elements) { + for (let i = 0; i < elements.length; i++) { + elements[i].addEventListener("click", () => { + document.querySelector("div.traceback").classList.toggle("hidden"); + document.querySelector("div.plain").classList.toggle("hidden"); + }); + elements[i].style.cursor = "pointer"; + document.querySelector("div.plain").classList.toggle("hidden"); + } +} + +function createConsole() { + const consoleNode = document.createElement("pre"); + consoleNode.classList.add("console"); + consoleNode.classList.add("active"); + return consoleNode; +} + +function createConsoleOutput() { + const output = document.createElement("div"); + output.classList.add("output"); + output.innerHTML = "[console ready]"; + return output; +} + +function createConsoleInputForm() { + const form = document.createElement("form"); + form.innerHTML = ">>> "; + return form; +} + +function createConsoleInput() { + const command = document.createElement("input"); + command.type = "text"; + command.setAttribute("autocomplete", "off"); + command.setAttribute("spellcheck", false); + command.setAttribute("autocapitalize", "off"); + command.setAttribute("autocorrect", "off"); + return command; +} + +function createIconForConsole() { + const img = document.createElement("img"); + img.setAttribute("src", "?__debugger__=yes&cmd=resource&f=console.png"); + img.setAttribute("title", "Open an interactive python shell in this frame"); + return img; +} + +function createExpansionButtonForConsole() { + const expansionButton = document.createElement("a"); + expansionButton.setAttribute("href", "#"); + expansionButton.setAttribute("class", "toggle"); + expansionButton.innerHTML = " "; + return expansionButton; +} + +function createInteractiveConsole() { + const target = document.querySelector("div.console div.inner"); + while (target.firstChild) { + target.removeChild(target.firstChild); + } + openShell(null, target, 0); +} + +function handleConsoleSubmit(e, command, frameID) { + // Prevent page from refreshing. + e.preventDefault(); + + return new Promise((resolve) => { + // Get input command. + const cmd = command.value; + + // Setup GET request. + const urlPath = ""; + const params = { + __debugger__: "yes", + cmd: cmd, + frm: frameID, + s: SECRET, + }; + const paramString = Object.keys(params) + .map((key) => { + return "&" + encodeURIComponent(key) + "=" + encodeURIComponent(params[key]); + }) + .join(""); + + fetch(urlPath + "?" 
+ paramString) + .then((res) => { + return res.text(); + }) + .then((data) => { + const tmp = document.createElement("div"); + tmp.innerHTML = data; + resolve(tmp); + + // Handle expandable span for long list outputs. + // Example to test: list(range(13)) + let wrapperAdded = false; + const wrapperSpan = document.createElement("span"); + const expansionButton = createExpansionButtonForConsole(); + + tmp.querySelectorAll("span.extended").forEach((spanToWrap) => { + const parentDiv = spanToWrap.parentNode; + if (!wrapperAdded) { + parentDiv.insertBefore(wrapperSpan, spanToWrap); + wrapperAdded = true; + } + parentDiv.removeChild(spanToWrap); + wrapperSpan.append(spanToWrap); + spanToWrap.hidden = true; + + expansionButton.addEventListener("click", (event) => { + event.preventDefault(); + spanToWrap.hidden = !spanToWrap.hidden; + expansionButton.classList.toggle("open"); + return false; + }); + }); + + // Add expansion button at end of wrapper. + if (wrapperAdded) { + wrapperSpan.append(expansionButton); + } + }) + .catch((err) => { + console.error(err); + }); + return false; + }); +} + +function fadeOut(element) { + element.style.opacity = 1; + + (function fade() { + element.style.opacity -= 0.1; + if (element.style.opacity < 0) { + element.style.display = "none"; + } else { + requestAnimationFrame(fade); + } + })(); +} + +function fadeIn(element, display) { + element.style.opacity = 0; + element.style.display = display || "block"; + + (function fade() { + let val = parseFloat(element.style.opacity) + 0.1; + if (val <= 1) { + element.style.opacity = val; + requestAnimationFrame(fade); + } + })(); +} + +function docReady(fn) { + if (document.readyState === "complete" || document.readyState === "interactive") { + setTimeout(fn, 1); + } else { + document.addEventListener("DOMContentLoaded", fn); + } +} diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/less.png b/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/less.png new file mode 100644 index 0000000000000000000000000000000000000000..5efefd62b43e4f11dd300be4355a4b413c7a70d2 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/less.png differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/more.png b/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/more.png new file mode 100644 index 0000000000000000000000000000000000000000..804fa226fe3ed9e6cc2bd044a848f33a2d7b4e4f Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/more.png differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/style.css b/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/style.css new file mode 100644 index 0000000000000000000000000000000000000000..e9397ca0a1b6c26f30cb28fc81510a48fc46ede9 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/debug/shared/style.css @@ -0,0 +1,150 @@ +body, input { font-family: sans-serif; color: #000; text-align: center; + margin: 1em; padding: 0; font-size: 15px; } +h1, h2, h3 { font-weight: normal; } + +input { background-color: #fff; margin: 0; text-align: left; + outline: none !important; } +input[type="submit"] { padding: 3px 6px; } +a { color: #11557C; } +a:hover { color: #177199; } +pre, code, +textarea { font-family: monospace; font-size: 14px; } + +div.debugger { text-align: left; padding: 12px; margin: auto; + background-color: white; } +h1 { font-size: 36px; margin: 0 0 0.3em 0; } +div.detail { cursor: pointer; } 
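+/* The "detail" block under the page title holds the exception message;
+   the paragraph rule below renders it as wrapped monospace text so long
+   messages stay readable. */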
+div.detail p { margin: 0 0 8px 13px; font-size: 14px; white-space: pre-wrap; + font-family: monospace; } +div.explanation { margin: 20px 13px; font-size: 15px; color: #555; } +div.footer { font-size: 13px; text-align: right; margin: 30px 0; + color: #86989B; } + +h2 { font-size: 16px; margin: 1.3em 0 0.0 0; padding: 9px; + background-color: #11557C; color: white; } +h2 em, h3 em { font-style: normal; color: #A5D6D9; font-weight: normal; } + +div.traceback, div.plain { border: 1px solid #ddd; margin: 0 0 1em 0; padding: 10px; } +div.plain p { margin: 0; } +div.plain textarea, +div.plain pre { margin: 10px 0 0 0; padding: 4px; + background-color: #E8EFF0; border: 1px solid #D3E7E9; } +div.plain textarea { width: 99%; height: 300px; } +div.traceback h3 { font-size: 1em; margin: 0 0 0.8em 0; } +div.traceback ul { list-style: none; margin: 0; padding: 0 0 0 1em; } +div.traceback h4 { font-size: 13px; font-weight: normal; margin: 0.7em 0 0.1em 0; } +div.traceback pre { margin: 0; padding: 5px 0 3px 15px; + background-color: #E8EFF0; border: 1px solid #D3E7E9; } +div.traceback .library .current { background: white; color: #555; } +div.traceback .expanded .current { background: #E8EFF0; color: black; } +div.traceback pre:hover { background-color: #DDECEE; color: black; cursor: pointer; } +div.traceback div.source.expanded pre + pre { border-top: none; } + +div.traceback span.ws { display: none; } +div.traceback pre.before, div.traceback pre.after { display: none; background: white; } +div.traceback div.source.expanded pre.before, +div.traceback div.source.expanded pre.after { + display: block; +} + +div.traceback div.source.expanded span.ws { + display: inline; +} + +div.traceback blockquote { margin: 1em 0 0 0; padding: 0; white-space: pre-line; } +div.traceback img { float: right; padding: 2px; margin: -3px 2px 0 0; display: none; } +div.traceback img:hover { background-color: #ddd; cursor: pointer; + border-color: #BFDDE0; } +div.traceback pre:hover img { display: block; } +div.traceback cite.filename { font-style: normal; color: #3B666B; } + +pre.console { border: 1px solid #ccc; background: white!important; + color: black; padding: 5px!important; + margin: 3px 0 0 0!important; cursor: default!important; + max-height: 400px; overflow: auto; } +pre.console form { color: #555; } +pre.console input { background-color: transparent; color: #555; + width: 90%; font-family: monospace; font-size: 14px; + border: none!important; } + +span.string { color: #30799B; } +span.number { color: #9C1A1C; } +span.help { color: #3A7734; } +span.object { color: #485F6E; } +span.extended { opacity: 0.5; } +span.extended:hover { opacity: 1; } +a.toggle { text-decoration: none; background-repeat: no-repeat; + background-position: center center; + background-image: url(?__debugger__=yes&cmd=resource&f=more.png); } +a.toggle:hover { background-color: #444; } +a.open { background-image: url(?__debugger__=yes&cmd=resource&f=less.png); } + +pre.console div.traceback, +pre.console div.box { margin: 5px 10px; white-space: normal; + border: 1px solid #11557C; padding: 10px; + font-family: sans-serif; } +pre.console div.box h3, +pre.console div.traceback h3 { margin: -10px -10px 10px -10px; padding: 5px; + background: #11557C; color: white; } + +pre.console div.traceback pre:hover { cursor: default; background: #E8EFF0; } +pre.console div.traceback pre.syntaxerror { background: inherit; border: none; + margin: 20px -10px -10px -10px; + padding: 10px; border-top: 1px solid #BFDDE0; + background: #E8EFF0; } +pre.console 
div.noframe-traceback pre.syntaxerror { margin-top: -10px; border: none; } + +pre.console div.box pre.repr { padding: 0; margin: 0; background-color: white; border: none; } +pre.console div.box table { margin-top: 6px; } +pre.console div.box pre { border: none; } +pre.console div.box pre.help { background-color: white; } +pre.console div.box pre.help:hover { cursor: default; } +pre.console table tr { vertical-align: top; } +div.console { border: 1px solid #ccc; padding: 4px; background-color: #fafafa; } + +div.traceback pre, div.console pre { + white-space: pre-wrap; /* css-3 should we be so lucky... */ + white-space: -moz-pre-wrap; /* Mozilla, since 1999 */ + white-space: -pre-wrap; /* Opera 4-6 ?? */ + white-space: -o-pre-wrap; /* Opera 7 ?? */ + word-wrap: break-word; /* Internet Explorer 5.5+ */ + _white-space: pre; /* IE only hack to re-specify in + addition to word-wrap */ +} + + +div.pin-prompt { + position: absolute; + display: none; + top: 0; + bottom: 0; + left: 0; + right: 0; + background: rgba(255, 255, 255, 0.8); +} + +div.pin-prompt .inner { + background: #eee; + padding: 10px 50px; + width: 350px; + margin: 10% auto 0 auto; + border: 1px solid #ccc; + border-radius: 2px; +} + +div.exc-divider { + margin: 0.7em 0 0 -1em; + padding: 0.5em; + background: #11557C; + color: #ddd; + border: 1px solid #ddd; +} + +.console.active { + max-height: 0!important; + display: none; +} + +.hidden { + display: none; +} diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/debug/tbtools.py b/backend/test/lib/python3.8/site-packages/werkzeug/debug/tbtools.py new file mode 100644 index 0000000000000000000000000000000000000000..c45f56ef08a8746246f678374a106929b3b42c8b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/debug/tbtools.py @@ -0,0 +1,437 @@ +from __future__ import annotations + +import itertools +import linecache +import os +import re +import sys +import sysconfig +import traceback +import typing as t + +from markupsafe import escape + +from ..utils import cached_property +from .console import Console + +HEADER = """\ +<!doctype html> +<html lang=en> + <head> + <title>%(title)s // Werkzeug Debugger</title> + <link rel="stylesheet" href="?__debugger__=yes&cmd=resource&f=style.css"> + <link rel="shortcut icon" + href="?__debugger__=yes&cmd=resource&f=console.png"> + <script src="?__debugger__=yes&cmd=resource&f=debugger.js"></script> + <script> + var CONSOLE_MODE = %(console)s, + EVALEX = %(evalex)s, + EVALEX_TRUSTED = %(evalex_trusted)s, + SECRET = "%(secret)s"; + </script> + </head> + <body style="background-color: #fff"> + <div class="debugger"> +""" + +FOOTER = """\ + <div class="footer"> + Brought to you by <strong class="arthur">DON'T PANIC</strong>, your + friendly Werkzeug powered traceback interpreter. + </div> + </div> + + <div class="pin-prompt"> + <div class="inner"> + <h3>Console Locked</h3> + <p> + The console is locked and needs to be unlocked by entering the PIN. + You can find the PIN printed out on the standard output of your + shell that runs the server. + <form> + <p>PIN: + <input type=text name=pin size=14> + <input type=submit name=btn value="Confirm Pin"> + </form> + </div> + </div> + </body> +</html> +""" + +PAGE_HTML = ( + HEADER + + """\ +<h1>%(exception_type)s</h1> +<div class="detail"> + <p class="errormsg">%(exception)s</p> +</div> +<h2 class="traceback">Traceback <em>(most recent call last)</em></h2> +%(summary)s +<div class="plain"> + <p> + This is the Copy/Paste friendly version of the traceback. 
+ </p> + <textarea cols="50" rows="10" name="code" readonly>%(plaintext)s</textarea> +</div> +<div class="explanation"> + The debugger caught an exception in your WSGI application. You can now + look at the traceback which led to the error. <span class="nojavascript"> + If you enable JavaScript you can also use additional features such as code + execution (if the evalex feature is enabled), automatic pasting of the + exceptions and much more.</span> +</div> +""" + + FOOTER + + """ +<!-- + +%(plaintext_cs)s + +--> +""" +) + +CONSOLE_HTML = ( + HEADER + + """\ +<h1>Interactive Console</h1> +<div class="explanation"> +In this console you can execute Python expressions in the context of the +application. The initial namespace was created by the debugger automatically. +</div> +<div class="console"><div class="inner">The Console requires JavaScript.</div></div> +""" + + FOOTER +) + +SUMMARY_HTML = """\ +<div class="%(classes)s"> + %(title)s + <ul>%(frames)s</ul> + %(description)s +</div> +""" + +FRAME_HTML = """\ +<div class="frame" id="frame-%(id)d"> + <h4>File <cite class="filename">"%(filename)s"</cite>, + line <em class="line">%(lineno)s</em>, + in <code class="function">%(function_name)s</code></h4> + <div class="source %(library)s">%(lines)s</div> +</div> +""" + + +def _process_traceback( + exc: BaseException, + te: traceback.TracebackException | None = None, + *, + skip: int = 0, + hide: bool = True, +) -> traceback.TracebackException: + if te is None: + te = traceback.TracebackException.from_exception(exc, lookup_lines=False) + + # Get the frames the same way StackSummary.extract did, in order + # to match each frame with the FrameSummary to augment. + frame_gen = traceback.walk_tb(exc.__traceback__) + limit = getattr(sys, "tracebacklimit", None) + + if limit is not None: + if limit < 0: + limit = 0 + + frame_gen = itertools.islice(frame_gen, limit) + + if skip: + frame_gen = itertools.islice(frame_gen, skip, None) + del te.stack[:skip] + + new_stack: list[DebugFrameSummary] = [] + hidden = False + + # Match each frame with the FrameSummary that was generated. + # Hide frames using Paste's __traceback_hide__ rules. Replace + # all visible FrameSummary with DebugFrameSummary. + for (f, _), fs in zip(frame_gen, te.stack): + if hide: + hide_value = f.f_locals.get("__traceback_hide__", False) + + if hide_value in {"before", "before_and_this"}: + new_stack = [] + hidden = False + + if hide_value == "before_and_this": + continue + elif hide_value in {"reset", "reset_and_this"}: + hidden = False + + if hide_value == "reset_and_this": + continue + elif hide_value in {"after", "after_and_this"}: + hidden = True + + if hide_value == "after_and_this": + continue + elif hide_value or hidden: + continue + + frame_args: dict[str, t.Any] = { + "filename": fs.filename, + "lineno": fs.lineno, + "name": fs.name, + "locals": f.f_locals, + "globals": f.f_globals, + } + + if hasattr(fs, "colno"): + frame_args["colno"] = fs.colno + frame_args["end_colno"] = fs.end_colno # type: ignore[attr-defined] + + new_stack.append(DebugFrameSummary(**frame_args)) + + # The codeop module is used to compile code from the interactive + # debugger. Hide any codeop frames from the bottom of the traceback. 
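+    # The stack is ordered oldest frame first, so code compiled by the
+    # interactive console shows up at index 0; drop leading frames until a
+    # module other than codeop is seen.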
+ while new_stack: + module = new_stack[0].global_ns.get("__name__") + + if module is None: + module = new_stack[0].local_ns.get("__name__") + + if module == "codeop": + del new_stack[0] + else: + break + + te.stack[:] = new_stack + + if te.__context__: + context_exc = t.cast(BaseException, exc.__context__) + te.__context__ = _process_traceback(context_exc, te.__context__, hide=hide) + + if te.__cause__: + cause_exc = t.cast(BaseException, exc.__cause__) + te.__cause__ = _process_traceback(cause_exc, te.__cause__, hide=hide) + + return te + + +class DebugTraceback: + __slots__ = ("_te", "_cache_all_tracebacks", "_cache_all_frames") + + def __init__( + self, + exc: BaseException, + te: traceback.TracebackException | None = None, + *, + skip: int = 0, + hide: bool = True, + ) -> None: + self._te = _process_traceback(exc, te, skip=skip, hide=hide) + + def __str__(self) -> str: + return f"<{type(self).__name__} {self._te}>" + + @cached_property + def all_tracebacks( + self, + ) -> list[tuple[str | None, traceback.TracebackException]]: + out = [] + current = self._te + + while current is not None: + if current.__cause__ is not None: + chained_msg = ( + "The above exception was the direct cause of the" + " following exception" + ) + chained_exc = current.__cause__ + elif current.__context__ is not None and not current.__suppress_context__: + chained_msg = ( + "During handling of the above exception, another" + " exception occurred" + ) + chained_exc = current.__context__ + else: + chained_msg = None + chained_exc = None + + out.append((chained_msg, current)) + current = chained_exc + + return out + + @cached_property + def all_frames(self) -> list[DebugFrameSummary]: + return [ + f for _, te in self.all_tracebacks for f in te.stack # type: ignore[misc] + ] + + def render_traceback_text(self) -> str: + return "".join(self._te.format()) + + def render_traceback_html(self, include_title: bool = True) -> str: + library_frames = [f.is_library for f in self.all_frames] + mark_library = 0 < sum(library_frames) < len(library_frames) + rows = [] + + if not library_frames: + classes = "traceback noframe-traceback" + else: + classes = "traceback" + + for msg, current in reversed(self.all_tracebacks): + row_parts = [] + + if msg is not None: + row_parts.append(f'<li><div class="exc-divider">{msg}:</div>') + + for frame in current.stack: + frame = t.cast(DebugFrameSummary, frame) + info = f' title="{escape(frame.info)}"' if frame.info else "" + row_parts.append(f"<li{info}>{frame.render_html(mark_library)}") + + rows.append("\n".join(row_parts)) + + is_syntax_error = issubclass(self._te.exc_type, SyntaxError) + + if include_title: + if is_syntax_error: + title = "Syntax Error" + else: + title = "Traceback <em>(most recent call last)</em>:" + else: + title = "" + + exc_full = escape("".join(self._te.format_exception_only())) + + if is_syntax_error: + description = f"<pre class=syntaxerror>{exc_full}</pre>" + else: + description = f"<blockquote>{exc_full}</blockquote>" + + return SUMMARY_HTML % { + "classes": classes, + "title": f"<h3>{title}</h3>", + "frames": "\n".join(rows), + "description": description, + } + + def render_debugger_html( + self, evalex: bool, secret: str, evalex_trusted: bool + ) -> str: + exc_lines = list(self._te.format_exception_only()) + plaintext = "".join(self._te.format()) + return PAGE_HTML % { + "evalex": "true" if evalex else "false", + "evalex_trusted": "true" if evalex_trusted else "false", + "console": "false", + "title": escape(exc_lines[0]), + "exception": 
escape("".join(exc_lines)), + "exception_type": escape(self._te.exc_type.__name__), + "summary": self.render_traceback_html(include_title=False), + "plaintext": escape(plaintext), + "plaintext_cs": re.sub("-{2,}", "-", plaintext), + "secret": secret, + } + + +class DebugFrameSummary(traceback.FrameSummary): + """A :class:`traceback.FrameSummary` that can evaluate code in the + frame's namespace. + """ + + __slots__ = ( + "local_ns", + "global_ns", + "_cache_info", + "_cache_is_library", + "_cache_console", + ) + + def __init__( + self, + *, + locals: dict[str, t.Any], + globals: dict[str, t.Any], + **kwargs: t.Any, + ) -> None: + super().__init__(locals=None, **kwargs) + self.local_ns = locals + self.global_ns = globals + + @cached_property + def info(self) -> str | None: + return self.local_ns.get("__traceback_info__") + + @cached_property + def is_library(self) -> bool: + return any( + self.filename.startswith((path, os.path.realpath(path))) + for path in sysconfig.get_paths().values() + ) + + @cached_property + def console(self) -> Console: + return Console(self.global_ns, self.local_ns) + + def eval(self, code: str) -> t.Any: + return self.console.eval(code) + + def render_html(self, mark_library: bool) -> str: + context = 5 + lines = linecache.getlines(self.filename) + line_idx = self.lineno - 1 # type: ignore[operator] + start_idx = max(0, line_idx - context) + stop_idx = min(len(lines), line_idx + context + 1) + rendered_lines = [] + + def render_line(line: str, cls: str) -> None: + line = line.expandtabs().rstrip() + stripped_line = line.strip() + prefix = len(line) - len(stripped_line) + colno = getattr(self, "colno", 0) + end_colno = getattr(self, "end_colno", 0) + + if cls == "current" and colno and end_colno: + arrow = ( + f'\n<span class="ws">{" " * prefix}</span>' + f'{" " * (colno - prefix)}{"^" * (end_colno - colno)}' + ) + else: + arrow = "" + + rendered_lines.append( + f'<pre class="line {cls}"><span class="ws">{" " * prefix}</span>' + f"{escape(stripped_line) if stripped_line else ' '}" + f"{arrow if arrow else ''}</pre>" + ) + + if lines: + for line in lines[start_idx:line_idx]: + render_line(line, "before") + + render_line(lines[line_idx], "current") + + for line in lines[line_idx + 1 : stop_idx]: + render_line(line, "after") + + return FRAME_HTML % { + "id": id(self), + "filename": escape(self.filename), + "lineno": self.lineno, + "function_name": escape(self.name), + "lines": "\n".join(rendered_lines), + "library": "library" if mark_library and self.is_library else "", + } + + +def render_console_html(secret: str, evalex_trusted: bool) -> str: + return CONSOLE_HTML % { + "evalex": "true", + "evalex_trusted": "true" if evalex_trusted else "false", + "console": "true", + "title": "Console", + "secret": secret, + } diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/exceptions.py b/backend/test/lib/python3.8/site-packages/werkzeug/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..2536129180ed88e35438dcfe505780dcc479b7da --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/exceptions.py @@ -0,0 +1,879 @@ +"""Implements a number of Python exceptions which can be raised from within +a view to trigger a standard HTTP non-200 response. + +Usage Example +------------- + +.. 
code-block:: python + + from werkzeug.wrappers.request import Request + from werkzeug.exceptions import HTTPException, NotFound + + def view(request): + raise NotFound() + + @Request.application + def application(request): + try: + return view(request) + except HTTPException as e: + return e + +As you can see from this example those exceptions are callable WSGI +applications. However, they are not Werkzeug response objects. You +can get a response object by calling ``get_response()`` on a HTTP +exception. + +Keep in mind that you may have to pass an environ (WSGI) or scope +(ASGI) to ``get_response()`` because some errors fetch additional +information relating to the request. + +If you want to hook in a different exception page to say, a 404 status +code, you can add a second except for a specific subclass of an error: + +.. code-block:: python + + @Request.application + def application(request): + try: + return view(request) + except NotFound as e: + return not_found(request) + except HTTPException as e: + return e + +""" +from __future__ import annotations + +import typing as t +from datetime import datetime + +from markupsafe import escape +from markupsafe import Markup + +from ._internal import _get_environ + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIEnvironment + from .datastructures import WWWAuthenticate + from .sansio.response import Response + from .wrappers.request import Request as WSGIRequest + from .wrappers.response import Response as WSGIResponse + + +class HTTPException(Exception): + """The base class for all HTTP exceptions. This exception can be called as a WSGI + application to render a default error page or you can catch the subclasses + of it independently and render nicer error messages. + + .. versionchanged:: 2.1 + Removed the ``wrap`` class method. + """ + + code: int | None = None + description: str | None = None + + def __init__( + self, + description: str | None = None, + response: Response | None = None, + ) -> None: + super().__init__() + if description is not None: + self.description = description + self.response = response + + @property + def name(self) -> str: + """The status name.""" + from .http import HTTP_STATUS_CODES + + return HTTP_STATUS_CODES.get(self.code, "Unknown Error") # type: ignore + + def get_description( + self, + environ: WSGIEnvironment | None = None, + scope: dict | None = None, + ) -> str: + """Get the description.""" + if self.description is None: + description = "" + else: + description = self.description + + description = escape(description).replace("\n", Markup("<br>")) + return f"<p>{description}</p>" + + def get_body( + self, + environ: WSGIEnvironment | None = None, + scope: dict | None = None, + ) -> str: + """Get the HTML body.""" + return ( + "<!doctype html>\n" + "<html lang=en>\n" + f"<title>{self.code} {escape(self.name)}</title>\n" + f"<h1>{escape(self.name)}</h1>\n" + f"{self.get_description(environ)}\n" + ) + + def get_headers( + self, + environ: WSGIEnvironment | None = None, + scope: dict | None = None, + ) -> list[tuple[str, str]]: + """Get a list of headers.""" + return [("Content-Type", "text/html; charset=utf-8")] + + def get_response( + self, + environ: WSGIEnvironment | WSGIRequest | None = None, + scope: dict | None = None, + ) -> Response: + """Get a response object. If one was passed to the exception + it's returned directly. + + :param environ: the optional environ for the request. 
This + can be used to modify the response depending + on how the request looked like. + :return: a :class:`Response` object or a subclass thereof. + """ + from .wrappers.response import Response as WSGIResponse # noqa: F811 + + if self.response is not None: + return self.response + if environ is not None: + environ = _get_environ(environ) + headers = self.get_headers(environ, scope) + return WSGIResponse(self.get_body(environ, scope), self.code, headers) + + def __call__( + self, environ: WSGIEnvironment, start_response: StartResponse + ) -> t.Iterable[bytes]: + """Call the exception as WSGI application. + + :param environ: the WSGI environment. + :param start_response: the response callable provided by the WSGI + server. + """ + response = t.cast("WSGIResponse", self.get_response(environ)) + return response(environ, start_response) + + def __str__(self) -> str: + code = self.code if self.code is not None else "???" + return f"{code} {self.name}: {self.description}" + + def __repr__(self) -> str: + code = self.code if self.code is not None else "???" + return f"<{type(self).__name__} '{code}: {self.name}'>" + + +class BadRequest(HTTPException): + """*400* `Bad Request` + + Raise if the browser sends something to the application the application + or server cannot handle. + """ + + code = 400 + description = ( + "The browser (or proxy) sent a request that this server could " + "not understand." + ) + + +class BadRequestKeyError(BadRequest, KeyError): + """An exception that is used to signal both a :exc:`KeyError` and a + :exc:`BadRequest`. Used by many of the datastructures. + """ + + _description = BadRequest.description + #: Show the KeyError along with the HTTP error message in the + #: response. This should be disabled in production, but can be + #: useful in a debug mode. + show_exception = False + + def __init__(self, arg: str | None = None, *args: t.Any, **kwargs: t.Any): + super().__init__(*args, **kwargs) + + if arg is None: + KeyError.__init__(self) + else: + KeyError.__init__(self, arg) + + @property # type: ignore + def description(self) -> str: + if self.show_exception: + return ( + f"{self._description}\n" + f"{KeyError.__name__}: {KeyError.__str__(self)}" + ) + + return self._description + + @description.setter + def description(self, value: str) -> None: + self._description = value + + +class ClientDisconnected(BadRequest): + """Internal exception that is raised if Werkzeug detects a disconnected + client. Since the client is already gone at that point attempting to + send the error message to the client might not work and might ultimately + result in another exception in the server. Mainly this is here so that + it is silenced by default as far as Werkzeug is concerned. + + Since disconnections cannot be reliably detected and are unspecified + by WSGI to a large extent this might or might not be raised if a client + is gone. + + .. versionadded:: 0.8 + """ + + +class SecurityError(BadRequest): + """Raised if something triggers a security error. This is otherwise + exactly like a bad request error. + + .. versionadded:: 0.9 + """ + + +class BadHost(BadRequest): + """Raised if the submitted host is badly formatted. + + .. versionadded:: 0.11.2 + """ + + +class Unauthorized(HTTPException): + """*401* ``Unauthorized`` + + Raise if the user is not authorized to access a resource. + + The ``www_authenticate`` argument should be used to set the + ``WWW-Authenticate`` header. This is used for HTTP basic auth and + other schemes. 
Use :class:`~werkzeug.datastructures.WWWAuthenticate` + to create correctly formatted values. Strictly speaking a 401 + response is invalid if it doesn't provide at least one value for + this header, although real clients typically don't care. + + :param description: Override the default message used for the body + of the response. + :param www-authenticate: A single value, or list of values, for the + WWW-Authenticate header(s). + + .. versionchanged:: 2.0 + Serialize multiple ``www_authenticate`` items into multiple + ``WWW-Authenticate`` headers, rather than joining them + into a single value, for better interoperability. + + .. versionchanged:: 0.15.3 + If the ``www_authenticate`` argument is not set, the + ``WWW-Authenticate`` header is not set. + + .. versionchanged:: 0.15.3 + The ``response`` argument was restored. + + .. versionchanged:: 0.15.1 + ``description`` was moved back as the first argument, restoring + its previous position. + + .. versionchanged:: 0.15.0 + ``www_authenticate`` was added as the first argument, ahead of + ``description``. + """ + + code = 401 + description = ( + "The server could not verify that you are authorized to access" + " the URL requested. You either supplied the wrong credentials" + " (e.g. a bad password), or your browser doesn't understand" + " how to supply the credentials required." + ) + + def __init__( + self, + description: str | None = None, + response: Response | None = None, + www_authenticate: None | (WWWAuthenticate | t.Iterable[WWWAuthenticate]) = None, + ) -> None: + super().__init__(description, response) + + from .datastructures import WWWAuthenticate + + if isinstance(www_authenticate, WWWAuthenticate): + www_authenticate = (www_authenticate,) + + self.www_authenticate = www_authenticate + + def get_headers( + self, + environ: WSGIEnvironment | None = None, + scope: dict | None = None, + ) -> list[tuple[str, str]]: + headers = super().get_headers(environ, scope) + if self.www_authenticate: + headers.extend(("WWW-Authenticate", str(x)) for x in self.www_authenticate) + return headers + + +class Forbidden(HTTPException): + """*403* `Forbidden` + + Raise if the user doesn't have the permission for the requested resource + but was authenticated. + """ + + code = 403 + description = ( + "You don't have the permission to access the requested" + " resource. It is either read-protected or not readable by the" + " server." + ) + + +class NotFound(HTTPException): + """*404* `Not Found` + + Raise if a resource does not exist and never existed. + """ + + code = 404 + description = ( + "The requested URL was not found on the server. If you entered" + " the URL manually please check your spelling and try again." + ) + + +class MethodNotAllowed(HTTPException): + """*405* `Method Not Allowed` + + Raise if the server used a method the resource does not handle. For + example `POST` if the resource is view only. Especially useful for REST. + + The first argument for this exception should be a list of allowed methods. + Strictly speaking the response would be invalid if you don't provide valid + methods in the header which you can do with that list. + """ + + code = 405 + description = "The method is not allowed for the requested URL." 
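+
+    # Typical usage: ``raise MethodNotAllowed(valid_methods=["GET", "HEAD"])``;
+    # get_headers() below then adds an ``Allow: GET, HEAD`` response header.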
+ + def __init__( + self, + valid_methods: t.Iterable[str] | None = None, + description: str | None = None, + response: Response | None = None, + ) -> None: + """Takes an optional list of valid http methods + starting with werkzeug 0.3 the list will be mandatory.""" + super().__init__(description=description, response=response) + self.valid_methods = valid_methods + + def get_headers( + self, + environ: WSGIEnvironment | None = None, + scope: dict | None = None, + ) -> list[tuple[str, str]]: + headers = super().get_headers(environ, scope) + if self.valid_methods: + headers.append(("Allow", ", ".join(self.valid_methods))) + return headers + + +class NotAcceptable(HTTPException): + """*406* `Not Acceptable` + + Raise if the server can't return any content conforming to the + `Accept` headers of the client. + """ + + code = 406 + description = ( + "The resource identified by the request is only capable of" + " generating response entities which have content" + " characteristics not acceptable according to the accept" + " headers sent in the request." + ) + + +class RequestTimeout(HTTPException): + """*408* `Request Timeout` + + Raise to signalize a timeout. + """ + + code = 408 + description = ( + "The server closed the network connection because the browser" + " didn't finish the request within the specified time." + ) + + +class Conflict(HTTPException): + """*409* `Conflict` + + Raise to signal that a request cannot be completed because it conflicts + with the current state on the server. + + .. versionadded:: 0.7 + """ + + code = 409 + description = ( + "A conflict happened while processing the request. The" + " resource might have been modified while the request was being" + " processed." + ) + + +class Gone(HTTPException): + """*410* `Gone` + + Raise if a resource existed previously and went away without new location. + """ + + code = 410 + description = ( + "The requested URL is no longer available on this server and" + " there is no forwarding address. If you followed a link from a" + " foreign page, please contact the author of this page." + ) + + +class LengthRequired(HTTPException): + """*411* `Length Required` + + Raise if the browser submitted data but no ``Content-Length`` header which + is required for the kind of processing the server does. + """ + + code = 411 + description = ( + "A request with this method requires a valid <code>Content-" + "Length</code> header." + ) + + +class PreconditionFailed(HTTPException): + """*412* `Precondition Failed` + + Status code used in combination with ``If-Match``, ``If-None-Match``, or + ``If-Unmodified-Since``. + """ + + code = 412 + description = ( + "The precondition on the request for the URL failed positive evaluation." + ) + + +class RequestEntityTooLarge(HTTPException): + """*413* `Request Entity Too Large` + + The status code one should return if the data submitted exceeded a given + limit. + """ + + code = 413 + description = "The data value transmitted exceeds the capacity limit." + + +class RequestURITooLarge(HTTPException): + """*414* `Request URI Too Large` + + Like *413* but for too long URLs. + """ + + code = 414 + description = ( + "The length of the requested URL exceeds the capacity limit for" + " this server. The request cannot be processed." + ) + + +class UnsupportedMediaType(HTTPException): + """*415* `Unsupported Media Type` + + The status code returned if the server is unable to handle the media type + the client transmitted. 
+ """ + + code = 415 + description = ( + "The server does not support the media type transmitted in the request." + ) + + +class RequestedRangeNotSatisfiable(HTTPException): + """*416* `Requested Range Not Satisfiable` + + The client asked for an invalid part of the file. + + .. versionadded:: 0.7 + """ + + code = 416 + description = "The server cannot provide the requested range." + + def __init__( + self, + length: int | None = None, + units: str = "bytes", + description: str | None = None, + response: Response | None = None, + ) -> None: + """Takes an optional `Content-Range` header value based on ``length`` + parameter. + """ + super().__init__(description=description, response=response) + self.length = length + self.units = units + + def get_headers( + self, + environ: WSGIEnvironment | None = None, + scope: dict | None = None, + ) -> list[tuple[str, str]]: + headers = super().get_headers(environ, scope) + if self.length is not None: + headers.append(("Content-Range", f"{self.units} */{self.length}")) + return headers + + +class ExpectationFailed(HTTPException): + """*417* `Expectation Failed` + + The server cannot meet the requirements of the Expect request-header. + + .. versionadded:: 0.7 + """ + + code = 417 + description = "The server could not meet the requirements of the Expect header" + + +class ImATeapot(HTTPException): + """*418* `I'm a teapot` + + The server should return this if it is a teapot and someone attempted + to brew coffee with it. + + .. versionadded:: 0.7 + """ + + code = 418 + description = "This server is a teapot, not a coffee machine" + + +class UnprocessableEntity(HTTPException): + """*422* `Unprocessable Entity` + + Used if the request is well formed, but the instructions are otherwise + incorrect. + """ + + code = 422 + description = ( + "The request was well-formed but was unable to be followed due" + " to semantic errors." + ) + + +class Locked(HTTPException): + """*423* `Locked` + + Used if the resource that is being accessed is locked. + """ + + code = 423 + description = "The resource that is being accessed is locked." + + +class FailedDependency(HTTPException): + """*424* `Failed Dependency` + + Used if the method could not be performed on the resource + because the requested action depended on another action and that action failed. + """ + + code = 424 + description = ( + "The method could not be performed on the resource because the" + " requested action depended on another action and that action" + " failed." + ) + + +class PreconditionRequired(HTTPException): + """*428* `Precondition Required` + + The server requires this request to be conditional, typically to prevent + the lost update problem, which is a race condition between two or more + clients attempting to update a resource through PUT or DELETE. By requiring + each client to include a conditional header ("If-Match" or "If-Unmodified- + Since") with the proper value retained from a recent GET request, the + server ensures that each client has at least seen the previous revision of + the resource. + """ + + code = 428 + description = ( + "This request is required to be conditional; try using" + ' "If-Match" or "If-Unmodified-Since".' + ) + + +class _RetryAfter(HTTPException): + """Adds an optional ``retry_after`` parameter which will set the + ``Retry-After`` header. May be an :class:`int` number of seconds or + a :class:`~datetime.datetime`. 
+ """ + + def __init__( + self, + description: str | None = None, + response: Response | None = None, + retry_after: datetime | int | None = None, + ) -> None: + super().__init__(description, response) + self.retry_after = retry_after + + def get_headers( + self, + environ: WSGIEnvironment | None = None, + scope: dict | None = None, + ) -> list[tuple[str, str]]: + headers = super().get_headers(environ, scope) + + if self.retry_after: + if isinstance(self.retry_after, datetime): + from .http import http_date + + value = http_date(self.retry_after) + else: + value = str(self.retry_after) + + headers.append(("Retry-After", value)) + + return headers + + +class TooManyRequests(_RetryAfter): + """*429* `Too Many Requests` + + The server is limiting the rate at which this user receives + responses, and this request exceeds that rate. (The server may use + any convenient method to identify users and their request rates). + The server may include a "Retry-After" header to indicate how long + the user should wait before retrying. + + :param retry_after: If given, set the ``Retry-After`` header to this + value. May be an :class:`int` number of seconds or a + :class:`~datetime.datetime`. + + .. versionchanged:: 1.0 + Added ``retry_after`` parameter. + """ + + code = 429 + description = "This user has exceeded an allotted request count. Try again later." + + +class RequestHeaderFieldsTooLarge(HTTPException): + """*431* `Request Header Fields Too Large` + + The server refuses to process the request because the header fields are too + large. One or more individual fields may be too large, or the set of all + headers is too large. + """ + + code = 431 + description = "One or more header fields exceeds the maximum size." + + +class UnavailableForLegalReasons(HTTPException): + """*451* `Unavailable For Legal Reasons` + + This status code indicates that the server is denying access to the + resource as a consequence of a legal demand. + """ + + code = 451 + description = "Unavailable for legal reasons." + + +class InternalServerError(HTTPException): + """*500* `Internal Server Error` + + Raise if an internal server error occurred. This is a good fallback if an + unknown error occurred in the dispatcher. + + .. versionchanged:: 1.0.0 + Added the :attr:`original_exception` attribute. + """ + + code = 500 + description = ( + "The server encountered an internal error and was unable to" + " complete your request. Either the server is overloaded or" + " there is an error in the application." + ) + + def __init__( + self, + description: str | None = None, + response: Response | None = None, + original_exception: BaseException | None = None, + ) -> None: + #: The original exception that caused this 500 error. Can be + #: used by frameworks to provide context when handling + #: unexpected errors. + self.original_exception = original_exception + super().__init__(description=description, response=response) + + +class NotImplemented(HTTPException): + """*501* `Not Implemented` + + Raise if the application does not support the action requested by the + browser. + """ + + code = 501 + description = "The server does not support the action requested by the browser." + + +class BadGateway(HTTPException): + """*502* `Bad Gateway` + + If you do proxying in your application you should return this status code + if you received an invalid response from the upstream server it accessed + in attempting to fulfill the request. 
+ """ + + code = 502 + description = ( + "The proxy server received an invalid response from an upstream server." + ) + + +class ServiceUnavailable(_RetryAfter): + """*503* `Service Unavailable` + + Status code you should return if a service is temporarily + unavailable. + + :param retry_after: If given, set the ``Retry-After`` header to this + value. May be an :class:`int` number of seconds or a + :class:`~datetime.datetime`. + + .. versionchanged:: 1.0 + Added ``retry_after`` parameter. + """ + + code = 503 + description = ( + "The server is temporarily unable to service your request due" + " to maintenance downtime or capacity problems. Please try" + " again later." + ) + + +class GatewayTimeout(HTTPException): + """*504* `Gateway Timeout` + + Status code you should return if a connection to an upstream server + times out. + """ + + code = 504 + description = "The connection to an upstream server timed out." + + +class HTTPVersionNotSupported(HTTPException): + """*505* `HTTP Version Not Supported` + + The server does not support the HTTP protocol version used in the request. + """ + + code = 505 + description = ( + "The server does not support the HTTP protocol version used in the request." + ) + + +default_exceptions: dict[int, type[HTTPException]] = {} + + +def _find_exceptions() -> None: + for obj in globals().values(): + try: + is_http_exception = issubclass(obj, HTTPException) + except TypeError: + is_http_exception = False + if not is_http_exception or obj.code is None: + continue + old_obj = default_exceptions.get(obj.code, None) + if old_obj is not None and issubclass(obj, old_obj): + continue + default_exceptions[obj.code] = obj + + +_find_exceptions() +del _find_exceptions + + +class Aborter: + """When passed a dict of code -> exception items it can be used as + callable that raises exceptions. If the first argument to the + callable is an integer it will be looked up in the mapping, if it's + a WSGI application it will be raised in a proxy exception. + + The rest of the arguments are forwarded to the exception constructor. + """ + + def __init__( + self, + mapping: dict[int, type[HTTPException]] | None = None, + extra: dict[int, type[HTTPException]] | None = None, + ) -> None: + if mapping is None: + mapping = default_exceptions + self.mapping = dict(mapping) + if extra is not None: + self.mapping.update(extra) + + def __call__( + self, code: int | Response, *args: t.Any, **kwargs: t.Any + ) -> t.NoReturn: + from .sansio.response import Response + + if isinstance(code, Response): + raise HTTPException(response=code) + + if code not in self.mapping: + raise LookupError(f"no exception for {code!r}") + + raise self.mapping[code](*args, **kwargs) + + +def abort(status: int | Response, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: + """Raises an :py:exc:`HTTPException` for the given status code or WSGI + application. + + If a status code is given, it will be looked up in the list of + exceptions and will raise that exception. 
If passed a WSGI application, + it will wrap it in a proxy WSGI exception and raise that:: + + abort(404) # 404 Not Found + abort(Response('Hello World')) + + """ + _aborter(status, *args, **kwargs) + + +_aborter: Aborter = Aborter() diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/formparser.py b/backend/test/lib/python3.8/site-packages/werkzeug/formparser.py new file mode 100644 index 0000000000000000000000000000000000000000..25ef0d61b13dc3bf951f30462000b518973a7c2c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/formparser.py @@ -0,0 +1,547 @@ +from __future__ import annotations + +import typing as t +import warnings +from io import BytesIO +from urllib.parse import parse_qsl + +from ._internal import _plain_int +from .datastructures import FileStorage +from .datastructures import Headers +from .datastructures import MultiDict +from .exceptions import RequestEntityTooLarge +from .http import parse_options_header +from .sansio.multipart import Data +from .sansio.multipart import Epilogue +from .sansio.multipart import Field +from .sansio.multipart import File +from .sansio.multipart import MultipartDecoder +from .sansio.multipart import NeedData +from .wsgi import get_content_length +from .wsgi import get_input_stream + +# there are some platforms where SpooledTemporaryFile is not available. +# In that case we need to provide a fallback. +try: + from tempfile import SpooledTemporaryFile +except ImportError: + from tempfile import TemporaryFile + + SpooledTemporaryFile = None # type: ignore + +if t.TYPE_CHECKING: + import typing as te + from _typeshed.wsgi import WSGIEnvironment + + t_parse_result = t.Tuple[t.IO[bytes], MultiDict, MultiDict] + + class TStreamFactory(te.Protocol): + def __call__( + self, + total_content_length: int | None, + content_type: str | None, + filename: str | None, + content_length: int | None = None, + ) -> t.IO[bytes]: + ... + + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + + +def default_stream_factory( + total_content_length: int | None, + content_type: str | None, + filename: str | None, + content_length: int | None = None, +) -> t.IO[bytes]: + max_size = 1024 * 500 + + if SpooledTemporaryFile is not None: + return t.cast(t.IO[bytes], SpooledTemporaryFile(max_size=max_size, mode="rb+")) + elif total_content_length is None or total_content_length > max_size: + return t.cast(t.IO[bytes], TemporaryFile("rb+")) + + return BytesIO() + + +def parse_form_data( + environ: WSGIEnvironment, + stream_factory: TStreamFactory | None = None, + charset: str | None = None, + errors: str | None = None, + max_form_memory_size: int | None = None, + max_content_length: int | None = None, + cls: type[MultiDict] | None = None, + silent: bool = True, + *, + max_form_parts: int | None = None, +) -> t_parse_result: + """Parse the form data in the environ and return it as tuple in the form + ``(stream, form, files)``. You should only call this method if the + transport method is `POST`, `PUT`, or `PATCH`. + + If the mimetype of the data transmitted is `multipart/form-data` the + files multidict will be filled with `FileStorage` objects. If the + mimetype is unknown the input stream is wrapped and returned as first + argument, else the stream is empty. + + This is a shortcut for the common usage of :class:`FormDataParser`. + + :param environ: the WSGI environment to be used for parsing. + :param stream_factory: An optional callable that returns a new read and + writeable file descriptor. 
This callable works + the same as :meth:`Response._get_file_stream`. + :param max_form_memory_size: the maximum number of bytes to be accepted for + in-memory stored form data. If the data + exceeds the value specified an + :exc:`~exceptions.RequestEntityTooLarge` + exception is raised. + :param max_content_length: If this is provided and the transmitted data + is longer than this value an + :exc:`~exceptions.RequestEntityTooLarge` + exception is raised. + :param cls: an optional dict class to use. If this is not specified + or `None` the default :class:`MultiDict` is used. + :param silent: If set to False parsing errors will not be caught. + :param max_form_parts: The maximum number of multipart parts to be parsed. If this + is exceeded, a :exc:`~exceptions.RequestEntityTooLarge` exception is raised. + :return: A tuple in the form ``(stream, form, files)``. + + .. versionchanged:: 2.3 + Added the ``max_form_parts`` parameter. + + .. versionchanged:: 2.3 + The ``charset`` and ``errors`` parameters are deprecated and will be removed in + Werkzeug 3.0. + + .. versionadded:: 0.5.1 + Added the ``silent`` parameter. + + .. versionadded:: 0.5 + Added the ``max_form_memory_size``, ``max_content_length``, and ``cls`` + parameters. + """ + return FormDataParser( + stream_factory=stream_factory, + charset=charset, + errors=errors, + max_form_memory_size=max_form_memory_size, + max_content_length=max_content_length, + max_form_parts=max_form_parts, + silent=silent, + cls=cls, + ).parse_from_environ(environ) + + +class FormDataParser: + """This class implements parsing of form data for Werkzeug. By itself + it can parse multipart and url encoded form data. It can be subclassed + and extended but for most mimetypes it is a better idea to use the + untouched stream and expose it as separate attributes on a request + object. + + :param stream_factory: An optional callable that returns a new read and + writeable file descriptor. This callable works + the same as :meth:`Response._get_file_stream`. + :param max_form_memory_size: the maximum number of bytes to be accepted for + in-memory stored form data. If the data + exceeds the value specified an + :exc:`~exceptions.RequestEntityTooLarge` + exception is raised. + :param max_content_length: If this is provided and the transmitted data + is longer than this value an + :exc:`~exceptions.RequestEntityTooLarge` + exception is raised. + :param cls: an optional dict class to use. If this is not specified + or `None` the default :class:`MultiDict` is used. + :param silent: If set to False parsing errors will not be caught. + :param max_form_parts: The maximum number of multipart parts to be parsed. If this + is exceeded, a :exc:`~exceptions.RequestEntityTooLarge` exception is raised. + + .. versionchanged:: 2.3 + The ``charset`` and ``errors`` parameters are deprecated and will be removed in + Werkzeug 3.0. + + .. versionchanged:: 2.3 + The ``parse_functions`` attribute and ``get_parse_func`` methods are deprecated + and will be removed in Werkzeug 3.0. + + .. versionchanged:: 2.2.3 + Added the ``max_form_parts`` parameter. + + .. 
versionadded:: 0.8 + """ + + def __init__( + self, + stream_factory: TStreamFactory | None = None, + charset: str | None = None, + errors: str | None = None, + max_form_memory_size: int | None = None, + max_content_length: int | None = None, + cls: type[MultiDict] | None = None, + silent: bool = True, + *, + max_form_parts: int | None = None, + ) -> None: + if stream_factory is None: + stream_factory = default_stream_factory + + self.stream_factory = stream_factory + + if charset is not None: + warnings.warn( + "The 'charset' parameter is deprecated and will be" + " removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + else: + charset = "utf-8" + + self.charset = charset + + if errors is not None: + warnings.warn( + "The 'errors' parameter is deprecated and will be" + " removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + else: + errors = "replace" + + self.errors = errors + self.max_form_memory_size = max_form_memory_size + self.max_content_length = max_content_length + self.max_form_parts = max_form_parts + + if cls is None: + cls = MultiDict + + self.cls = cls + self.silent = silent + + def get_parse_func( + self, mimetype: str, options: dict[str, str] + ) -> None | ( + t.Callable[ + [FormDataParser, t.IO[bytes], str, int | None, dict[str, str]], + t_parse_result, + ] + ): + warnings.warn( + "The 'get_parse_func' method is deprecated and will be" + " removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + + if mimetype == "multipart/form-data": + return type(self)._parse_multipart + elif mimetype == "application/x-www-form-urlencoded": + return type(self)._parse_urlencoded + elif mimetype == "application/x-url-encoded": + warnings.warn( + "The 'application/x-url-encoded' mimetype is invalid, and will not be" + " treated as 'application/x-www-form-urlencoded' in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + return type(self)._parse_urlencoded + elif mimetype in self.parse_functions: + warnings.warn( + "The 'parse_functions' attribute is deprecated and will be removed in" + " Werkzeug 3.0. Override 'parse' instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.parse_functions[mimetype] + + return None + + def parse_from_environ(self, environ: WSGIEnvironment) -> t_parse_result: + """Parses the information from the environment as form data. + + :param environ: the WSGI environment to be used for parsing. + :return: A tuple in the form ``(stream, form, files)``. + """ + stream = get_input_stream(environ, max_content_length=self.max_content_length) + content_length = get_content_length(environ) + mimetype, options = parse_options_header(environ.get("CONTENT_TYPE")) + return self.parse( + stream, + content_length=content_length, + mimetype=mimetype, + options=options, + ) + + def parse( + self, + stream: t.IO[bytes], + mimetype: str, + content_length: int | None, + options: dict[str, str] | None = None, + ) -> t_parse_result: + """Parses the information from the given stream, mimetype, + content length and mimetype parameters. + + :param stream: an input stream + :param mimetype: the mimetype of the data + :param content_length: the content length of the incoming data + :param options: optional mimetype parameters (used for + the multipart boundary for instance) + :return: A tuple in the form ``(stream, form, files)``. + + .. versionchanged:: 2.3 + The ``application/x-url-encoded`` content type is deprecated and will not be + treated as ``application/x-www-form-urlencoded`` in Werkzeug 3.0. 
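+
+        A minimal illustrative sketch (not part of the upstream docstring),
+        parsing a urlencoded body directly:
+
+        .. code-block:: python
+
+            from io import BytesIO
+
+            from werkzeug.formparser import FormDataParser
+
+            body = b"a=1&b=2"
+            stream, form, files = FormDataParser().parse(
+                BytesIO(body), "application/x-www-form-urlencoded", len(body)
+            )
+            form["a"]  # '1'; ``files`` stays empty for urlencoded data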
+ """ + if mimetype == "multipart/form-data": + parse_func = self._parse_multipart + elif mimetype == "application/x-www-form-urlencoded": + parse_func = self._parse_urlencoded + elif mimetype == "application/x-url-encoded": + warnings.warn( + "The 'application/x-url-encoded' mimetype is invalid, and will not be" + " treated as 'application/x-www-form-urlencoded' in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + parse_func = self._parse_urlencoded + elif mimetype in self.parse_functions: + warnings.warn( + "The 'parse_functions' attribute is deprecated and will be removed in" + " Werkzeug 3.0. Override 'parse' instead.", + DeprecationWarning, + stacklevel=2, + ) + parse_func = self.parse_functions[mimetype].__get__(self, type(self)) + else: + return stream, self.cls(), self.cls() + + if options is None: + options = {} + + try: + return parse_func(stream, mimetype, content_length, options) + except ValueError: + if not self.silent: + raise + + return stream, self.cls(), self.cls() + + def _parse_multipart( + self, + stream: t.IO[bytes], + mimetype: str, + content_length: int | None, + options: dict[str, str], + ) -> t_parse_result: + charset = self.charset if self.charset != "utf-8" else None + errors = self.errors if self.errors != "replace" else None + parser = MultiPartParser( + stream_factory=self.stream_factory, + charset=charset, + errors=errors, + max_form_memory_size=self.max_form_memory_size, + max_form_parts=self.max_form_parts, + cls=self.cls, + ) + boundary = options.get("boundary", "").encode("ascii") + + if not boundary: + raise ValueError("Missing boundary") + + form, files = parser.parse(stream, boundary, content_length) + return stream, form, files + + def _parse_urlencoded( + self, + stream: t.IO[bytes], + mimetype: str, + content_length: int | None, + options: dict[str, str], + ) -> t_parse_result: + if ( + self.max_form_memory_size is not None + and content_length is not None + and content_length > self.max_form_memory_size + ): + raise RequestEntityTooLarge() + + try: + items = parse_qsl( + stream.read().decode(), + keep_blank_values=True, + encoding=self.charset, + errors="werkzeug.url_quote", + ) + except ValueError as e: + raise RequestEntityTooLarge() from e + + return stream, self.cls(items), self.cls() + + parse_functions: dict[ + str, + t.Callable[ + [FormDataParser, t.IO[bytes], str, int | None, dict[str, str]], + t_parse_result, + ], + ] = {} + + +class MultiPartParser: + def __init__( + self, + stream_factory: TStreamFactory | None = None, + charset: str | None = None, + errors: str | None = None, + max_form_memory_size: int | None = None, + cls: type[MultiDict] | None = None, + buffer_size: int = 64 * 1024, + max_form_parts: int | None = None, + ) -> None: + if charset is not None: + warnings.warn( + "The 'charset' parameter is deprecated and will be" + " removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + else: + charset = "utf-8" + + self.charset = charset + + if errors is not None: + warnings.warn( + "The 'errors' parameter is deprecated and will be" + " removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + else: + errors = "replace" + + self.errors = errors + self.max_form_memory_size = max_form_memory_size + self.max_form_parts = max_form_parts + + if stream_factory is None: + stream_factory = default_stream_factory + + self.stream_factory = stream_factory + + if cls is None: + cls = MultiDict + + self.cls = cls + self.buffer_size = buffer_size + + def fail(self, message: str) -> te.NoReturn: + raise 
ValueError(message) + + def get_part_charset(self, headers: Headers) -> str: + # Figure out input charset for current part + content_type = headers.get("content-type") + + if content_type: + parameters = parse_options_header(content_type)[1] + ct_charset = parameters.get("charset", "").lower() + + # A safe list of encodings. Modern clients should only send ASCII or UTF-8. + # This list will not be extended further. + if ct_charset in {"ascii", "us-ascii", "utf-8", "iso-8859-1"}: + return ct_charset + + return self.charset + + def start_file_streaming( + self, event: File, total_content_length: int | None + ) -> t.IO[bytes]: + content_type = event.headers.get("content-type") + + try: + content_length = _plain_int(event.headers["content-length"]) + except (KeyError, ValueError): + content_length = 0 + + container = self.stream_factory( + total_content_length=total_content_length, + filename=event.filename, + content_type=content_type, + content_length=content_length, + ) + return container + + def parse( + self, stream: t.IO[bytes], boundary: bytes, content_length: int | None + ) -> tuple[MultiDict, MultiDict]: + current_part: Field | File + container: t.IO[bytes] | list[bytes] + _write: t.Callable[[bytes], t.Any] + + parser = MultipartDecoder( + boundary, + max_form_memory_size=self.max_form_memory_size, + max_parts=self.max_form_parts, + ) + + fields = [] + files = [] + + for data in _chunk_iter(stream.read, self.buffer_size): + parser.receive_data(data) + event = parser.next_event() + while not isinstance(event, (Epilogue, NeedData)): + if isinstance(event, Field): + current_part = event + container = [] + _write = container.append + elif isinstance(event, File): + current_part = event + container = self.start_file_streaming(event, content_length) + _write = container.write + elif isinstance(event, Data): + _write(event.data) + if not event.more_data: + if isinstance(current_part, Field): + value = b"".join(container).decode( + self.get_part_charset(current_part.headers), self.errors + ) + fields.append((current_part.name, value)) + else: + container = t.cast(t.IO[bytes], container) + container.seek(0) + files.append( + ( + current_part.name, + FileStorage( + container, + current_part.filename, + current_part.name, + headers=current_part.headers, + ), + ) + ) + + event = parser.next_event() + + return self.cls(fields), self.cls(files) + + +def _chunk_iter(read: t.Callable[[int], bytes], size: int) -> t.Iterator[bytes | None]: + """Read data in chunks for multipart/form-data parsing. Stop if no data is read. + Yield ``None`` at the end to signal end of parsing. 
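+
+    An illustrative sketch of the iteration contract (not upstream
+    documentation):
+
+    .. code-block:: python
+
+        from io import BytesIO
+
+        list(_chunk_iter(BytesIO(b"abcd").read, 3))
+        # [b'abc', b'd', None] -- data chunks, then the ``None`` sentinel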
+ """ + while True: + data = read(size) + + if not data: + break + + yield data + + yield None diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/http.py b/backend/test/lib/python3.8/site-packages/werkzeug/http.py new file mode 100644 index 0000000000000000000000000000000000000000..07d1fd49692b76bb9ea79f427546d1ed15320069 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/http.py @@ -0,0 +1,1538 @@ +from __future__ import annotations + +import email.utils +import re +import typing as t +import warnings +from datetime import date +from datetime import datetime +from datetime import time +from datetime import timedelta +from datetime import timezone +from enum import Enum +from hashlib import sha1 +from time import mktime +from time import struct_time +from urllib.parse import quote +from urllib.parse import unquote +from urllib.request import parse_http_list as _parse_list_header + +from ._internal import _dt_as_utc +from ._internal import _plain_int + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIEnvironment + +_token_chars = frozenset( + "!#$%&'*+-.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ^_`abcdefghijklmnopqrstuvwxyz|~" +) +_etag_re = re.compile(r'([Ww]/)?(?:"(.*?)"|(.*?))(?:\s*,\s*|$)') +_entity_headers = frozenset( + [ + "allow", + "content-encoding", + "content-language", + "content-length", + "content-location", + "content-md5", + "content-range", + "content-type", + "expires", + "last-modified", + ] +) +_hop_by_hop_headers = frozenset( + [ + "connection", + "keep-alive", + "proxy-authenticate", + "proxy-authorization", + "te", + "trailer", + "transfer-encoding", + "upgrade", + ] +) +HTTP_STATUS_CODES = { + 100: "Continue", + 101: "Switching Protocols", + 102: "Processing", + 103: "Early Hints", # see RFC 8297 + 200: "OK", + 201: "Created", + 202: "Accepted", + 203: "Non Authoritative Information", + 204: "No Content", + 205: "Reset Content", + 206: "Partial Content", + 207: "Multi Status", + 208: "Already Reported", # see RFC 5842 + 226: "IM Used", # see RFC 3229 + 300: "Multiple Choices", + 301: "Moved Permanently", + 302: "Found", + 303: "See Other", + 304: "Not Modified", + 305: "Use Proxy", + 306: "Switch Proxy", # unused + 307: "Temporary Redirect", + 308: "Permanent Redirect", + 400: "Bad Request", + 401: "Unauthorized", + 402: "Payment Required", # unused + 403: "Forbidden", + 404: "Not Found", + 405: "Method Not Allowed", + 406: "Not Acceptable", + 407: "Proxy Authentication Required", + 408: "Request Timeout", + 409: "Conflict", + 410: "Gone", + 411: "Length Required", + 412: "Precondition Failed", + 413: "Request Entity Too Large", + 414: "Request URI Too Long", + 415: "Unsupported Media Type", + 416: "Requested Range Not Satisfiable", + 417: "Expectation Failed", + 418: "I'm a teapot", # see RFC 2324 + 421: "Misdirected Request", # see RFC 7540 + 422: "Unprocessable Entity", + 423: "Locked", + 424: "Failed Dependency", + 425: "Too Early", # see RFC 8470 + 426: "Upgrade Required", + 428: "Precondition Required", # see RFC 6585 + 429: "Too Many Requests", + 431: "Request Header Fields Too Large", + 449: "Retry With", # proprietary MS extension + 451: "Unavailable For Legal Reasons", + 500: "Internal Server Error", + 501: "Not Implemented", + 502: "Bad Gateway", + 503: "Service Unavailable", + 504: "Gateway Timeout", + 505: "HTTP Version Not Supported", + 506: "Variant Also Negotiates", # see RFC 2295 + 507: "Insufficient Storage", + 508: "Loop Detected", # see RFC 5842 + 510: "Not Extended", + 511: "Network Authentication Failed", +} + + 
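+# Illustrative lookups (a sketch, not used by the module itself):
+#
+#     HTTP_STATUS_CODES[404]                 # "Not Found"
+#     HTTP_STATUS_CODES.get(599, "Unknown")  # fallback for unregistered codes
+
+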
+class COEP(Enum): + """Cross Origin Embedder Policies""" + + UNSAFE_NONE = "unsafe-none" + REQUIRE_CORP = "require-corp" + + +class COOP(Enum): + """Cross Origin Opener Policies""" + + UNSAFE_NONE = "unsafe-none" + SAME_ORIGIN_ALLOW_POPUPS = "same-origin-allow-popups" + SAME_ORIGIN = "same-origin" + + +def quote_header_value( + value: t.Any, + extra_chars: str | None = None, + allow_token: bool = True, +) -> str: + """Add double quotes around a header value. If the header contains only ASCII token + characters, it will be returned unchanged. If the header contains ``"`` or ``\\`` + characters, they will be escaped with an additional ``\\`` character. + + This is the reverse of :func:`unquote_header_value`. + + :param value: The value to quote. Will be converted to a string. + :param allow_token: Disable to quote the value even if it only has token characters. + + .. versionchanged:: 2.3 + The value is quoted if it is the empty string. + + .. versionchanged:: 2.3 + Passing bytes is deprecated and will not be supported in Werkzeug 3.0. + + .. versionchanged:: 2.3 + The ``extra_chars`` parameter is deprecated and will be removed in Werkzeug 3.0. + + .. versionadded:: 0.5 + """ + if isinstance(value, bytes): + warnings.warn( + "Passing bytes is deprecated and will not be supported in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + value = value.decode("latin1") + + if extra_chars is not None: + warnings.warn( + "The 'extra_chars' parameter is deprecated and will be" + " removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + + value = str(value) + + if not value: + return '""' + + if allow_token: + token_chars = _token_chars + + if extra_chars: + token_chars |= set(extra_chars) + + if token_chars.issuperset(value): + return value + + value = value.replace("\\", "\\\\").replace('"', '\\"') + return f'"{value}"' + + +def unquote_header_value(value: str, is_filename: bool | None = None) -> str: + """Remove double quotes and decode slash-escaped ``"`` and ``\\`` characters in a + header value. + + This is the reverse of :func:`quote_header_value`. + + :param value: The header value to unquote. + + .. versionchanged:: 2.3 + The ``is_filename`` parameter is deprecated and will be removed in Werkzeug 3.0. + """ + if is_filename is not None: + warnings.warn( + "The 'is_filename' parameter is deprecated and will be" + " removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + + if len(value) >= 2 and value[0] == value[-1] == '"': + value = value[1:-1] + + if not is_filename: + return value.replace("\\\\", "\\").replace('\\"', '"') + + return value + + +def dump_options_header(header: str | None, options: t.Mapping[str, t.Any]) -> str: + """Produce a header value and ``key=value`` parameters separated by semicolons + ``;``. For example, the ``Content-Type`` header. + + .. code-block:: python + + dump_options_header("text/html", {"charset": "UTF-8"}) + 'text/html; charset=UTF-8' + + This is the reverse of :func:`parse_options_header`. + + If a value contains non-token characters, it will be quoted. + + If a value is ``None``, the parameter is skipped. + + In some keys for some headers, a UTF-8 value can be encoded using a special + ``key*=UTF-8''value`` form, where ``value`` is percent encoded. This function will + not produce that format automatically, but if a given key ends with an asterisk + ``*``, the value is assumed to have that form and will not be quoted further. + + :param header: The primary header value. 
+ :param options: Parameters to encode as ``key=value`` pairs. + + .. versionchanged:: 2.3 + Keys with ``None`` values are skipped rather than treated as a bare key. + + .. versionchanged:: 2.2.3 + If a key ends with ``*``, its value will not be quoted. + """ + segments = [] + + if header is not None: + segments.append(header) + + for key, value in options.items(): + if value is None: + continue + + if key[-1] == "*": + segments.append(f"{key}={value}") + else: + segments.append(f"{key}={quote_header_value(value)}") + + return "; ".join(segments) + + +def dump_header( + iterable: dict[str, t.Any] | t.Iterable[t.Any], + allow_token: bool | None = None, +) -> str: + """Produce a header value from a list of items or ``key=value`` pairs, separated by + commas ``,``. + + This is the reverse of :func:`parse_list_header`, :func:`parse_dict_header`, and + :func:`parse_set_header`. + + If a value contains non-token characters, it will be quoted. + + If a value is ``None``, the key is output alone. + + In some keys for some headers, a UTF-8 value can be encoded using a special + ``key*=UTF-8''value`` form, where ``value`` is percent encoded. This function will + not produce that format automatically, but if a given key ends with an asterisk + ``*``, the value is assumed to have that form and will not be quoted further. + + .. code-block:: python + + dump_header(["foo", "bar baz"]) + 'foo, "bar baz"' + + dump_header({"foo": "bar baz"}) + 'foo="bar baz"' + + :param iterable: The items to create a header from. + + .. versionchanged:: 2.3 + The ``allow_token`` parameter is deprecated and will be removed in Werkzeug 3.0. + + .. versionchanged:: 2.2.3 + If a key ends with ``*``, its value will not be quoted. + """ + if allow_token is not None: + warnings.warn( + "'The 'allow_token' parameter is deprecated and will be" + " removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + else: + allow_token = True + + if isinstance(iterable, dict): + items = [] + + for key, value in iterable.items(): + if value is None: + items.append(key) + elif key[-1] == "*": + items.append(f"{key}={value}") + else: + items.append( + f"{key}={quote_header_value(value, allow_token=allow_token)}" + ) + else: + items = [quote_header_value(x, allow_token=allow_token) for x in iterable] + + return ", ".join(items) + + +def dump_csp_header(header: ds.ContentSecurityPolicy) -> str: + """Dump a Content Security Policy header. + + These are structured into policies such as "default-src 'self'; + script-src 'self'". + + .. versionadded:: 1.0.0 + Support for Content Security Policy headers was added. + + """ + return "; ".join(f"{key} {value}" for key, value in header.items()) + + +def parse_list_header(value: str) -> list[str]: + """Parse a header value that consists of a list of comma separated items according + to `RFC 9110 <https://httpwg.org/specs/rfc9110.html#abnf.extension>`__. + + This extends :func:`urllib.request.parse_http_list` to remove surrounding quotes + from values. + + .. code-block:: python + + parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + This is the reverse of :func:`dump_header`. + + :param value: The header value to parse. 
+ """ + result = [] + + for item in _parse_list_header(value): + if len(item) >= 2 and item[0] == item[-1] == '"': + item = item[1:-1] + + result.append(item) + + return result + + +def parse_dict_header(value: str, cls: type[dict] | None = None) -> dict[str, str]: + """Parse a list header using :func:`parse_list_header`, then parse each item as a + ``key=value`` pair. + + .. code-block:: python + + parse_dict_header('a=b, c="d, e", f') + {"a": "b", "c": "d, e", "f": None} + + This is the reverse of :func:`dump_header`. + + If a key does not have a value, it is ``None``. + + This handles charsets for values as described in + `RFC 2231 <https://www.rfc-editor.org/rfc/rfc2231#section-3>`__. Only ASCII, UTF-8, + and ISO-8859-1 charsets are accepted, otherwise the value remains quoted. + + :param value: The header value to parse. + + .. versionchanged:: 2.3 + Added support for ``key*=charset''value`` encoded items. + + .. versionchanged:: 2.3 + Passing bytes is deprecated, support will be removed in Werkzeug 3.0. + + .. versionchanged:: 2.3 + The ``cls`` argument is deprecated and will be removed in Werkzeug 3.0. + + .. versionchanged:: 0.9 + The ``cls`` argument was added. + """ + if cls is None: + cls = dict + else: + warnings.warn( + "The 'cls' parameter is deprecated and will be removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + + result = cls() + + if isinstance(value, bytes): + warnings.warn( + "Passing bytes is deprecated and will be removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + value = value.decode("latin1") + + for item in parse_list_header(value): + key, has_value, value = item.partition("=") + key = key.strip() + + if not has_value: + result[key] = None + continue + + value = value.strip() + encoding: str | None = None + + if key[-1] == "*": + # key*=charset''value becomes key=value, where value is percent encoded + # adapted from parse_options_header, without the continuation handling + key = key[:-1] + match = _charset_value_re.match(value) + + if match: + # If there is a charset marker in the value, split it off. + encoding, value = match.groups() + encoding = encoding.lower() + + # A safe list of encodings. Modern clients should only send ASCII or UTF-8. + # This list will not be extended further. An invalid encoding will leave the + # value quoted. + if encoding in {"ascii", "us-ascii", "utf-8", "iso-8859-1"}: + # invalid bytes are replaced during unquoting + value = unquote(value, encoding=encoding) + + if len(value) >= 2 and value[0] == value[-1] == '"': + value = value[1:-1] + + result[key] = value + + return result + + +# https://httpwg.org/specs/rfc9110.html#parameter +_parameter_re = re.compile( + r""" + # don't match multiple empty parts, that causes backtracking + \s*;\s* # find the part delimiter + (?: + ([\w!#$%&'*+\-.^`|~]+) # key, one or more token chars + = # equals, with no space on either side + ( # value, token or quoted string + [\w!#$%&'*+\-.^`|~]+ # one or more token chars + | + "(?:\\\\|\\"|.)*?" # quoted string, consuming slash escapes + ) + )? 
# optionally match key=value, to account for empty parts + """, + re.ASCII | re.VERBOSE, +) +# https://www.rfc-editor.org/rfc/rfc2231#section-4 +_charset_value_re = re.compile( + r""" + ([\w!#$%&*+\-.^`|~]*)' # charset part, could be empty + [\w!#$%&*+\-.^`|~]*' # don't care about language part, usually empty + ([\w!#$%&'*+\-.^`|~]+) # one or more token chars with percent encoding + """, + re.ASCII | re.VERBOSE, +) +# https://www.rfc-editor.org/rfc/rfc2231#section-3 +_continuation_re = re.compile(r"\*(\d+)$", re.ASCII) + + +def parse_options_header(value: str | None) -> tuple[str, dict[str, str]]: + """Parse a header that consists of a value with ``key=value`` parameters separated + by semicolons ``;``. For example, the ``Content-Type`` header. + + .. code-block:: python + + parse_options_header("text/html; charset=UTF-8") + ('text/html', {'charset': 'UTF-8'}) + + parse_options_header("") + ("", {}) + + This is the reverse of :func:`dump_options_header`. + + This parses valid parameter parts as described in + `RFC 9110 <https://httpwg.org/specs/rfc9110.html#parameter>`__. Invalid parts are + skipped. + + This handles continuations and charsets as described in + `RFC 2231 <https://www.rfc-editor.org/rfc/rfc2231#section-3>`__, although not as + strictly as the RFC. Only ASCII, UTF-8, and ISO-8859-1 charsets are accepted, + otherwise the value remains quoted. + + Clients may not be consistent in how they handle a quote character within a quoted + value. The `HTML Standard <https://html.spec.whatwg.org/#multipart-form-data>`__ + replaces it with ``%22`` in multipart form data. + `RFC 9110 <https://httpwg.org/specs/rfc9110.html#quoted.strings>`__ uses backslash + escapes in HTTP headers. Both are decoded to the ``"`` character. + + Clients may not be consistent in how they handle non-ASCII characters. HTML + documents must declare ``<meta charset=UTF-8>``, otherwise browsers may replace with + HTML character references, which can be decoded using :func:`html.unescape`. + + :param value: The header value to parse. + :return: ``(value, options)``, where ``options`` is a dict + + .. versionchanged:: 2.3 + Invalid parts, such as keys with no value, quoted keys, and incorrectly quoted + values, are discarded instead of treating as ``None``. + + .. versionchanged:: 2.3 + Only ASCII, UTF-8, and ISO-8859-1 are accepted for charset values. + + .. versionchanged:: 2.3 + Escaped quotes in quoted values, like ``%22`` and ``\\"``, are handled. + + .. versionchanged:: 2.2 + Option names are always converted to lowercase. + + .. versionchanged:: 2.2 + The ``multiple`` parameter was removed. + + .. versionchanged:: 0.15 + :rfc:`2231` parameter continuations are handled. + + .. versionadded:: 0.5 + """ + if value is None: + return "", {} + + value, _, rest = value.partition(";") + value = value.strip() + rest = rest.strip() + + if not value or not rest: + # empty (invalid) value, or value without options + return value, {} + + rest = f";{rest}" + options: dict[str, str] = {} + encoding: str | None = None + continued_encoding: str | None = None + + for pk, pv in _parameter_re.findall(rest): + if not pk: + # empty or invalid part + continue + + pk = pk.lower() + + if pk[-1] == "*": + # key*=charset''value becomes key=value, where value is percent encoded + pk = pk[:-1] + match = _charset_value_re.match(pv) + + if match: + # If there is a valid charset marker in the value, split it off. + encoding, pv = match.groups() + # This might be the empty string, handled next. 
+ encoding = encoding.lower() + + # No charset marker, or marker with empty charset value. + if not encoding: + encoding = continued_encoding + + # A safe list of encodings. Modern clients should only send ASCII or UTF-8. + # This list will not be extended further. An invalid encoding will leave the + # value quoted. + if encoding in {"ascii", "us-ascii", "utf-8", "iso-8859-1"}: + # Continuation parts don't require their own charset marker. This is + # looser than the RFC, it will persist across different keys and allows + # changing the charset during a continuation. But this implementation is + # much simpler than tracking the full state. + continued_encoding = encoding + # invalid bytes are replaced during unquoting + pv = unquote(pv, encoding=encoding) + + # Remove quotes. At this point the value cannot be empty or a single quote. + if pv[0] == pv[-1] == '"': + # HTTP headers use slash, multipart form data uses percent + pv = pv[1:-1].replace("\\\\", "\\").replace('\\"', '"').replace("%22", '"') + + match = _continuation_re.search(pk) + + if match: + # key*0=a; key*1=b becomes key=ab + pk = pk[: match.start()] + options[pk] = options.get(pk, "") + pv + else: + options[pk] = pv + + return value, options + + +_q_value_re = re.compile(r"-?\d+(\.\d+)?", re.ASCII) +_TAnyAccept = t.TypeVar("_TAnyAccept", bound="ds.Accept") + + +@t.overload +def parse_accept_header(value: str | None) -> ds.Accept: + ... + + +@t.overload +def parse_accept_header(value: str | None, cls: type[_TAnyAccept]) -> _TAnyAccept: + ... + + +def parse_accept_header( + value: str | None, cls: type[_TAnyAccept] | None = None +) -> _TAnyAccept: + """Parse an ``Accept`` header according to + `RFC 9110 <https://httpwg.org/specs/rfc9110.html#field.accept>`__. + + Returns an :class:`.Accept` instance, which can sort and inspect items based on + their quality parameter. When parsing ``Accept-Charset``, ``Accept-Encoding``, or + ``Accept-Language``, pass the appropriate :class:`.Accept` subclass. + + :param value: The header value to parse. + :param cls: The :class:`.Accept` class to wrap the result in. + :return: An instance of ``cls``. + + .. versionchanged:: 2.3 + Parse according to RFC 9110. Items with invalid ``q`` values are skipped. + """ + if cls is None: + cls = t.cast(t.Type[_TAnyAccept], ds.Accept) + + if not value: + return cls(None) + + result = [] + + for item in parse_list_header(value): + item, options = parse_options_header(item) + + if "q" in options: + # pop q, remaining options are reconstructed + q_str = options.pop("q").strip() + + if _q_value_re.fullmatch(q_str) is None: + # ignore an invalid q + continue + + q = float(q_str) + + if q < 0 or q > 1: + # ignore an invalid q + continue + else: + q = 1 + + if options: + # reconstruct the media type with any options + item = dump_options_header(item, options) + + result.append((item, q)) + + return cls(result) + + +_TAnyCC = t.TypeVar("_TAnyCC", bound="ds.cache_control._CacheControl") +_t_cc_update = t.Optional[t.Callable[[_TAnyCC], None]] + + +@t.overload +def parse_cache_control_header( + value: str | None, on_update: _t_cc_update, cls: None = None +) -> ds.RequestCacheControl: + ... + + +@t.overload +def parse_cache_control_header( + value: str | None, on_update: _t_cc_update, cls: type[_TAnyCC] +) -> _TAnyCC: + ... + + +def parse_cache_control_header( + value: str | None, + on_update: _t_cc_update = None, + cls: type[_TAnyCC] | None = None, +) -> _TAnyCC: + """Parse a cache control header. 
The RFC differs between response and + request cache control, this method does not. It's your responsibility + to not use the wrong control statements. + + .. versionadded:: 0.5 + The `cls` was added. If not specified an immutable + :class:`~werkzeug.datastructures.RequestCacheControl` is returned. + + :param value: a cache control header to be parsed. + :param on_update: an optional callable that is called every time a value + on the :class:`~werkzeug.datastructures.CacheControl` + object is changed. + :param cls: the class for the returned object. By default + :class:`~werkzeug.datastructures.RequestCacheControl` is used. + :return: a `cls` object. + """ + if cls is None: + cls = t.cast(t.Type[_TAnyCC], ds.RequestCacheControl) + + if not value: + return cls((), on_update) + + return cls(parse_dict_header(value), on_update) + + +_TAnyCSP = t.TypeVar("_TAnyCSP", bound="ds.ContentSecurityPolicy") +_t_csp_update = t.Optional[t.Callable[[_TAnyCSP], None]] + + +@t.overload +def parse_csp_header( + value: str | None, on_update: _t_csp_update, cls: None = None +) -> ds.ContentSecurityPolicy: + ... + + +@t.overload +def parse_csp_header( + value: str | None, on_update: _t_csp_update, cls: type[_TAnyCSP] +) -> _TAnyCSP: + ... + + +def parse_csp_header( + value: str | None, + on_update: _t_csp_update = None, + cls: type[_TAnyCSP] | None = None, +) -> _TAnyCSP: + """Parse a Content Security Policy header. + + .. versionadded:: 1.0.0 + Support for Content Security Policy headers was added. + + :param value: a csp header to be parsed. + :param on_update: an optional callable that is called every time a value + on the object is changed. + :param cls: the class for the returned object. By default + :class:`~werkzeug.datastructures.ContentSecurityPolicy` is used. + :return: a `cls` object. + """ + if cls is None: + cls = t.cast(t.Type[_TAnyCSP], ds.ContentSecurityPolicy) + + if value is None: + return cls((), on_update) + + items = [] + + for policy in value.split(";"): + policy = policy.strip() + + # Ignore badly formatted policies (no space) + if " " in policy: + directive, value = policy.strip().split(" ", 1) + items.append((directive.strip(), value.strip())) + + return cls(items, on_update) + + +def parse_set_header( + value: str | None, + on_update: t.Callable[[ds.HeaderSet], None] | None = None, +) -> ds.HeaderSet: + """Parse a set-like header and return a + :class:`~werkzeug.datastructures.HeaderSet` object: + + >>> hs = parse_set_header('token, "quoted value"') + + The return value is an object that treats the items case-insensitively + and keeps the order of the items: + + >>> 'TOKEN' in hs + True + >>> hs.index('quoted value') + 1 + >>> hs + HeaderSet(['token', 'quoted value']) + + To create a header from the :class:`HeaderSet` again, use the + :func:`dump_header` function. + + :param value: a set header to be parsed. + :param on_update: an optional callable that is called every time a + value on the :class:`~werkzeug.datastructures.HeaderSet` + object is changed. + :return: a :class:`~werkzeug.datastructures.HeaderSet` + """ + if not value: + return ds.HeaderSet(None, on_update) + return ds.HeaderSet(parse_list_header(value), on_update) + + +def parse_authorization_header( + value: str | None, +) -> ds.Authorization | None: + """Parse an HTTP basic/digest authorization header transmitted by the web + browser. The return value is either `None` if the header was invalid or + not given, otherwise an :class:`~werkzeug.datastructures.Authorization` + object. 
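+
+    For illustration (not upstream documentation), the replacement named
+    below; ``dXNlcjpwYXNz`` is base64 for ``user:pass``:
+
+    .. code-block:: python
+
+        from werkzeug.datastructures import Authorization
+
+        auth = Authorization.from_header("Basic dXNlcjpwYXNz")
+        # auth.username == "user" and auth.password == "pass"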
+ + :param value: the authorization header to parse. + :return: a :class:`~werkzeug.datastructures.Authorization` object or `None`. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. Use :meth:`.Authorization.from_header` instead. + """ + from .datastructures import Authorization + + warnings.warn( + "'parse_authorization_header' is deprecated and will be removed in Werkzeug" + " 2.4. Use 'Authorization.from_header' instead.", + DeprecationWarning, + stacklevel=2, + ) + return Authorization.from_header(value) + + +def parse_www_authenticate_header( + value: str | None, + on_update: t.Callable[[ds.WWWAuthenticate], None] | None = None, +) -> ds.WWWAuthenticate: + """Parse an HTTP WWW-Authenticate header into a + :class:`~werkzeug.datastructures.WWWAuthenticate` object. + + :param value: a WWW-Authenticate header to parse. + :param on_update: an optional callable that is called every time a value + on the :class:`~werkzeug.datastructures.WWWAuthenticate` + object is changed. + :return: a :class:`~werkzeug.datastructures.WWWAuthenticate` object. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. Use :meth:`.WWWAuthenticate.from_header` + instead. + """ + from .datastructures.auth import WWWAuthenticate + + warnings.warn( + "'parse_www_authenticate_header' is deprecated and will be removed in Werkzeug" + " 2.4. Use 'WWWAuthenticate.from_header' instead.", + DeprecationWarning, + stacklevel=2, + ) + rv = WWWAuthenticate.from_header(value) + + if rv is None: + rv = WWWAuthenticate("basic") + + rv._on_update = on_update + return rv + + +def parse_if_range_header(value: str | None) -> ds.IfRange: + """Parses an if-range header which can be an etag or a date. Returns + a :class:`~werkzeug.datastructures.IfRange` object. + + .. versionchanged:: 2.0 + If the value represents a datetime, it is timezone-aware. + + .. versionadded:: 0.7 + """ + if not value: + return ds.IfRange() + date = parse_date(value) + if date is not None: + return ds.IfRange(date=date) + # drop weakness information + return ds.IfRange(unquote_etag(value)[0]) + + +def parse_range_header( + value: str | None, make_inclusive: bool = True +) -> ds.Range | None: + """Parses a range header into a :class:`~werkzeug.datastructures.Range` + object. If the header is missing or malformed `None` is returned. + `ranges` is a list of ``(start, stop)`` tuples where the ranges are + non-inclusive. + + .. 
versionadded:: 0.7 + """ + if not value or "=" not in value: + return None + + ranges = [] + last_end = 0 + units, rng = value.split("=", 1) + units = units.strip().lower() + + for item in rng.split(","): + item = item.strip() + if "-" not in item: + return None + if item.startswith("-"): + if last_end < 0: + return None + try: + begin = _plain_int(item) + except ValueError: + return None + end = None + last_end = -1 + elif "-" in item: + begin_str, end_str = item.split("-", 1) + begin_str = begin_str.strip() + end_str = end_str.strip() + + try: + begin = _plain_int(begin_str) + except ValueError: + return None + + if begin < last_end or last_end < 0: + return None + if end_str: + try: + end = _plain_int(end_str) + 1 + except ValueError: + return None + + if begin >= end: + return None + else: + end = None + last_end = end if end is not None else -1 + ranges.append((begin, end)) + + return ds.Range(units, ranges) + + +def parse_content_range_header( + value: str | None, + on_update: t.Callable[[ds.ContentRange], None] | None = None, +) -> ds.ContentRange | None: + """Parses a range header into a + :class:`~werkzeug.datastructures.ContentRange` object or `None` if + parsing is not possible. + + .. versionadded:: 0.7 + + :param value: a content range header to be parsed. + :param on_update: an optional callable that is called every time a value + on the :class:`~werkzeug.datastructures.ContentRange` + object is changed. + """ + if value is None: + return None + try: + units, rangedef = (value or "").strip().split(None, 1) + except ValueError: + return None + + if "/" not in rangedef: + return None + rng, length_str = rangedef.split("/", 1) + if length_str == "*": + length = None + else: + try: + length = _plain_int(length_str) + except ValueError: + return None + + if rng == "*": + if not is_byte_range_valid(None, None, length): + return None + + return ds.ContentRange(units, None, None, length, on_update=on_update) + elif "-" not in rng: + return None + + start_str, stop_str = rng.split("-", 1) + try: + start = _plain_int(start_str) + stop = _plain_int(stop_str) + 1 + except ValueError: + return None + + if is_byte_range_valid(start, stop, length): + return ds.ContentRange(units, start, stop, length, on_update=on_update) + + return None + + +def quote_etag(etag: str, weak: bool = False) -> str: + """Quote an etag. + + :param etag: the etag to quote. + :param weak: set to `True` to tag it "weak". + """ + if '"' in etag: + raise ValueError("invalid etag") + etag = f'"{etag}"' + if weak: + etag = f"W/{etag}" + return etag + + +def unquote_etag( + etag: str | None, +) -> tuple[str, bool] | tuple[None, None]: + """Unquote a single etag: + + >>> unquote_etag('W/"bar"') + ('bar', True) + >>> unquote_etag('"bar"') + ('bar', False) + + :param etag: the etag identifier to unquote. + :return: a ``(etag, weak)`` tuple. + """ + if not etag: + return None, None + etag = etag.strip() + weak = False + if etag.startswith(("W/", "w/")): + weak = True + etag = etag[2:] + if etag[:1] == etag[-1:] == '"': + etag = etag[1:-1] + return etag, weak + + +def parse_etags(value: str | None) -> ds.ETags: + """Parse an etag header. + + :param value: the tag header to parse + :return: an :class:`~werkzeug.datastructures.ETags` object. 
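+
+    A short illustrative sketch (not upstream documentation):
+
+    .. code-block:: python
+
+        etags = parse_etags('W/"weak-tag", "strong-tag"')
+        "strong-tag" in etags            # True; ``in`` ignores weak tags
+        etags.contains_weak("weak-tag")  # True; includes weak tags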
+ """ + if not value: + return ds.ETags() + strong = [] + weak = [] + end = len(value) + pos = 0 + while pos < end: + match = _etag_re.match(value, pos) + if match is None: + break + is_weak, quoted, raw = match.groups() + if raw == "*": + return ds.ETags(star_tag=True) + elif quoted: + raw = quoted + if is_weak: + weak.append(raw) + else: + strong.append(raw) + pos = match.end() + return ds.ETags(strong, weak) + + +def generate_etag(data: bytes) -> str: + """Generate an etag for some data. + + .. versionchanged:: 2.0 + Use SHA-1. MD5 may not be available in some environments. + """ + return sha1(data).hexdigest() + + +def parse_date(value: str | None) -> datetime | None: + """Parse an :rfc:`2822` date into a timezone-aware + :class:`datetime.datetime` object, or ``None`` if parsing fails. + + This is a wrapper for :func:`email.utils.parsedate_to_datetime`. It + returns ``None`` if parsing fails instead of raising an exception, + and always returns a timezone-aware datetime object. If the string + doesn't have timezone information, it is assumed to be UTC. + + :param value: A string with a supported date format. + + .. versionchanged:: 2.0 + Return a timezone-aware datetime object. Use + ``email.utils.parsedate_to_datetime``. + """ + if value is None: + return None + + try: + dt = email.utils.parsedate_to_datetime(value) + except (TypeError, ValueError): + return None + + if dt.tzinfo is None: + return dt.replace(tzinfo=timezone.utc) + + return dt + + +def http_date( + timestamp: datetime | date | int | float | struct_time | None = None, +) -> str: + """Format a datetime object or timestamp into an :rfc:`2822` date + string. + + This is a wrapper for :func:`email.utils.format_datetime`. It + assumes naive datetime objects are in UTC instead of raising an + exception. + + :param timestamp: The datetime or timestamp to format. Defaults to + the current time. + + .. versionchanged:: 2.0 + Use ``email.utils.format_datetime``. Accept ``date`` objects. + """ + if isinstance(timestamp, date): + if not isinstance(timestamp, datetime): + # Assume plain date is midnight UTC. + timestamp = datetime.combine(timestamp, time(), tzinfo=timezone.utc) + else: + # Ensure datetime is timezone-aware. + timestamp = _dt_as_utc(timestamp) + + return email.utils.format_datetime(timestamp, usegmt=True) + + if isinstance(timestamp, struct_time): + timestamp = mktime(timestamp) + + return email.utils.formatdate(timestamp, usegmt=True) + + +def parse_age(value: str | None = None) -> timedelta | None: + """Parses a base-10 integer count of seconds into a timedelta. + + If parsing fails, the return value is `None`. + + :param value: a string consisting of an integer represented in base-10 + :return: a :class:`datetime.timedelta` object or `None`. + """ + if not value: + return None + try: + seconds = int(value) + except ValueError: + return None + if seconds < 0: + return None + try: + return timedelta(seconds=seconds) + except OverflowError: + return None + + +def dump_age(age: timedelta | int | None = None) -> str | None: + """Formats the duration as a base-10 integer. + + :param age: should be an integer number of seconds, + a :class:`datetime.timedelta` object, or, + if the age is unknown, `None` (default). 
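+
+    For illustration (not upstream documentation):
+
+    .. code-block:: python
+
+        from datetime import timedelta
+
+        dump_age(timedelta(minutes=2))  # '120'
+        dump_age(None)                  # None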
+ """ + if age is None: + return None + if isinstance(age, timedelta): + age = int(age.total_seconds()) + else: + age = int(age) + + if age < 0: + raise ValueError("age cannot be negative") + + return str(age) + + +def is_resource_modified( + environ: WSGIEnvironment, + etag: str | None = None, + data: bytes | None = None, + last_modified: datetime | str | None = None, + ignore_if_range: bool = True, +) -> bool: + """Convenience method for conditional requests. + + :param environ: the WSGI environment of the request to be checked. + :param etag: the etag for the response for comparison. + :param data: or alternatively the data of the response to automatically + generate an etag using :func:`generate_etag`. + :param last_modified: an optional date of the last modification. + :param ignore_if_range: If `False`, `If-Range` header will be taken into + account. + :return: `True` if the resource was modified, otherwise `False`. + + .. versionchanged:: 2.0 + SHA-1 is used to generate an etag value for the data. MD5 may + not be available in some environments. + + .. versionchanged:: 1.0.0 + The check is run for methods other than ``GET`` and ``HEAD``. + """ + return _sansio_http.is_resource_modified( + http_range=environ.get("HTTP_RANGE"), + http_if_range=environ.get("HTTP_IF_RANGE"), + http_if_modified_since=environ.get("HTTP_IF_MODIFIED_SINCE"), + http_if_none_match=environ.get("HTTP_IF_NONE_MATCH"), + http_if_match=environ.get("HTTP_IF_MATCH"), + etag=etag, + data=data, + last_modified=last_modified, + ignore_if_range=ignore_if_range, + ) + + +def remove_entity_headers( + headers: ds.Headers | list[tuple[str, str]], + allowed: t.Iterable[str] = ("expires", "content-location"), +) -> None: + """Remove all entity headers from a list or :class:`Headers` object. This + operation works in-place. `Expires` and `Content-Location` headers are + by default not removed. The reason for this is :rfc:`2616` section + 10.3.5 which specifies some entity headers that should be sent. + + .. versionchanged:: 0.5 + added `allowed` parameter. + + :param headers: a list or :class:`Headers` object. + :param allowed: a list of headers that should still be allowed even though + they are entity headers. + """ + allowed = {x.lower() for x in allowed} + headers[:] = [ + (key, value) + for key, value in headers + if not is_entity_header(key) or key.lower() in allowed + ] + + +def remove_hop_by_hop_headers(headers: ds.Headers | list[tuple[str, str]]) -> None: + """Remove all HTTP/1.1 "Hop-by-Hop" headers from a list or + :class:`Headers` object. This operation works in-place. + + .. versionadded:: 0.5 + + :param headers: a list or :class:`Headers` object. + """ + headers[:] = [ + (key, value) for key, value in headers if not is_hop_by_hop_header(key) + ] + + +def is_entity_header(header: str) -> bool: + """Check if a header is an entity header. + + .. versionadded:: 0.5 + + :param header: the header to test. + :return: `True` if it's an entity header, `False` otherwise. + """ + return header.lower() in _entity_headers + + +def is_hop_by_hop_header(header: str) -> bool: + """Check if a header is an HTTP/1.1 "Hop-by-Hop" header. + + .. versionadded:: 0.5 + + :param header: the header to test. + :return: `True` if it's an HTTP/1.1 "Hop-by-Hop" header, `False` otherwise. 
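+
+    A quick illustrative check (not upstream documentation):
+
+    .. code-block:: python
+
+        is_hop_by_hop_header("Transfer-Encoding")  # True
+        is_hop_by_hop_header("Content-Type")       # False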
+ """ + return header.lower() in _hop_by_hop_headers + + +def parse_cookie( + header: WSGIEnvironment | str | None, + charset: str | None = None, + errors: str | None = None, + cls: type[ds.MultiDict] | None = None, +) -> ds.MultiDict[str, str]: + """Parse a cookie from a string or WSGI environ. + + The same key can be provided multiple times, the values are stored + in-order. The default :class:`MultiDict` will have the first value + first, and all values can be retrieved with + :meth:`MultiDict.getlist`. + + :param header: The cookie header as a string, or a WSGI environ dict + with a ``HTTP_COOKIE`` key. + :param cls: A dict-like class to store the parsed cookies in. + Defaults to :class:`MultiDict`. + + .. versionchanged:: 2.3 + Passing bytes, and the ``charset`` and ``errors`` parameters, are deprecated and + will be removed in Werkzeug 3.0. + + .. versionchanged:: 1.0 + Returns a :class:`MultiDict` instead of a ``TypeConversionDict``. + + .. versionchanged:: 0.5 + Returns a :class:`TypeConversionDict` instead of a regular dict. The ``cls`` + parameter was added. + """ + if isinstance(header, dict): + cookie = header.get("HTTP_COOKIE") + elif isinstance(header, bytes): + warnings.warn( + "Passing bytes is deprecated and will not be supported in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + cookie = header.decode() + else: + cookie = header + + if cookie: + cookie = cookie.encode("latin1").decode() + + return _sansio_http.parse_cookie( + cookie=cookie, charset=charset, errors=errors, cls=cls + ) + + +_cookie_no_quote_re = re.compile(r"[\w!#$%&'()*+\-./:<=>?@\[\]^`{|}~]*", re.A) +_cookie_slash_re = re.compile(rb"[\x00-\x19\",;\\\x7f-\xff]", re.A) +_cookie_slash_map = {b'"': b'\\"', b"\\": b"\\\\"} +_cookie_slash_map.update( + (v.to_bytes(1, "big"), b"\\%03o" % v) + for v in [*range(0x20), *b",;", *range(0x7F, 256)] +) + + +def dump_cookie( + key: str, + value: str = "", + max_age: timedelta | int | None = None, + expires: str | datetime | int | float | None = None, + path: str | None = "/", + domain: str | None = None, + secure: bool = False, + httponly: bool = False, + charset: str | None = None, + sync_expires: bool = True, + max_size: int = 4093, + samesite: str | None = None, +) -> str: + """Create a Set-Cookie header without the ``Set-Cookie`` prefix. + + The return value is usually restricted to ascii as the vast majority + of values are properly escaped, but that is no guarantee. It's + tunneled through latin1 as required by :pep:`3333`. + + The return value is not ASCII safe if the key contains unicode + characters. This is technically against the specification but + happens in the wild. It's strongly recommended to not use + non-ASCII values for the keys. + + :param max_age: should be a number of seconds, or `None` (default) if + the cookie should last only as long as the client's + browser session. Additionally `timedelta` objects + are accepted, too. + :param expires: should be a `datetime` object or unix timestamp. + :param path: limits the cookie to a given path, per default it will + span the whole domain. + :param domain: Use this if you want to set a cross-domain cookie. For + example, ``domain="example.com"`` will set a cookie + that is readable by the domain ``www.example.com``, + ``foo.example.com`` etc. Otherwise, a cookie will only + be readable by the domain that set it. + :param secure: The cookie will only be available via HTTPS + :param httponly: disallow JavaScript to access the cookie. 
This is an + extension to the cookie standard and probably not + supported by all browsers. + :param charset: the encoding for string values. + :param sync_expires: automatically set expires if max_age is defined + but expires not. + :param max_size: Warn if the final header value exceeds this size. The + default, 4093, should be safely `supported by most browsers + <cookie_>`_. Set to 0 to disable this check. + :param samesite: Limits the scope of the cookie such that it will + only be attached to requests if those requests are same-site. + + .. _`cookie`: http://browsercookielimits.squawky.net/ + + .. versionchanged:: 2.3.3 + The ``path`` parameter is ``/`` by default. + + .. versionchanged:: 2.3.1 + The value allows more characters without quoting. + + .. versionchanged:: 2.3 + ``localhost`` and other names without a dot are allowed for the domain. A + leading dot is ignored. + + .. versionchanged:: 2.3 + The ``path`` parameter is ``None`` by default. + + .. versionchanged:: 2.3 + Passing bytes, and the ``charset`` parameter, are deprecated and will be removed + in Werkzeug 3.0. + + .. versionchanged:: 1.0.0 + The string ``'None'`` is accepted for ``samesite``. + """ + if charset is not None: + warnings.warn( + "The 'charset' parameter is deprecated and will be removed" + " in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + else: + charset = "utf-8" + + if isinstance(key, bytes): + warnings.warn( + "The 'key' parameter must be a string. Bytes are deprecated" + " and will not be supported in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + key = key.decode() + + if isinstance(value, bytes): + warnings.warn( + "The 'value' parameter must be a string. Bytes are" + " deprecated and will not be supported in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + value = value.decode() + + if path is not None: + # safe = https://url.spec.whatwg.org/#url-path-segment-string + # as well as percent for things that are already quoted + # excluding semicolon since it's part of the header syntax + path = quote(path, safe="%!$&'()*+,/:=@", encoding=charset) + + if domain: + domain = domain.partition(":")[0].lstrip(".").encode("idna").decode("ascii") + + if isinstance(max_age, timedelta): + max_age = int(max_age.total_seconds()) + + if expires is not None: + if not isinstance(expires, str): + expires = http_date(expires) + elif max_age is not None and sync_expires: + expires = http_date(datetime.now(tz=timezone.utc).timestamp() + max_age) + + if samesite is not None: + samesite = samesite.title() + + if samesite not in {"Strict", "Lax", "None"}: + raise ValueError("SameSite must be 'Strict', 'Lax', or 'None'.") + + # Quote value if it contains characters not allowed by RFC 6265. Slash-escape with + # three octal digits, which matches http.cookies, although the RFC suggests base64. + if not _cookie_no_quote_re.fullmatch(value): + # Work with bytes here, since a UTF-8 character could be multiple bytes. + value = _cookie_slash_re.sub( + lambda m: _cookie_slash_map[m.group()], value.encode(charset) + ).decode("ascii") + value = f'"{value}"' + + # Send a non-ASCII key as mojibake. Everything else should already be ASCII. 
+ # TODO Remove encoding dance, it seems like clients accept UTF-8 keys + buf = [f"{key.encode().decode('latin1')}={value}"] + + for k, v in ( + ("Domain", domain), + ("Expires", expires), + ("Max-Age", max_age), + ("Secure", secure), + ("HttpOnly", httponly), + ("Path", path), + ("SameSite", samesite), + ): + if v is None or v is False: + continue + + if v is True: + buf.append(k) + continue + + buf.append(f"{k}={v}") + + rv = "; ".join(buf) + + # Warn if the final value of the cookie is larger than the limit. If the cookie is + # too large, then it may be silently ignored by the browser, which can be quite hard + # to debug. + cookie_size = len(rv) + + if max_size and cookie_size > max_size: + value_size = len(value) + warnings.warn( + f"The '{key}' cookie is too large: the value was {value_size} bytes but the" + f" header required {cookie_size - value_size} extra bytes. The final size" + f" was {cookie_size} bytes but the limit is {max_size} bytes. Browsers may" + " silently ignore cookies larger than this.", + stacklevel=2, + ) + + return rv + + +def is_byte_range_valid( + start: int | None, stop: int | None, length: int | None +) -> bool: + """Checks if a given byte content range is valid for the given length. + + .. versionadded:: 0.7 + """ + if (start is None) != (stop is None): + return False + elif start is None: + return length is None or length >= 0 + elif length is None: + return 0 <= start < stop # type: ignore + elif start >= stop: # type: ignore + return False + return 0 <= start < length + + +# circular dependencies +from . import datastructures as ds +from .sansio import http as _sansio_http diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/local.py b/backend/test/lib/python3.8/site-packages/werkzeug/local.py new file mode 100644 index 0000000000000000000000000000000000000000..fba80e974ad0b94a3657ee9601a78bf84b1febc2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/local.py @@ -0,0 +1,643 @@ +from __future__ import annotations + +import copy +import math +import operator +import typing as t +from contextvars import ContextVar +from functools import partial +from functools import update_wrapper +from operator import attrgetter + +from .wsgi import ClosingIterator + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + +T = t.TypeVar("T") +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + + +def release_local(local: Local | LocalStack) -> None: + """Release the data for the current context in a :class:`Local` or + :class:`LocalStack` without using a :class:`LocalManager`. + + This should not be needed for modern use cases, and may be removed + in the future. + + .. versionadded:: 0.6.1 + """ + local.__release_local__() + + +class Local: + """Create a namespace of context-local data. This wraps a + :class:`ContextVar` containing a :class:`dict` value. + + This may incur a performance penalty compared to using individual + context vars, as it has to copy data to avoid mutating the dict + between nested contexts. + + :param context_var: The :class:`~contextvars.ContextVar` to use as + storage for this local. If not given, one will be created. + Context vars not created at the global scope may interfere with + garbage collection. + + .. versionchanged:: 2.0 + Uses ``ContextVar`` instead of a custom storage implementation. 
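+
+    A minimal usage sketch (not upstream documentation):
+
+    .. code-block:: python
+
+        data = Local()
+        data.user = "alice"
+        data.user            # "alice" within the current context
+        release_local(data)  # clears this context's namespace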
+ """ + + __slots__ = ("__storage",) + + def __init__(self, context_var: ContextVar[dict[str, t.Any]] | None = None) -> None: + if context_var is None: + # A ContextVar not created at global scope interferes with + # Python's garbage collection. However, a local only makes + # sense defined at the global scope as well, in which case + # the GC issue doesn't seem relevant. + context_var = ContextVar(f"werkzeug.Local<{id(self)}>.storage") + + object.__setattr__(self, "_Local__storage", context_var) + + def __iter__(self) -> t.Iterator[tuple[str, t.Any]]: + return iter(self.__storage.get({}).items()) + + def __call__(self, name: str, *, unbound_message: str | None = None) -> LocalProxy: + """Create a :class:`LocalProxy` that access an attribute on this + local namespace. + + :param name: Proxy this attribute. + :param unbound_message: The error message that the proxy will + show if the attribute isn't set. + """ + return LocalProxy(self, name, unbound_message=unbound_message) + + def __release_local__(self) -> None: + self.__storage.set({}) + + def __getattr__(self, name: str) -> t.Any: + values = self.__storage.get({}) + + if name in values: + return values[name] + + raise AttributeError(name) + + def __setattr__(self, name: str, value: t.Any) -> None: + values = self.__storage.get({}).copy() + values[name] = value + self.__storage.set(values) + + def __delattr__(self, name: str) -> None: + values = self.__storage.get({}) + + if name in values: + values = values.copy() + del values[name] + self.__storage.set(values) + else: + raise AttributeError(name) + + +class LocalStack(t.Generic[T]): + """Create a stack of context-local data. This wraps a + :class:`ContextVar` containing a :class:`list` value. + + This may incur a performance penalty compared to using individual + context vars, as it has to copy data to avoid mutating the list + between nested contexts. + + :param context_var: The :class:`~contextvars.ContextVar` to use as + storage for this local. If not given, one will be created. + Context vars not created at the global scope may interfere with + garbage collection. + + .. versionchanged:: 2.0 + Uses ``ContextVar`` instead of a custom storage implementation. + + .. versionadded:: 0.6.1 + """ + + __slots__ = ("_storage",) + + def __init__(self, context_var: ContextVar[list[T]] | None = None) -> None: + if context_var is None: + # A ContextVar not created at global scope interferes with + # Python's garbage collection. However, a local only makes + # sense defined at the global scope as well, in which case + # the GC issue doesn't seem relevant. + context_var = ContextVar(f"werkzeug.LocalStack<{id(self)}>.storage") + + self._storage = context_var + + def __release_local__(self) -> None: + self._storage.set([]) + + def push(self, obj: T) -> list[T]: + """Add a new item to the top of the stack.""" + stack = self._storage.get([]).copy() + stack.append(obj) + self._storage.set(stack) + return stack + + def pop(self) -> T | None: + """Remove the top item from the stack and return it. If the + stack is empty, return ``None``. + """ + stack = self._storage.get([]) + + if len(stack) == 0: + return None + + rv = stack[-1] + self._storage.set(stack[:-1]) + return rv + + @property + def top(self) -> T | None: + """The topmost item on the stack. If the stack is empty, + `None` is returned. 
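+
+        A short usage sketch (not upstream documentation):
+
+        .. code-block:: python
+
+            stack = LocalStack()
+            stack.push(42)
+            stack.top    # 42
+            stack.pop()  # 42; ``top`` is now None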
+ """ + stack = self._storage.get([]) + + if len(stack) == 0: + return None + + return stack[-1] + + def __call__( + self, name: str | None = None, *, unbound_message: str | None = None + ) -> LocalProxy: + """Create a :class:`LocalProxy` that accesses the top of this + local stack. + + :param name: If given, the proxy access this attribute of the + top item, rather than the item itself. + :param unbound_message: The error message that the proxy will + show if the stack is empty. + """ + return LocalProxy(self, name, unbound_message=unbound_message) + + +class LocalManager: + """Manage releasing the data for the current context in one or more + :class:`Local` and :class:`LocalStack` objects. + + This should not be needed for modern use cases, and may be removed + in the future. + + :param locals: A local or list of locals to manage. + + .. versionchanged:: 2.1 + The ``ident_func`` was removed. + + .. versionchanged:: 0.7 + The ``ident_func`` parameter was added. + + .. versionchanged:: 0.6.1 + The :func:`release_local` function can be used instead of a + manager. + """ + + __slots__ = ("locals",) + + def __init__( + self, + locals: None | (Local | LocalStack | t.Iterable[Local | LocalStack]) = None, + ) -> None: + if locals is None: + self.locals = [] + elif isinstance(locals, Local): + self.locals = [locals] + else: + self.locals = list(locals) # type: ignore[arg-type] + + def cleanup(self) -> None: + """Release the data in the locals for this context. Call this at + the end of each request or use :meth:`make_middleware`. + """ + for local in self.locals: + release_local(local) + + def make_middleware(self, app: WSGIApplication) -> WSGIApplication: + """Wrap a WSGI application so that local data is released + automatically after the response has been sent for a request. + """ + + def application( + environ: WSGIEnvironment, start_response: StartResponse + ) -> t.Iterable[bytes]: + return ClosingIterator(app(environ, start_response), self.cleanup) + + return application + + def middleware(self, func: WSGIApplication) -> WSGIApplication: + """Like :meth:`make_middleware` but used as a decorator on the + WSGI application function. + + .. code-block:: python + + @manager.middleware + def application(environ, start_response): + ... + """ + return update_wrapper(self.make_middleware(func), func) + + def __repr__(self) -> str: + return f"<{type(self).__name__} storages: {len(self.locals)}>" + + +class _ProxyLookup: + """Descriptor that handles proxied attribute lookup for + :class:`LocalProxy`. + + :param f: The built-in function this attribute is accessed through. + Instead of looking up the special method, the function call + is redone on the object. + :param fallback: Return this function if the proxy is unbound + instead of raising a :exc:`RuntimeError`. + :param is_attr: This proxied name is an attribute, not a function. + Call the fallback immediately to get the value. + :param class_value: Value to return when accessed from the + ``LocalProxy`` class directly. Used for ``__doc__`` so building + docs still works. + """ + + __slots__ = ("bind_f", "fallback", "is_attr", "class_value", "name") + + def __init__( + self, + f: t.Callable | None = None, + fallback: t.Callable | None = None, + class_value: t.Any | None = None, + is_attr: bool = False, + ) -> None: + bind_f: t.Callable[[LocalProxy, t.Any], t.Callable] | None + + if hasattr(f, "__get__"): + # A Python function, can be turned into a bound method. 
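+            # f.__get__(obj, type(obj)) invokes the descriptor protocol by
+            # hand, returning f bound to obj just as ordinary attribute
+            # lookup would produce a bound method.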
+ + def bind_f(instance: LocalProxy, obj: t.Any) -> t.Callable: + return f.__get__(obj, type(obj)) # type: ignore + + elif f is not None: + # A C function, use partial to bind the first argument. + + def bind_f(instance: LocalProxy, obj: t.Any) -> t.Callable: + return partial(f, obj) + + else: + # Use getattr, which will produce a bound method. + bind_f = None + + self.bind_f = bind_f + self.fallback = fallback + self.class_value = class_value + self.is_attr = is_attr + + def __set_name__(self, owner: LocalProxy, name: str) -> None: + self.name = name + + def __get__(self, instance: LocalProxy, owner: type | None = None) -> t.Any: + if instance is None: + if self.class_value is not None: + return self.class_value + + return self + + try: + obj = instance._get_current_object() + except RuntimeError: + if self.fallback is None: + raise + + fallback = self.fallback.__get__(instance, owner) + + if self.is_attr: + # __class__ and __doc__ are attributes, not methods. + # Call the fallback to get the value. + return fallback() + + return fallback + + if self.bind_f is not None: + return self.bind_f(instance, obj) + + return getattr(obj, self.name) + + def __repr__(self) -> str: + return f"proxy {self.name}" + + def __call__(self, instance: LocalProxy, *args: t.Any, **kwargs: t.Any) -> t.Any: + """Support calling unbound methods from the class. For example, + this happens with ``copy.copy``, which does + ``type(x).__copy__(x)``. ``type(x)`` can't be proxied, so it + returns the proxy type and descriptor. + """ + return self.__get__(instance, type(instance))(*args, **kwargs) + + +class _ProxyIOp(_ProxyLookup): + """Look up an augmented assignment method on a proxied object. The + method is wrapped to return the proxy instead of the object. + """ + + __slots__ = () + + def __init__( + self, f: t.Callable | None = None, fallback: t.Callable | None = None + ) -> None: + super().__init__(f, fallback) + + def bind_f(instance: LocalProxy, obj: t.Any) -> t.Callable: + def i_op(self: t.Any, other: t.Any) -> LocalProxy: + f(self, other) # type: ignore + return instance + + return i_op.__get__(obj, type(obj)) # type: ignore + + self.bind_f = bind_f + + +def _l_to_r_op(op: F) -> F: + """Swap the argument order to turn an l-op into an r-op.""" + + def r_op(obj: t.Any, other: t.Any) -> t.Any: + return op(other, obj) + + return t.cast(F, r_op) + + +def _identity(o: T) -> T: + return o + + +class LocalProxy(t.Generic[T]): + """A proxy to the object bound to a context-local object. All + operations on the proxy are forwarded to the bound object. If no + object is bound, a ``RuntimeError`` is raised. + + :param local: The context-local object that provides the proxied + object. + :param name: Proxy this attribute from the proxied object. + :param unbound_message: The error message to show if the + context-local object is unbound. + + Proxy a :class:`~contextvars.ContextVar` to make it easier to + access. Pass a name to proxy that attribute. + + .. code-block:: python + + _request_var = ContextVar("request") + request = LocalProxy(_request_var) + session = LocalProxy(_request_var, "session") + + Proxy an attribute on a :class:`Local` namespace by calling the + local with the attribute name: + + .. code-block:: python + + data = Local() + user = data("user") + + Proxy the top item on a :class:`LocalStack` by calling the local. + Pass a name to proxy that attribute. + + .. 
code-block:: + + app_stack = LocalStack() + current_app = app_stack() + g = app_stack("g") + + Pass a function to proxy the return value from that function. This + was previously used to access attributes of local objects before + that was supported directly. + + .. code-block:: python + + session = LocalProxy(lambda: request.session) + + ``__repr__`` and ``__class__`` are proxied, so ``repr(x)`` and + ``isinstance(x, cls)`` will look like the proxied object. Use + ``issubclass(type(x), LocalProxy)`` to check if an object is a + proxy. + + .. code-block:: python + + repr(user) # <User admin> + isinstance(user, User) # True + issubclass(type(user), LocalProxy) # True + + .. versionchanged:: 2.2.2 + ``__wrapped__`` is set when wrapping an object, not only when + wrapping a function, to prevent doctest from failing. + + .. versionchanged:: 2.2 + Can proxy a ``ContextVar`` or ``LocalStack`` directly. + + .. versionchanged:: 2.2 + The ``name`` parameter can be used with any proxied object, not + only ``Local``. + + .. versionchanged:: 2.2 + Added the ``unbound_message`` parameter. + + .. versionchanged:: 2.0 + Updated proxied attributes and methods to reflect the current + data model. + + .. versionchanged:: 0.6.1 + The class can be instantiated with a callable. + """ + + __slots__ = ("__wrapped", "_get_current_object") + + _get_current_object: t.Callable[[], T] + """Return the current object this proxy is bound to. If the proxy is + unbound, this raises a ``RuntimeError``. + + This should be used if you need to pass the object to something that + doesn't understand the proxy. It can also be useful for performance + if you are accessing the object multiple times in a function, rather + than going through the proxy multiple times. + """ + + def __init__( + self, + local: ContextVar[T] | Local | LocalStack[T] | t.Callable[[], T], + name: str | None = None, + *, + unbound_message: str | None = None, + ) -> None: + if name is None: + get_name = _identity + else: + get_name = attrgetter(name) # type: ignore[assignment] + + if unbound_message is None: + unbound_message = "object is not bound" + + if isinstance(local, Local): + if name is None: + raise TypeError("'name' is required when proxying a 'Local' object.") + + def _get_current_object() -> T: + try: + return get_name(local) # type: ignore[return-value] + except AttributeError: + raise RuntimeError(unbound_message) from None + + elif isinstance(local, LocalStack): + + def _get_current_object() -> T: + obj = local.top + + if obj is None: + raise RuntimeError(unbound_message) + + return get_name(obj) + + elif isinstance(local, ContextVar): + + def _get_current_object() -> T: + try: + obj = local.get() + except LookupError: + raise RuntimeError(unbound_message) from None + + return get_name(obj) + + elif callable(local): + + def _get_current_object() -> T: + return get_name(local()) + + else: + raise TypeError(f"Don't know how to proxy '{type(local)}'.") + + object.__setattr__(self, "_LocalProxy__wrapped", local) + object.__setattr__(self, "_get_current_object", _get_current_object) + + __doc__ = _ProxyLookup( # type: ignore + class_value=__doc__, fallback=lambda self: type(self).__doc__, is_attr=True + ) + __wrapped__ = _ProxyLookup( + fallback=lambda self: self._LocalProxy__wrapped, is_attr=True + ) + # __del__ should only delete the proxy + __repr__ = _ProxyLookup( # type: ignore + repr, fallback=lambda self: f"<{type(self).__name__} unbound>" + ) + __str__ = _ProxyLookup(str) # type: ignore + __bytes__ = _ProxyLookup(bytes) + __format__ = 
_ProxyLookup() # type: ignore + __lt__ = _ProxyLookup(operator.lt) + __le__ = _ProxyLookup(operator.le) + __eq__ = _ProxyLookup(operator.eq) # type: ignore + __ne__ = _ProxyLookup(operator.ne) # type: ignore + __gt__ = _ProxyLookup(operator.gt) + __ge__ = _ProxyLookup(operator.ge) + __hash__ = _ProxyLookup(hash) # type: ignore + __bool__ = _ProxyLookup(bool, fallback=lambda self: False) + __getattr__ = _ProxyLookup(getattr) + # __getattribute__ triggered through __getattr__ + __setattr__ = _ProxyLookup(setattr) # type: ignore + __delattr__ = _ProxyLookup(delattr) # type: ignore + __dir__ = _ProxyLookup(dir, fallback=lambda self: []) # type: ignore + # __get__ (proxying descriptor not supported) + # __set__ (descriptor) + # __delete__ (descriptor) + # __set_name__ (descriptor) + # __objclass__ (descriptor) + # __slots__ used by proxy itself + # __dict__ (__getattr__) + # __weakref__ (__getattr__) + # __init_subclass__ (proxying metaclass not supported) + # __prepare__ (metaclass) + __class__ = _ProxyLookup( + fallback=lambda self: type(self), is_attr=True + ) # type: ignore + __instancecheck__ = _ProxyLookup(lambda self, other: isinstance(other, self)) + __subclasscheck__ = _ProxyLookup(lambda self, other: issubclass(other, self)) + # __class_getitem__ triggered through __getitem__ + __call__ = _ProxyLookup(lambda self, *args, **kwargs: self(*args, **kwargs)) + __len__ = _ProxyLookup(len) + __length_hint__ = _ProxyLookup(operator.length_hint) + __getitem__ = _ProxyLookup(operator.getitem) + __setitem__ = _ProxyLookup(operator.setitem) + __delitem__ = _ProxyLookup(operator.delitem) + # __missing__ triggered through __getitem__ + __iter__ = _ProxyLookup(iter) + __next__ = _ProxyLookup(next) + __reversed__ = _ProxyLookup(reversed) + __contains__ = _ProxyLookup(operator.contains) + __add__ = _ProxyLookup(operator.add) + __sub__ = _ProxyLookup(operator.sub) + __mul__ = _ProxyLookup(operator.mul) + __matmul__ = _ProxyLookup(operator.matmul) + __truediv__ = _ProxyLookup(operator.truediv) + __floordiv__ = _ProxyLookup(operator.floordiv) + __mod__ = _ProxyLookup(operator.mod) + __divmod__ = _ProxyLookup(divmod) + __pow__ = _ProxyLookup(pow) + __lshift__ = _ProxyLookup(operator.lshift) + __rshift__ = _ProxyLookup(operator.rshift) + __and__ = _ProxyLookup(operator.and_) + __xor__ = _ProxyLookup(operator.xor) + __or__ = _ProxyLookup(operator.or_) + __radd__ = _ProxyLookup(_l_to_r_op(operator.add)) + __rsub__ = _ProxyLookup(_l_to_r_op(operator.sub)) + __rmul__ = _ProxyLookup(_l_to_r_op(operator.mul)) + __rmatmul__ = _ProxyLookup(_l_to_r_op(operator.matmul)) + __rtruediv__ = _ProxyLookup(_l_to_r_op(operator.truediv)) + __rfloordiv__ = _ProxyLookup(_l_to_r_op(operator.floordiv)) + __rmod__ = _ProxyLookup(_l_to_r_op(operator.mod)) + __rdivmod__ = _ProxyLookup(_l_to_r_op(divmod)) + __rpow__ = _ProxyLookup(_l_to_r_op(pow)) + __rlshift__ = _ProxyLookup(_l_to_r_op(operator.lshift)) + __rrshift__ = _ProxyLookup(_l_to_r_op(operator.rshift)) + __rand__ = _ProxyLookup(_l_to_r_op(operator.and_)) + __rxor__ = _ProxyLookup(_l_to_r_op(operator.xor)) + __ror__ = _ProxyLookup(_l_to_r_op(operator.or_)) + __iadd__ = _ProxyIOp(operator.iadd) + __isub__ = _ProxyIOp(operator.isub) + __imul__ = _ProxyIOp(operator.imul) + __imatmul__ = _ProxyIOp(operator.imatmul) + __itruediv__ = _ProxyIOp(operator.itruediv) + __ifloordiv__ = _ProxyIOp(operator.ifloordiv) + __imod__ = _ProxyIOp(operator.imod) + __ipow__ = _ProxyIOp(operator.ipow) + __ilshift__ = _ProxyIOp(operator.ilshift) + __irshift__ = _ProxyIOp(operator.irshift) + __iand__ 
= _ProxyIOp(operator.iand) + __ixor__ = _ProxyIOp(operator.ixor) + __ior__ = _ProxyIOp(operator.ior) + __neg__ = _ProxyLookup(operator.neg) + __pos__ = _ProxyLookup(operator.pos) + __abs__ = _ProxyLookup(abs) + __invert__ = _ProxyLookup(operator.invert) + __complex__ = _ProxyLookup(complex) + __int__ = _ProxyLookup(int) + __float__ = _ProxyLookup(float) + __index__ = _ProxyLookup(operator.index) + __round__ = _ProxyLookup(round) + __trunc__ = _ProxyLookup(math.trunc) + __floor__ = _ProxyLookup(math.floor) + __ceil__ = _ProxyLookup(math.ceil) + __enter__ = _ProxyLookup() + __exit__ = _ProxyLookup() + __await__ = _ProxyLookup() + __aiter__ = _ProxyLookup() + __anext__ = _ProxyLookup() + __aenter__ = _ProxyLookup() + __aexit__ = _ProxyLookup() + __copy__ = _ProxyLookup(copy.copy) + __deepcopy__ = _ProxyLookup(copy.deepcopy) + # __getnewargs_ex__ (pickle through proxy not supported) + # __getnewargs__ (pickle) + # __getstate__ (pickle) + # __setstate__ (pickle) + # __reduce__ (pickle) + # __reduce_ex__ (pickle) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__init__.py b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f5cb2f6ad1f61d4ad3143bae5816ef69b89fb575 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/dispatcher.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/dispatcher.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b57d77e70075fca9da2aeda0c6e422e33515d73b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/dispatcher.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/http_proxy.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/http_proxy.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f63244ada0951beb177fbb15291a26c9dd322118 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/http_proxy.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/lint.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/lint.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..84385d3a93731135a788447c9ce62c91e11a619c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/lint.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/profiler.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/profiler.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..897f2e7ef74f1cdb75745b48e811c889f0455890 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/profiler.cpython-38.pyc differ 
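To make the proxying machinery above concrete, here is a minimal sketch of how `Local`, `LocalStack`, and `LocalProxy` from the vendored `werkzeug/local.py` compose; all variable names are illustrative and not part of the diff:

.. code-block:: python

    from contextvars import ContextVar
    from werkzeug.local import Local, LocalProxy, LocalStack

    # Proxy a ContextVar directly; the proxy forwards to whatever is set.
    _request_var: ContextVar = ContextVar("request")
    request = LocalProxy(_request_var, unbound_message="no active request")

    # A Local namespace: attribute writes are visible only in this context.
    data = Local()
    user = data("user")           # proxy for data.user
    data.user = "admin"
    assert str(user) == "admin"   # operations forward to the bound object

    # A LocalStack: calling it returns a proxy for the top item.
    _app_stack: LocalStack = LocalStack()
    current_app = _app_stack()
    _app_stack.push("my-app")
    assert str(current_app) == "my-app"
    _app_stack.pop()  # stack now empty; using current_app raises RuntimeError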
diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/proxy_fix.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/proxy_fix.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..acf28ddd95058b9c104bc9268d0b73595b6b120a Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/proxy_fix.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/shared_data.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/shared_data.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c00bba14a29e623b9cdf329f86a33f692d38862e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/__pycache__/shared_data.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/middleware/dispatcher.py b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/dispatcher.py new file mode 100644 index 0000000000000000000000000000000000000000..559fea585b014da707139b46e3ec76a65ff8b149 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/dispatcher.py @@ -0,0 +1,80 @@ +""" +Application Dispatcher +====================== + +This middleware creates a single WSGI application that dispatches to +multiple other WSGI applications mounted at different URL paths. + +A common example is writing a Single Page Application, where you have a +backend API and a frontend written in JavaScript that does the routing +in the browser rather than requesting different pages from the server. +The frontend is a single HTML and JS file that should be served for any +path besides "/api". + +This example dispatches to an API app under "/api", an admin app +under "/admin", and an app that serves frontend files for all other +requests:: + + app = DispatcherMiddleware(serve_frontend, { + '/api': api_app, + '/admin': admin_app, + }) + +In production, you might instead handle this at the HTTP server level, +serving files or proxying to application servers based on location. The +API and admin apps would each be deployed with a separate WSGI server, +and the static files would be served directly by the HTTP server. + +.. autoclass:: DispatcherMiddleware + +:copyright: 2007 Pallets +:license: BSD-3-Clause +""" +from __future__ import annotations + +import typing as t + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class DispatcherMiddleware: + """Combine multiple applications as a single WSGI application. + Requests are dispatched to an application based on the path it is + mounted under. + + :param app: The WSGI application to dispatch to if the request + doesn't match a mounted path. + :param mounts: Maps path prefixes to applications for dispatching. 
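+
+    A sketch of the resulting environ rewriting (paths illustrative)::
+
+        app = DispatcherMiddleware(frontend, {"/api": api_app})
+        # GET /api/v1/users -> api_app is called with
+        # SCRIPT_NAME="/api" and PATH_INFO="/v1/users"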
+ """ + + def __init__( + self, + app: WSGIApplication, + mounts: dict[str, WSGIApplication] | None = None, + ) -> None: + self.app = app + self.mounts = mounts or {} + + def __call__( + self, environ: WSGIEnvironment, start_response: StartResponse + ) -> t.Iterable[bytes]: + script = environ.get("PATH_INFO", "") + path_info = "" + + while "/" in script: + if script in self.mounts: + app = self.mounts[script] + break + + script, last_item = script.rsplit("/", 1) + path_info = f"/{last_item}{path_info}" + else: + app = self.mounts.get(script, self.app) + + original_script_name = environ.get("SCRIPT_NAME", "") + environ["SCRIPT_NAME"] = original_script_name + script + environ["PATH_INFO"] = path_info + return app(environ, start_response) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/middleware/http_proxy.py b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/http_proxy.py new file mode 100644 index 0000000000000000000000000000000000000000..59ba9b32472c41bef0476d1073ab1b5397596d16 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/http_proxy.py @@ -0,0 +1,235 @@ +""" +Basic HTTP Proxy +================ + +.. autoclass:: ProxyMiddleware + +:copyright: 2007 Pallets +:license: BSD-3-Clause +""" +from __future__ import annotations + +import typing as t +from http import client +from urllib.parse import quote +from urllib.parse import urlsplit + +from ..datastructures import EnvironHeaders +from ..http import is_hop_by_hop_header +from ..wsgi import get_input_stream + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class ProxyMiddleware: + """Proxy requests under a path to an external server, routing other + requests to the app. + + This middleware can only proxy HTTP requests, as HTTP is the only + protocol handled by the WSGI server. Other protocols, such as + WebSocket requests, cannot be proxied at this layer. This should + only be used for development, in production a real proxy server + should be used. + + The middleware takes a dict mapping a path prefix to a dict + describing the host to be proxied to:: + + app = ProxyMiddleware(app, { + "/static/": { + "target": "http://127.0.0.1:5001/", + } + }) + + Each host has the following options: + + ``target``: + The target URL to dispatch to. This is required. + ``remove_prefix``: + Whether to remove the prefix from the URL before dispatching it + to the target. The default is ``False``. + ``host``: + ``"<auto>"`` (default): + The host header is automatically rewritten to the URL of the + target. + ``None``: + The host header is unmodified from the client request. + Any other value: + The host header is overwritten with the value. + ``headers``: + A dictionary of headers to be sent with the request to the + target. The default is ``{}``. + ``ssl_context``: + A :class:`ssl.SSLContext` defining how to verify requests if the + target is HTTPS. The default is ``None``. + + In the example above, everything under ``"/static/"`` is proxied to + the server on port 5001. The host header is rewritten to the target, + and the ``"/static/"`` prefix is removed from the URLs. + + :param app: The WSGI application to wrap. + :param targets: Proxy target configurations. See description above. + :param chunk_size: Size of chunks to read from input stream and + write to target. + :param timeout: Seconds before an operation to a target fails. + + .. 
versionadded:: 0.14 + """ + + def __init__( + self, + app: WSGIApplication, + targets: t.Mapping[str, dict[str, t.Any]], + chunk_size: int = 2 << 13, + timeout: int = 10, + ) -> None: + def _set_defaults(opts: dict[str, t.Any]) -> dict[str, t.Any]: + opts.setdefault("remove_prefix", False) + opts.setdefault("host", "<auto>") + opts.setdefault("headers", {}) + opts.setdefault("ssl_context", None) + return opts + + self.app = app + self.targets = { + f"/{k.strip('/')}/": _set_defaults(v) for k, v in targets.items() + } + self.chunk_size = chunk_size + self.timeout = timeout + + def proxy_to( + self, opts: dict[str, t.Any], path: str, prefix: str + ) -> WSGIApplication: + target = urlsplit(opts["target"]) + # socket can handle unicode host, but header must be ascii + host = target.hostname.encode("idna").decode("ascii") + + def application( + environ: WSGIEnvironment, start_response: StartResponse + ) -> t.Iterable[bytes]: + headers = list(EnvironHeaders(environ).items()) + headers[:] = [ + (k, v) + for k, v in headers + if not is_hop_by_hop_header(k) + and k.lower() not in ("content-length", "host") + ] + headers.append(("Connection", "close")) + + if opts["host"] == "<auto>": + headers.append(("Host", host)) + elif opts["host"] is None: + headers.append(("Host", environ["HTTP_HOST"])) + else: + headers.append(("Host", opts["host"])) + + headers.extend(opts["headers"].items()) + remote_path = path + + if opts["remove_prefix"]: + remote_path = remote_path[len(prefix) :].lstrip("/") + remote_path = f"{target.path.rstrip('/')}/{remote_path}" + + content_length = environ.get("CONTENT_LENGTH") + chunked = False + + if content_length not in ("", None): + headers.append(("Content-Length", content_length)) # type: ignore + elif content_length is not None: + headers.append(("Transfer-Encoding", "chunked")) + chunked = True + + try: + if target.scheme == "http": + con = client.HTTPConnection( + host, target.port or 80, timeout=self.timeout + ) + elif target.scheme == "https": + con = client.HTTPSConnection( + host, + target.port or 443, + timeout=self.timeout, + context=opts["ssl_context"], + ) + else: + raise RuntimeError( + "Target scheme must be 'http' or 'https', got" + f" {target.scheme!r}." 
+ ) + + con.connect() + # safe = https://url.spec.whatwg.org/#url-path-segment-string + # as well as percent for things that are already quoted + remote_url = quote(remote_path, safe="!$&'()*+,/:;=@%") + querystring = environ["QUERY_STRING"] + + if querystring: + remote_url = f"{remote_url}?{querystring}" + + con.putrequest(environ["REQUEST_METHOD"], remote_url, skip_host=True) + + for k, v in headers: + if k.lower() == "connection": + v = "close" + + con.putheader(k, v) + + con.endheaders() + stream = get_input_stream(environ) + + while True: + data = stream.read(self.chunk_size) + + if not data: + break + + if chunked: + con.send(b"%x\r\n%s\r\n" % (len(data), data)) + else: + con.send(data) + + resp = con.getresponse() + except OSError: + from ..exceptions import BadGateway + + return BadGateway()(environ, start_response) + + start_response( + f"{resp.status} {resp.reason}", + [ + (k.title(), v) + for k, v in resp.getheaders() + if not is_hop_by_hop_header(k) + ], + ) + + def read() -> t.Iterator[bytes]: + while True: + try: + data = resp.read(self.chunk_size) + except OSError: + break + + if not data: + break + + yield data + + return read() + + return application + + def __call__( + self, environ: WSGIEnvironment, start_response: StartResponse + ) -> t.Iterable[bytes]: + path = environ["PATH_INFO"] + app = self.app + + for prefix, opts in self.targets.items(): + if path.startswith(prefix): + app = self.proxy_to(opts, path, prefix) + break + + return app(environ, start_response) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/middleware/lint.py b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/lint.py new file mode 100644 index 0000000000000000000000000000000000000000..462959943ba2823f447ac76e8b8b7494e780dde0 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/lint.py @@ -0,0 +1,420 @@ +""" +WSGI Protocol Linter +==================== + +This module provides a middleware that performs sanity checks on the +behavior of the WSGI server and application. It checks that the +:pep:`3333` WSGI spec is properly implemented. It also warns on some +common HTTP errors such as non-empty responses for 304 status codes. + +.. autoclass:: LintMiddleware + +:copyright: 2007 Pallets +:license: BSD-3-Clause +""" +from __future__ import annotations + +import typing as t +from types import TracebackType +from urllib.parse import urlparse +from warnings import warn + +from ..datastructures import Headers +from ..http import is_entity_header +from ..wsgi import FileWrapper + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class WSGIWarning(Warning): + """Warning class for WSGI warnings.""" + + +class HTTPWarning(Warning): + """Warning class for HTTP warnings.""" + + +def check_type(context: str, obj: object, need: t.Type = str) -> None: + if type(obj) is not need: + warn( + f"{context!r} requires {need.__name__!r}, got {type(obj).__name__!r}.", + WSGIWarning, + stacklevel=3, + ) + + +class InputStream: + def __init__(self, stream: t.IO[bytes]) -> None: + self._stream = stream + + def read(self, *args: t.Any) -> bytes: + if len(args) == 0: + warn( + "WSGI does not guarantee an EOF marker on the input stream, thus making" + " calls to 'wsgi.input.read()' unsafe. 
Conforming servers may never" + " return from this call.", + WSGIWarning, + stacklevel=2, + ) + elif len(args) != 1: + warn( + "Too many parameters passed to 'wsgi.input.read()'.", + WSGIWarning, + stacklevel=2, + ) + return self._stream.read(*args) + + def readline(self, *args: t.Any) -> bytes: + if len(args) == 0: + warn( + "Calls to 'wsgi.input.readline()' without arguments are unsafe. Use" + " 'wsgi.input.read()' instead.", + WSGIWarning, + stacklevel=2, + ) + elif len(args) == 1: + warn( + "'wsgi.input.readline()' was called with a size hint. WSGI does not" + " support this, although it's available on all major servers.", + WSGIWarning, + stacklevel=2, + ) + else: + raise TypeError("Too many arguments passed to 'wsgi.input.readline()'.") + return self._stream.readline(*args) + + def __iter__(self) -> t.Iterator[bytes]: + try: + return iter(self._stream) + except TypeError: + warn("'wsgi.input' is not iterable.", WSGIWarning, stacklevel=2) + return iter(()) + + def close(self) -> None: + warn("The application closed the input stream!", WSGIWarning, stacklevel=2) + self._stream.close() + + +class ErrorStream: + def __init__(self, stream: t.IO[str]) -> None: + self._stream = stream + + def write(self, s: str) -> None: + check_type("wsgi.error.write()", s, str) + self._stream.write(s) + + def flush(self) -> None: + self._stream.flush() + + def writelines(self, seq: t.Iterable[str]) -> None: + for line in seq: + self.write(line) + + def close(self) -> None: + warn("The application closed the error stream!", WSGIWarning, stacklevel=2) + self._stream.close() + + +class GuardedWrite: + def __init__(self, write: t.Callable[[bytes], object], chunks: list[int]) -> None: + self._write = write + self._chunks = chunks + + def __call__(self, s: bytes) -> None: + check_type("write()", s, bytes) + self._write(s) + self._chunks.append(len(s)) + + +class GuardedIterator: + def __init__( + self, + iterator: t.Iterable[bytes], + headers_set: tuple[int, Headers], + chunks: list[int], + ) -> None: + self._iterator = iterator + self._next = iter(iterator).__next__ + self.closed = False + self.headers_set = headers_set + self.chunks = chunks + + def __iter__(self) -> GuardedIterator: + return self + + def __next__(self) -> bytes: + if self.closed: + warn("Iterated over closed 'app_iter'.", WSGIWarning, stacklevel=2) + + rv = self._next() + + if not self.headers_set: + warn( + "The application returned before it started the response.", + WSGIWarning, + stacklevel=2, + ) + + check_type("application iterator items", rv, bytes) + self.chunks.append(len(rv)) + return rv + + def close(self) -> None: + self.closed = True + + if hasattr(self._iterator, "close"): + self._iterator.close() + + if self.headers_set: + status_code, headers = self.headers_set + bytes_sent = sum(self.chunks) + content_length = headers.get("content-length", type=int) + + if status_code == 304: + for key, _value in headers: + key = key.lower() + if key not in ("expires", "content-location") and is_entity_header( + key + ): + warn( + f"Entity header {key!r} found in 304 response.", HTTPWarning + ) + if bytes_sent: + warn("304 responses must not have a body.", HTTPWarning) + elif 100 <= status_code < 200 or status_code == 204: + if content_length != 0: + warn( + f"{status_code} responses must have an empty content length.", + HTTPWarning, + ) + if bytes_sent: + warn(f"{status_code} responses must not have a body.", HTTPWarning) + elif content_length is not None and content_length != bytes_sent: + warn( + "Content-Length and the number of bytes 
sent to the" + " client do not match.", + WSGIWarning, + ) + + def __del__(self) -> None: + if not self.closed: + try: + warn( + "Iterator was garbage collected before it was closed.", WSGIWarning + ) + except Exception: + pass + + +class LintMiddleware: + """Warns about common errors in the WSGI and HTTP behavior of the + server and wrapped application. Some of the issues it checks are: + + - invalid status codes + - non-bytes sent to the WSGI server + - strings returned from the WSGI application + - non-empty conditional responses + - unquoted etags + - relative URLs in the Location header + - unsafe calls to wsgi.input + - unclosed iterators + + Error information is emitted using the :mod:`warnings` module. + + :param app: The WSGI application to wrap. + + .. code-block:: python + + from werkzeug.middleware.lint import LintMiddleware + app = LintMiddleware(app) + """ + + def __init__(self, app: WSGIApplication) -> None: + self.app = app + + def check_environ(self, environ: WSGIEnvironment) -> None: + if type(environ) is not dict: + warn( + "WSGI environment is not a standard Python dict.", + WSGIWarning, + stacklevel=4, + ) + for key in ( + "REQUEST_METHOD", + "SERVER_NAME", + "SERVER_PORT", + "wsgi.version", + "wsgi.input", + "wsgi.errors", + "wsgi.multithread", + "wsgi.multiprocess", + "wsgi.run_once", + ): + if key not in environ: + warn( + f"Required environment key {key!r} not found", + WSGIWarning, + stacklevel=3, + ) + if environ["wsgi.version"] != (1, 0): + warn("Environ is not a WSGI 1.0 environ.", WSGIWarning, stacklevel=3) + + script_name = environ.get("SCRIPT_NAME", "") + path_info = environ.get("PATH_INFO", "") + + if script_name and script_name[0] != "/": + warn( + f"'SCRIPT_NAME' does not start with a slash: {script_name!r}", + WSGIWarning, + stacklevel=3, + ) + + if path_info and path_info[0] != "/": + warn( + f"'PATH_INFO' does not start with a slash: {path_info!r}", + WSGIWarning, + stacklevel=3, + ) + + def check_start_response( + self, + status: str, + headers: list[tuple[str, str]], + exc_info: None | (tuple[type[BaseException], BaseException, TracebackType]), + ) -> tuple[int, Headers]: + check_type("status", status, str) + status_code_str = status.split(None, 1)[0] + + if len(status_code_str) != 3 or not status_code_str.isdecimal(): + warn("Status code must be three digits.", WSGIWarning, stacklevel=3) + + if len(status) < 4 or status[3] != " ": + warn( + f"Invalid value for status {status!r}. 
Valid status strings are three" + " digits, a space and a status explanation.", + WSGIWarning, + stacklevel=3, + ) + + status_code = int(status_code_str) + + if status_code < 100: + warn("Status code < 100 detected.", WSGIWarning, stacklevel=3) + + if type(headers) is not list: + warn("Header list is not a list.", WSGIWarning, stacklevel=3) + + for item in headers: + if type(item) is not tuple or len(item) != 2: + warn("Header items must be 2-item tuples.", WSGIWarning, stacklevel=3) + name, value = item + if type(name) is not str or type(value) is not str: + warn( + "Header keys and values must be strings.", WSGIWarning, stacklevel=3 + ) + if name.lower() == "status": + warn( + "The status header is not supported due to" + " conflicts with the CGI spec.", + WSGIWarning, + stacklevel=3, + ) + + if exc_info is not None and not isinstance(exc_info, tuple): + warn("Invalid value for exc_info.", WSGIWarning, stacklevel=3) + + headers = Headers(headers) + self.check_headers(headers) + + return status_code, headers + + def check_headers(self, headers: Headers) -> None: + etag = headers.get("etag") + + if etag is not None: + if etag.startswith(("W/", "w/")): + if etag.startswith("w/"): + warn( + "Weak etag indicator should be upper case.", + HTTPWarning, + stacklevel=4, + ) + + etag = etag[2:] + + if not (etag[:1] == etag[-1:] == '"'): + warn("Unquoted etag emitted.", HTTPWarning, stacklevel=4) + + location = headers.get("location") + + if location is not None: + if not urlparse(location).netloc: + warn( + "Absolute URLs required for location header.", + HTTPWarning, + stacklevel=4, + ) + + def check_iterator(self, app_iter: t.Iterable[bytes]) -> None: + if isinstance(app_iter, str): + warn( + "The application returned a string. The response will send one" + " character at a time to the client, which will kill performance." + " Return a list or iterable instead.", + WSGIWarning, + stacklevel=3, + ) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Iterable[bytes]: + if len(args) != 2: + warn("A WSGI app takes two arguments.", WSGIWarning, stacklevel=2) + + if kwargs: + warn( + "A WSGI app does not take keyword arguments.", WSGIWarning, stacklevel=2 + ) + + environ: WSGIEnvironment = args[0] + start_response: StartResponse = args[1] + + self.check_environ(environ) + environ["wsgi.input"] = InputStream(environ["wsgi.input"]) + environ["wsgi.errors"] = ErrorStream(environ["wsgi.errors"]) + + # Hook our own file wrapper in so that applications will always + # iterate to the end and we can check the content length. 
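+        # Servers often install a sendfile-style wsgi.file_wrapper that would
+        # bypass the response iterable entirely; substituting werkzeug's plain
+        # FileWrapper keeps every chunk flowing through GuardedIterator below.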
+ environ["wsgi.file_wrapper"] = FileWrapper + + headers_set: list[t.Any] = [] + chunks: list[int] = [] + + def checking_start_response( + *args: t.Any, **kwargs: t.Any + ) -> t.Callable[[bytes], None]: + if len(args) not in {2, 3}: + warn( + f"Invalid number of arguments: {len(args)}, expected 2 or 3.", + WSGIWarning, + stacklevel=2, + ) + + if kwargs: + warn("'start_response' does not take keyword arguments.", WSGIWarning) + + status: str = args[0] + headers: list[tuple[str, str]] = args[1] + exc_info: None | ( + tuple[type[BaseException], BaseException, TracebackType] + ) = (args[2] if len(args) == 3 else None) + + headers_set[:] = self.check_start_response(status, headers, exc_info) + return GuardedWrite(start_response(status, headers, exc_info), chunks) + + app_iter = self.app(environ, t.cast("StartResponse", checking_start_response)) + self.check_iterator(app_iter) + return GuardedIterator( + app_iter, t.cast(t.Tuple[int, Headers], headers_set), chunks + ) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/middleware/profiler.py b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/profiler.py new file mode 100644 index 0000000000000000000000000000000000000000..2d806154c463741414363009e0781a2300821796 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/profiler.py @@ -0,0 +1,141 @@ +""" +Application Profiler +==================== + +This module provides a middleware that profiles each request with the +:mod:`cProfile` module. This can help identify bottlenecks in your code +that may be slowing down your application. + +.. autoclass:: ProfilerMiddleware + +:copyright: 2007 Pallets +:license: BSD-3-Clause +""" +from __future__ import annotations + +import os.path +import sys +import time +import typing as t +from pstats import Stats + +try: + from cProfile import Profile +except ImportError: + from profile import Profile # type: ignore + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class ProfilerMiddleware: + """Wrap a WSGI application and profile the execution of each + request. Responses are buffered so that timings are more exact. + + If ``stream`` is given, :class:`pstats.Stats` are written to it + after each request. If ``profile_dir`` is given, :mod:`cProfile` + data files are saved to that directory, one file per request. + + The filename can be customized by passing ``filename_format``. If + it is a string, it will be formatted using :meth:`str.format` with + the following fields available: + + - ``{method}`` - The request method; GET, POST, etc. + - ``{path}`` - The request path or 'root' should one not exist. + - ``{elapsed}`` - The elapsed time of the request. + - ``{time}`` - The time of the request. + + If it is a callable, it will be called with the WSGI ``environ`` + dict and should return a filename. + + :param app: The WSGI application to wrap. + :param stream: Write stats to this stream. Disable with ``None``. + :param sort_by: A tuple of columns to sort stats by. See + :meth:`pstats.Stats.sort_stats`. + :param restrictions: A tuple of restrictions to filter stats by. See + :meth:`pstats.Stats.print_stats`. + :param profile_dir: Save profile data files to this directory. + :param filename_format: Format string for profile data file names, + or a callable returning a name. See explanation above. + + .. 
code-block:: python + + from werkzeug.middleware.profiler import ProfilerMiddleware + app = ProfilerMiddleware(app) + + .. versionchanged:: 0.15 + Stats are written even if ``profile_dir`` is given, and can be + disable by passing ``stream=None``. + + .. versionadded:: 0.15 + Added ``filename_format``. + + .. versionadded:: 0.9 + Added ``restrictions`` and ``profile_dir``. + """ + + def __init__( + self, + app: WSGIApplication, + stream: t.IO[str] | None = sys.stdout, + sort_by: t.Iterable[str] = ("time", "calls"), + restrictions: t.Iterable[str | int | float] = (), + profile_dir: str | None = None, + filename_format: str = "{method}.{path}.{elapsed:.0f}ms.{time:.0f}.prof", + ) -> None: + self._app = app + self._stream = stream + self._sort_by = sort_by + self._restrictions = restrictions + self._profile_dir = profile_dir + self._filename_format = filename_format + + def __call__( + self, environ: WSGIEnvironment, start_response: StartResponse + ) -> t.Iterable[bytes]: + response_body: list[bytes] = [] + + def catching_start_response(status, headers, exc_info=None): # type: ignore + start_response(status, headers, exc_info) + return response_body.append + + def runapp() -> None: + app_iter = self._app( + environ, t.cast("StartResponse", catching_start_response) + ) + response_body.extend(app_iter) + + if hasattr(app_iter, "close"): + app_iter.close() + + profile = Profile() + start = time.time() + profile.runcall(runapp) + body = b"".join(response_body) + elapsed = time.time() - start + + if self._profile_dir is not None: + if callable(self._filename_format): + filename = self._filename_format(environ) + else: + filename = self._filename_format.format( + method=environ["REQUEST_METHOD"], + path=environ["PATH_INFO"].strip("/").replace("/", ".") or "root", + elapsed=elapsed * 1000.0, + time=time.time(), + ) + filename = os.path.join(self._profile_dir, filename) + profile.dump_stats(filename) + + if self._stream is not None: + stats = Stats(profile, stream=self._stream) + stats.sort_stats(*self._sort_by) + print("-" * 80, file=self._stream) + path_info = environ.get("PATH_INFO", "") + print(f"PATH: {path_info!r}", file=self._stream) + stats.print_stats(*self._restrictions) + print(f"{'-' * 80}\n", file=self._stream) + + return [body] diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/middleware/proxy_fix.py b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/proxy_fix.py new file mode 100644 index 0000000000000000000000000000000000000000..8dfbb36c0b27ac915845111269dbbadc12e88b06 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/proxy_fix.py @@ -0,0 +1,182 @@ +""" +X-Forwarded-For Proxy Fix +========================= + +This module provides a middleware that adjusts the WSGI environ based on +``X-Forwarded-`` headers that proxies in front of an application may +set. + +When an application is running behind a proxy server, WSGI may see the +request as coming from that server rather than the real client. Proxies +set various headers to track where the request actually came from. + +This middleware should only be used if the application is actually +behind such a proxy, and should be configured with the number of proxies +that are chained in front of it. Not all proxies set all the headers. +Since incoming headers can be faked, you must set how many proxies are +setting each header so the middleware knows what to trust. + +.. 
autoclass:: ProxyFix + +:copyright: 2007 Pallets +:license: BSD-3-Clause +""" +from __future__ import annotations + +import typing as t + +from ..http import parse_list_header + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class ProxyFix: + """Adjust the WSGI environ based on ``X-Forwarded-`` that proxies in + front of the application may set. + + - ``X-Forwarded-For`` sets ``REMOTE_ADDR``. + - ``X-Forwarded-Proto`` sets ``wsgi.url_scheme``. + - ``X-Forwarded-Host`` sets ``HTTP_HOST``, ``SERVER_NAME``, and + ``SERVER_PORT``. + - ``X-Forwarded-Port`` sets ``HTTP_HOST`` and ``SERVER_PORT``. + - ``X-Forwarded-Prefix`` sets ``SCRIPT_NAME``. + + You must tell the middleware how many proxies set each header so it + knows what values to trust. It is a security issue to trust values + that came from the client rather than a proxy. + + The original values of the headers are stored in the WSGI + environ as ``werkzeug.proxy_fix.orig``, a dict. + + :param app: The WSGI application to wrap. + :param x_for: Number of values to trust for ``X-Forwarded-For``. + :param x_proto: Number of values to trust for ``X-Forwarded-Proto``. + :param x_host: Number of values to trust for ``X-Forwarded-Host``. + :param x_port: Number of values to trust for ``X-Forwarded-Port``. + :param x_prefix: Number of values to trust for + ``X-Forwarded-Prefix``. + + .. code-block:: python + + from werkzeug.middleware.proxy_fix import ProxyFix + # App is behind one proxy that sets the -For and -Host headers. + app = ProxyFix(app, x_for=1, x_host=1) + + .. versionchanged:: 1.0 + The ``num_proxies`` argument and attribute; the ``get_remote_addr`` method; and + the environ keys ``orig_remote_addr``, ``orig_wsgi_url_scheme``, and + ``orig_http_host`` were removed. + + .. versionchanged:: 0.15 + All headers support multiple values. Each header is configured with a separate + number of trusted proxies. + + .. versionchanged:: 0.15 + Original WSGI environ values are stored in the ``werkzeug.proxy_fix.orig`` dict. + + .. versionchanged:: 0.15 + Support ``X-Forwarded-Port`` and ``X-Forwarded-Prefix``. + + .. versionchanged:: 0.15 + ``X-Forwarded-Host`` and ``X-Forwarded-Port`` modify + ``SERVER_NAME`` and ``SERVER_PORT``. + """ + + def __init__( + self, + app: WSGIApplication, + x_for: int = 1, + x_proto: int = 1, + x_host: int = 0, + x_port: int = 0, + x_prefix: int = 0, + ) -> None: + self.app = app + self.x_for = x_for + self.x_proto = x_proto + self.x_host = x_host + self.x_port = x_port + self.x_prefix = x_prefix + + def _get_real_value(self, trusted: int, value: str | None) -> str | None: + """Get the real value from a list header based on the configured + number of trusted proxies. + + :param trusted: Number of values to trust in the header. + :param value: Comma separated list header value to parse. + :return: The real value, or ``None`` if there are fewer values + than the number of trusted proxies. + + .. versionchanged:: 1.0 + Renamed from ``_get_trusted_comma``. + + .. versionadded:: 0.15 + """ + if not (trusted and value): + return None + values = parse_list_header(value) + if len(values) >= trusted: + return values[-trusted] + return None + + def __call__( + self, environ: WSGIEnvironment, start_response: StartResponse + ) -> t.Iterable[bytes]: + """Modify the WSGI environ based on the various ``Forwarded`` + headers before calling the wrapped application. 
Store the + original environ values in ``werkzeug.proxy_fix.orig_{key}``. + """ + environ_get = environ.get + orig_remote_addr = environ_get("REMOTE_ADDR") + orig_wsgi_url_scheme = environ_get("wsgi.url_scheme") + orig_http_host = environ_get("HTTP_HOST") + environ.update( + { + "werkzeug.proxy_fix.orig": { + "REMOTE_ADDR": orig_remote_addr, + "wsgi.url_scheme": orig_wsgi_url_scheme, + "HTTP_HOST": orig_http_host, + "SERVER_NAME": environ_get("SERVER_NAME"), + "SERVER_PORT": environ_get("SERVER_PORT"), + "SCRIPT_NAME": environ_get("SCRIPT_NAME"), + } + } + ) + + x_for = self._get_real_value(self.x_for, environ_get("HTTP_X_FORWARDED_FOR")) + if x_for: + environ["REMOTE_ADDR"] = x_for + + x_proto = self._get_real_value( + self.x_proto, environ_get("HTTP_X_FORWARDED_PROTO") + ) + if x_proto: + environ["wsgi.url_scheme"] = x_proto + + x_host = self._get_real_value(self.x_host, environ_get("HTTP_X_FORWARDED_HOST")) + if x_host: + environ["HTTP_HOST"] = environ["SERVER_NAME"] = x_host + # "]" to check for IPv6 address without port + if ":" in x_host and not x_host.endswith("]"): + environ["SERVER_NAME"], environ["SERVER_PORT"] = x_host.rsplit(":", 1) + + x_port = self._get_real_value(self.x_port, environ_get("HTTP_X_FORWARDED_PORT")) + if x_port: + host = environ.get("HTTP_HOST") + if host: + # "]" to check for IPv6 address without port + if ":" in host and not host.endswith("]"): + host = host.rsplit(":", 1)[0] + environ["HTTP_HOST"] = f"{host}:{x_port}" + environ["SERVER_PORT"] = x_port + + x_prefix = self._get_real_value( + self.x_prefix, environ_get("HTTP_X_FORWARDED_PREFIX") + ) + if x_prefix: + environ["SCRIPT_NAME"] = x_prefix + + return self.app(environ, start_response) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/middleware/shared_data.py b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/shared_data.py new file mode 100644 index 0000000000000000000000000000000000000000..e3ec7cab86f09b28964334c22cb134b043992575 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/middleware/shared_data.py @@ -0,0 +1,282 @@ +""" +Serve Shared Static Files +========================= + +.. autoclass:: SharedDataMiddleware + :members: is_allowed + +:copyright: 2007 Pallets +:license: BSD-3-Clause +""" +from __future__ import annotations + +import importlib.util +import mimetypes +import os +import posixpath +import typing as t +from datetime import datetime +from datetime import timezone +from io import BytesIO +from time import time +from zlib import adler32 + +from ..http import http_date +from ..http import is_resource_modified +from ..security import safe_join +from ..utils import get_content_type +from ..wsgi import get_path_info +from ..wsgi import wrap_file + +_TOpener = t.Callable[[], t.Tuple[t.IO[bytes], datetime, int]] +_TLoader = t.Callable[[t.Optional[str]], t.Tuple[t.Optional[str], t.Optional[_TOpener]]] + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class SharedDataMiddleware: + + """A WSGI middleware which provides static content for development + environments or simple server setups. Its usage is quite simple:: + + import os + from werkzeug.middleware.shared_data import SharedDataMiddleware + + app = SharedDataMiddleware(app, { + '/shared': os.path.join(os.path.dirname(__file__), 'shared') + }) + + The contents of the folder ``./shared`` will now be available on + ``http://example.com/shared/``. 
This is pretty useful during development + because a standalone media server is not required. Files can also be + mounted on the root folder and still continue to use the application because + the shared data middleware forwards all unhandled requests to the + application, even if the requests are below one of the shared folders. + + If `pkg_resources` is available you can also tell the middleware to serve + files from package data:: + + app = SharedDataMiddleware(app, { + '/static': ('myapplication', 'static') + }) + + This will then serve the ``static`` folder in the `myapplication` + Python package. + + The optional `disallow` parameter can be a list of :func:`~fnmatch.fnmatch` + rules for files that are not accessible from the web. If `cache` is set to + `False` no caching headers are sent. + + Currently the middleware does not support non-ASCII filenames. If the + encoding on the file system happens to match the encoding of the URI it may + work but this could also be by accident. We strongly suggest using ASCII + only file names for static files. + + The middleware will guess the mimetype using the Python `mimetype` + module. If it's unable to figure out the charset it will fall back + to `fallback_mimetype`. + + :param app: the application to wrap. If you don't want to wrap an + application you can pass it :exc:`NotFound`. + :param exports: a list or dict of exported files and folders. + :param disallow: a list of :func:`~fnmatch.fnmatch` rules. + :param cache: enable or disable caching headers. + :param cache_timeout: the cache timeout in seconds for the headers. + :param fallback_mimetype: The fallback mimetype for unknown files. + + .. versionchanged:: 1.0 + The default ``fallback_mimetype`` is + ``application/octet-stream``. If a filename looks like a text + mimetype, the ``utf-8`` charset is added to it. + + .. versionadded:: 0.6 + Added ``fallback_mimetype``. + + .. versionchanged:: 0.5 + Added ``cache_timeout``. + """ + + def __init__( + self, + app: WSGIApplication, + exports: ( + dict[str, str | tuple[str, str]] + | t.Iterable[tuple[str, str | tuple[str, str]]] + ), + disallow: None = None, + cache: bool = True, + cache_timeout: int = 60 * 60 * 12, + fallback_mimetype: str = "application/octet-stream", + ) -> None: + self.app = app + self.exports: list[tuple[str, _TLoader]] = [] + self.cache = cache + self.cache_timeout = cache_timeout + + if isinstance(exports, dict): + exports = exports.items() + + for key, value in exports: + if isinstance(value, tuple): + loader = self.get_package_loader(*value) + elif isinstance(value, str): + if os.path.isfile(value): + loader = self.get_file_loader(value) + else: + loader = self.get_directory_loader(value) + else: + raise TypeError(f"unknown def {value!r}") + + self.exports.append((key, loader)) + + if disallow is not None: + from fnmatch import fnmatch + + self.is_allowed = lambda x: not fnmatch(x, disallow) + + self.fallback_mimetype = fallback_mimetype + + def is_allowed(self, filename: str) -> bool: + """Subclasses can override this method to disallow the access to + certain files. However by providing `disallow` in the constructor + this method is overwritten. 
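+
+        A sketch of such an override (pattern illustrative)::
+
+            import os
+
+            class NoDotfiles(SharedDataMiddleware):
+                def is_allowed(self, filename):
+                    return not os.path.basename(filename).startswith(".")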
+ """ + return True + + def _opener(self, filename: str) -> _TOpener: + return lambda: ( + open(filename, "rb"), + datetime.fromtimestamp(os.path.getmtime(filename), tz=timezone.utc), + int(os.path.getsize(filename)), + ) + + def get_file_loader(self, filename: str) -> _TLoader: + return lambda x: (os.path.basename(filename), self._opener(filename)) + + def get_package_loader(self, package: str, package_path: str) -> _TLoader: + load_time = datetime.now(timezone.utc) + spec = importlib.util.find_spec(package) + reader = spec.loader.get_resource_reader(package) # type: ignore[union-attr] + + def loader( + path: str | None, + ) -> tuple[str | None, _TOpener | None]: + if path is None: + return None, None + + path = safe_join(package_path, path) + + if path is None: + return None, None + + basename = posixpath.basename(path) + + try: + resource = reader.open_resource(path) + except OSError: + return None, None + + if isinstance(resource, BytesIO): + return ( + basename, + lambda: (resource, load_time, len(resource.getvalue())), + ) + + return ( + basename, + lambda: ( + resource, + datetime.fromtimestamp( + os.path.getmtime(resource.name), tz=timezone.utc + ), + os.path.getsize(resource.name), + ), + ) + + return loader + + def get_directory_loader(self, directory: str) -> _TLoader: + def loader( + path: str | None, + ) -> tuple[str | None, _TOpener | None]: + if path is not None: + path = safe_join(directory, path) + + if path is None: + return None, None + else: + path = directory + + if os.path.isfile(path): + return os.path.basename(path), self._opener(path) + + return None, None + + return loader + + def generate_etag(self, mtime: datetime, file_size: int, real_filename: str) -> str: + real_filename = os.fsencode(real_filename) + timestamp = mtime.timestamp() + checksum = adler32(real_filename) & 0xFFFFFFFF + return f"wzsdm-{timestamp}-{file_size}-{checksum}" + + def __call__( + self, environ: WSGIEnvironment, start_response: StartResponse + ) -> t.Iterable[bytes]: + path = get_path_info(environ) + file_loader = None + + for search_path, loader in self.exports: + if search_path == path: + real_filename, file_loader = loader(None) + + if file_loader is not None: + break + + if not search_path.endswith("/"): + search_path += "/" + + if path.startswith(search_path): + real_filename, file_loader = loader(path[len(search_path) :]) + + if file_loader is not None: + break + + if file_loader is None or not self.is_allowed(real_filename): # type: ignore + return self.app(environ, start_response) + + guessed_type = mimetypes.guess_type(real_filename) # type: ignore + mime_type = get_content_type(guessed_type[0] or self.fallback_mimetype, "utf-8") + f, mtime, file_size = file_loader() + + headers = [("Date", http_date())] + + if self.cache: + timeout = self.cache_timeout + etag = self.generate_etag(mtime, file_size, real_filename) # type: ignore + headers += [ + ("Etag", f'"{etag}"'), + ("Cache-Control", f"max-age={timeout}, public"), + ] + + if not is_resource_modified(environ, etag, last_modified=mtime): + f.close() + start_response("304 Not Modified", headers) + return [] + + headers.append(("Expires", http_date(time() + timeout))) + else: + headers.append(("Cache-Control", "public")) + + headers.extend( + ( + ("Content-Type", mime_type), + ("Content-Length", str(file_size)), + ("Last-Modified", http_date(mtime)), + ) + ) + start_response("200 OK", headers) + return wrap_file(environ, f) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/py.typed 
b/backend/test/lib/python3.8/site-packages/werkzeug/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/routing/__init__.py b/backend/test/lib/python3.8/site-packages/werkzeug/routing/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..84b043fdf4611c6ee879eaeb5392cb9b2d55954b --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/routing/__init__.py @@ -0,0 +1,133 @@ +"""When it comes to combining multiple controller or view functions +(however you want to call them) you need a dispatcher. A simple way +would be applying regular expression tests on the ``PATH_INFO`` and +calling registered callback functions that return the value then. + +This module implements a much more powerful system than simple regular +expression matching because it can also convert values in the URLs and +build URLs. + +Here a simple example that creates a URL map for an application with +two subdomains (www and kb) and some URL rules: + +.. code-block:: python + + m = Map([ + # Static URLs + Rule('/', endpoint='static/index'), + Rule('/about', endpoint='static/about'), + Rule('/help', endpoint='static/help'), + # Knowledge Base + Subdomain('kb', [ + Rule('/', endpoint='kb/index'), + Rule('/browse/', endpoint='kb/browse'), + Rule('/browse/<int:id>/', endpoint='kb/browse'), + Rule('/browse/<int:id>/<int:page>', endpoint='kb/browse') + ]) + ], default_subdomain='www') + +If the application doesn't use subdomains it's perfectly fine to not set +the default subdomain and not use the `Subdomain` rule factory. The +endpoint in the rules can be anything, for example import paths or +unique identifiers. The WSGI application can use those endpoints to get the +handler for that URL. It doesn't have to be a string at all but it's +recommended. + +Now it's possible to create a URL adapter for one of the subdomains and +build URLs: + +.. code-block:: python + + c = m.bind('example.com') + + c.build("kb/browse", dict(id=42)) + 'http://kb.example.com/browse/42/' + + c.build("kb/browse", dict()) + 'http://kb.example.com/browse/' + + c.build("kb/browse", dict(id=42, page=3)) + 'http://kb.example.com/browse/42/3' + + c.build("static/about") + '/about' + + c.build("static/index", force_external=True) + 'http://www.example.com/' + + c = m.bind('example.com', subdomain='kb') + + c.build("static/about") + 'http://www.example.com/about' + +The first argument to bind is the server name *without* the subdomain. +Per default it will assume that the script is mounted on the root, but +often that's not the case so you can provide the real mount point as +second argument: + +.. code-block:: python + + c = m.bind('example.com', '/applications/example') + +The third argument can be the subdomain, if not given the default +subdomain is used. For more details about binding have a look at the +documentation of the `MapAdapter`. + +And here is how you can match URLs: + +.. 
code-block:: python + + c = m.bind('example.com') + + c.match("/") + ('static/index', {}) + + c.match("/about") + ('static/about', {}) + + c = m.bind('example.com', '/', 'kb') + + c.match("/") + ('kb/index', {}) + + c.match("/browse/42/23") + ('kb/browse', {'id': 42, 'page': 23}) + +If matching fails you get a ``NotFound`` exception, if the rule thinks +it's a good idea to redirect (for example because the URL was defined +to have a slash at the end but the request was missing that slash) it +will raise a ``RequestRedirect`` exception. Both are subclasses of +``HTTPException`` so you can use those errors as responses in the +application. + +If matching succeeded but the URL rule was incompatible to the given +method (for example there were only rules for ``GET`` and ``HEAD`` but +routing tried to match a ``POST`` request) a ``MethodNotAllowed`` +exception is raised. +""" +from .converters import AnyConverter as AnyConverter +from .converters import BaseConverter as BaseConverter +from .converters import FloatConverter as FloatConverter +from .converters import IntegerConverter as IntegerConverter +from .converters import PathConverter as PathConverter +from .converters import UnicodeConverter as UnicodeConverter +from .converters import UUIDConverter as UUIDConverter +from .converters import ValidationError as ValidationError +from .exceptions import BuildError as BuildError +from .exceptions import NoMatch as NoMatch +from .exceptions import RequestAliasRedirect as RequestAliasRedirect +from .exceptions import RequestPath as RequestPath +from .exceptions import RequestRedirect as RequestRedirect +from .exceptions import RoutingException as RoutingException +from .exceptions import WebsocketMismatch as WebsocketMismatch +from .map import Map as Map +from .map import MapAdapter as MapAdapter +from .matcher import StateMachineMatcher as StateMachineMatcher +from .rules import EndpointPrefix as EndpointPrefix +from .rules import parse_converter_args as parse_converter_args +from .rules import Rule as Rule +from .rules import RuleFactory as RuleFactory +from .rules import RuleTemplate as RuleTemplate +from .rules import RuleTemplateFactory as RuleTemplateFactory +from .rules import Subdomain as Subdomain +from .rules import Submount as Submount diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..97960ca139bfc59dd956079b1dfaca6119fb6dba Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/converters.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/converters.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0dc9c64af244325c530c5245643b3474265ba7cd Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/converters.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/exceptions.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/exceptions.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..18193b4f9eeb9e5c2b90a12d4f9f8d3051aa4b00 Binary files /dev/null and 
b/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/exceptions.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/map.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/map.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..359b775268367be97d8982212899fbafae11f9d1 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/map.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/matcher.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/matcher.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6abacb99d2137c442072333291e8087ebe14f606 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/matcher.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/rules.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/rules.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..92125274e1e3934f37533e57efa2ebcc3d1172f8 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/routing/__pycache__/rules.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/routing/converters.py b/backend/test/lib/python3.8/site-packages/werkzeug/routing/converters.py new file mode 100644 index 0000000000000000000000000000000000000000..c59e2abcb4cb266bc093e02e4f876d81ca4a7680 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/routing/converters.py @@ -0,0 +1,270 @@ +from __future__ import annotations + +import re +import typing as t +import uuid +import warnings +from urllib.parse import quote + +if t.TYPE_CHECKING: + from .map import Map + + +class ValidationError(ValueError): + """Validation error. If a rule converter raises this exception the rule + does not match the current URL and the next URL is tried. + """ + + +class BaseConverter: + """Base class for all converters. + + .. versionchanged:: 2.3 + ``part_isolating`` defaults to ``False`` if ``regex`` contains a ``/``. + """ + + regex = "[^/]+" + weight = 100 + part_isolating = True + + def __init_subclass__(cls, **kwargs: t.Any) -> None: + super().__init_subclass__(**kwargs) + + # If the converter isn't inheriting its regex, disable part_isolating by default + # if the regex contains a / character. + if "regex" in cls.__dict__ and "part_isolating" not in cls.__dict__: + cls.part_isolating = "/" not in cls.regex + + def __init__(self, map: Map, *args: t.Any, **kwargs: t.Any) -> None: + self.map = map + + def to_python(self, value: str) -> t.Any: + return value + + def to_url(self, value: t.Any) -> str: + if isinstance(value, (bytes, bytearray)): + warnings.warn( + "Passing bytes as a URL value is deprecated and will not be supported" + " in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=7, + ) + return quote(value, safe="!$&'()*+,/:;=@") + + # safe = https://url.spec.whatwg.org/#url-path-segment-string + return quote(str(value), encoding=self.map.charset, safe="!$&'()*+,/:;=@") + + +class UnicodeConverter(BaseConverter): + """This converter is the default converter and accepts any string but + only one path segment. Thus the string can not include a slash. + + This is the default validator. 
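+    Any variable part without an explicit converter falls back to this
+    one, so ``<page>`` behaves like ``<string:page>``.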
+ + Example:: + + Rule('/pages/<page>'), + Rule('/<string(length=2):lang_code>') + + :param map: the :class:`Map`. + :param minlength: the minimum length of the string. Must be greater + or equal 1. + :param maxlength: the maximum length of the string. + :param length: the exact length of the string. + """ + + def __init__( + self, + map: Map, + minlength: int = 1, + maxlength: int | None = None, + length: int | None = None, + ) -> None: + super().__init__(map) + if length is not None: + length_regex = f"{{{int(length)}}}" + else: + if maxlength is None: + maxlength_value = "" + else: + maxlength_value = str(int(maxlength)) + length_regex = f"{{{int(minlength)},{maxlength_value}}}" + self.regex = f"[^/]{length_regex}" + + +class AnyConverter(BaseConverter): + """Matches one of the items provided. Items can either be Python + identifiers or strings:: + + Rule('/<any(about, help, imprint, class, "foo,bar"):page_name>') + + :param map: the :class:`Map`. + :param items: this function accepts the possible items as positional + arguments. + + .. versionchanged:: 2.2 + Value is validated when building a URL. + """ + + def __init__(self, map: Map, *items: str) -> None: + super().__init__(map) + self.items = set(items) + self.regex = f"(?:{'|'.join([re.escape(x) for x in items])})" + + def to_url(self, value: t.Any) -> str: + if value in self.items: + return str(value) + + valid_values = ", ".join(f"'{item}'" for item in sorted(self.items)) + raise ValueError(f"'{value}' is not one of {valid_values}") + + +class PathConverter(BaseConverter): + """Like the default :class:`UnicodeConverter`, but it also matches + slashes. This is useful for wikis and similar applications:: + + Rule('/<path:wikipage>') + Rule('/<path:wikipage>/edit') + + :param map: the :class:`Map`. + """ + + regex = "[^/].*?" + weight = 200 + + +class NumberConverter(BaseConverter): + """Baseclass for `IntegerConverter` and `FloatConverter`. + + :internal: + """ + + weight = 50 + num_convert: t.Callable = int + + def __init__( + self, + map: Map, + fixed_digits: int = 0, + min: int | None = None, + max: int | None = None, + signed: bool = False, + ) -> None: + if signed: + self.regex = self.signed_regex + super().__init__(map) + self.fixed_digits = fixed_digits + self.min = min + self.max = max + self.signed = signed + + def to_python(self, value: str) -> t.Any: + if self.fixed_digits and len(value) != self.fixed_digits: + raise ValidationError() + value = self.num_convert(value) + if (self.min is not None and value < self.min) or ( + self.max is not None and value > self.max + ): + raise ValidationError() + return value + + def to_url(self, value: t.Any) -> str: + value = str(self.num_convert(value)) + if self.fixed_digits: + value = value.zfill(self.fixed_digits) + return value + + @property + def signed_regex(self) -> str: + return f"-?{self.regex}" + + +class IntegerConverter(NumberConverter): + """This converter only accepts integer values:: + + Rule("/page/<int:page>") + + By default it only accepts unsigned, positive values. The ``signed`` + parameter will enable signed, negative values. :: + + Rule("/page/<int(signed=True):page>") + + :param map: The :class:`Map`. + :param fixed_digits: The number of fixed digits in the URL. If you + set this to ``4`` for example, the rule will only match if the + URL looks like ``/0001/``. The default is variable length. + :param min: The minimal value. + :param max: The maximal value. + :param signed: Allow signed (negative) values. + + .. versionadded:: 0.15 + The ``signed`` parameter. 
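+
+    A short matching sketch (the map and endpoint name are invented for
+    illustration)::
+
+        m = Map([Rule("/page/<int(fixed_digits=4):page>", endpoint="page")])
+        urls = m.bind("example.com")
+        urls.match("/page/0001")  # -> ('page', {'page': 1})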
+ """ + + regex = r"\d+" + + +class FloatConverter(NumberConverter): + """This converter only accepts floating point values:: + + Rule("/probability/<float:probability>") + + By default it only accepts unsigned, positive values. The ``signed`` + parameter will enable signed, negative values. :: + + Rule("/offset/<float(signed=True):offset>") + + :param map: The :class:`Map`. + :param min: The minimal value. + :param max: The maximal value. + :param signed: Allow signed (negative) values. + + .. versionadded:: 0.15 + The ``signed`` parameter. + """ + + regex = r"\d+\.\d+" + num_convert = float + + def __init__( + self, + map: Map, + min: float | None = None, + max: float | None = None, + signed: bool = False, + ) -> None: + super().__init__(map, min=min, max=max, signed=signed) # type: ignore + + +class UUIDConverter(BaseConverter): + """This converter only accepts UUID strings:: + + Rule('/object/<uuid:identifier>') + + .. versionadded:: 0.10 + + :param map: the :class:`Map`. + """ + + regex = ( + r"[A-Fa-f0-9]{8}-[A-Fa-f0-9]{4}-" + r"[A-Fa-f0-9]{4}-[A-Fa-f0-9]{4}-[A-Fa-f0-9]{12}" + ) + + def to_python(self, value: str) -> uuid.UUID: + return uuid.UUID(value) + + def to_url(self, value: uuid.UUID) -> str: + return str(value) + + +#: the default converter mapping for the map. +DEFAULT_CONVERTERS: t.Mapping[str, type[BaseConverter]] = { + "default": UnicodeConverter, + "string": UnicodeConverter, + "any": AnyConverter, + "path": PathConverter, + "int": IntegerConverter, + "float": FloatConverter, + "uuid": UUIDConverter, +} diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/routing/exceptions.py b/backend/test/lib/python3.8/site-packages/werkzeug/routing/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..9d0a5281b8c4ed5f9f4d309ad0831daee06c401a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/routing/exceptions.py @@ -0,0 +1,148 @@ +from __future__ import annotations + +import difflib +import typing as t + +from ..exceptions import BadRequest +from ..exceptions import HTTPException +from ..utils import cached_property +from ..utils import redirect + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIEnvironment + from .map import MapAdapter + from .rules import Rule + from ..wrappers.request import Request + from ..wrappers.response import Response + + +class RoutingException(Exception): + """Special exceptions that require the application to redirect, notifying + about missing urls, etc. + + :internal: + """ + + +class RequestRedirect(HTTPException, RoutingException): + """Raise if the map requests a redirect. This is for example the case if + `strict_slashes` are activated and an url that requires a trailing slash. + + The attribute `new_url` contains the absolute destination url. 
+ """ + + code = 308 + + def __init__(self, new_url: str) -> None: + super().__init__(new_url) + self.new_url = new_url + + def get_response( + self, + environ: WSGIEnvironment | Request | None = None, + scope: dict | None = None, + ) -> Response: + return redirect(self.new_url, self.code) + + +class RequestPath(RoutingException): + """Internal exception.""" + + __slots__ = ("path_info",) + + def __init__(self, path_info: str) -> None: + super().__init__() + self.path_info = path_info + + +class RequestAliasRedirect(RoutingException): # noqa: B903 + """This rule is an alias and wants to redirect to the canonical URL.""" + + def __init__(self, matched_values: t.Mapping[str, t.Any], endpoint: str) -> None: + super().__init__() + self.matched_values = matched_values + self.endpoint = endpoint + + +class BuildError(RoutingException, LookupError): + """Raised if the build system cannot find a URL for an endpoint with the + values provided. + """ + + def __init__( + self, + endpoint: str, + values: t.Mapping[str, t.Any], + method: str | None, + adapter: MapAdapter | None = None, + ) -> None: + super().__init__(endpoint, values, method) + self.endpoint = endpoint + self.values = values + self.method = method + self.adapter = adapter + + @cached_property + def suggested(self) -> Rule | None: + return self.closest_rule(self.adapter) + + def closest_rule(self, adapter: MapAdapter | None) -> Rule | None: + def _score_rule(rule: Rule) -> float: + return sum( + [ + 0.98 + * difflib.SequenceMatcher( + None, rule.endpoint, self.endpoint + ).ratio(), + 0.01 * bool(set(self.values or ()).issubset(rule.arguments)), + 0.01 * bool(rule.methods and self.method in rule.methods), + ] + ) + + if adapter and adapter.map._rules: + return max(adapter.map._rules, key=_score_rule) + + return None + + def __str__(self) -> str: + message = [f"Could not build url for endpoint {self.endpoint!r}"] + if self.method: + message.append(f" ({self.method!r})") + if self.values: + message.append(f" with values {sorted(self.values)!r}") + message.append(".") + if self.suggested: + if self.endpoint == self.suggested.endpoint: + if ( + self.method + and self.suggested.methods is not None + and self.method not in self.suggested.methods + ): + message.append( + " Did you mean to use methods" + f" {sorted(self.suggested.methods)!r}?" + ) + missing_values = self.suggested.arguments.union( + set(self.suggested.defaults or ()) + ) - set(self.values.keys()) + if missing_values: + message.append( + f" Did you forget to specify values {sorted(missing_values)!r}?" + ) + else: + message.append(f" Did you mean {self.suggested.endpoint!r} instead?") + return "".join(message) + + +class WebsocketMismatch(BadRequest): + """The only matched rule is either a WebSocket and the request is + HTTP, or the rule is HTTP and the request is a WebSocket. 
+ """ + + +class NoMatch(Exception): + __slots__ = ("have_match_for", "websocket_mismatch") + + def __init__(self, have_match_for: set[str], websocket_mismatch: bool) -> None: + self.have_match_for = have_match_for + self.websocket_mismatch = websocket_mismatch diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/routing/map.py b/backend/test/lib/python3.8/site-packages/werkzeug/routing/map.py new file mode 100644 index 0000000000000000000000000000000000000000..0d02bb8b72d29b9ac25c44f19058905fd1d0528c --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/routing/map.py @@ -0,0 +1,977 @@ +from __future__ import annotations + +import typing as t +import warnings +from pprint import pformat +from threading import Lock +from urllib.parse import quote +from urllib.parse import urljoin +from urllib.parse import urlunsplit + +from .._internal import _get_environ +from .._internal import _wsgi_decoding_dance +from ..datastructures import ImmutableDict +from ..datastructures import MultiDict +from ..exceptions import BadHost +from ..exceptions import HTTPException +from ..exceptions import MethodNotAllowed +from ..exceptions import NotFound +from ..urls import _urlencode +from ..wsgi import get_host +from .converters import DEFAULT_CONVERTERS +from .exceptions import BuildError +from .exceptions import NoMatch +from .exceptions import RequestAliasRedirect +from .exceptions import RequestPath +from .exceptions import RequestRedirect +from .exceptions import WebsocketMismatch +from .matcher import StateMachineMatcher +from .rules import _simple_rule_re +from .rules import Rule + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + from .converters import BaseConverter + from .rules import RuleFactory + from ..wrappers.request import Request + + +class Map: + """The map class stores all the URL rules and some configuration + parameters. Some of the configuration values are only stored on the + `Map` instance since those affect all rules, others are just defaults + and can be overridden for each rule. Note that you have to specify all + arguments besides the `rules` as keyword arguments! + + :param rules: sequence of url rules for this map. + :param default_subdomain: The default subdomain for rules without a + subdomain defined. + :param charset: charset of the url. defaults to ``"utf-8"`` + :param strict_slashes: If a rule ends with a slash but the matched + URL does not, redirect to the URL with a trailing slash. + :param merge_slashes: Merge consecutive slashes when matching or + building URLs. Matches will redirect to the normalized URL. + Slashes in variable parts are not merged. + :param redirect_defaults: This will redirect to the default rule if it + wasn't visited that way. This helps creating + unique URLs. + :param converters: A dict of converters that adds additional converters + to the list of converters. If you redefine one + converter this will override the original one. + :param sort_parameters: If set to `True` the url parameters are sorted. + See `url_encode` for more details. + :param sort_key: The sort key function for `url_encode`. + :param encoding_errors: the error method to use for decoding + :param host_matching: if set to `True` it enables the host matching + feature and disables the subdomain one. If + enabled the `host` parameter to rules is used + instead of the `subdomain` one. + + .. 
versionchanged:: 2.3 + The ``charset`` and ``encoding_errors`` parameters are deprecated and will be + removed in Werkzeug 3.0. + + .. versionchanged:: 1.0 + If ``url_scheme`` is ``ws`` or ``wss``, only WebSocket rules will match. + + .. versionchanged:: 1.0 + The ``merge_slashes`` parameter was added. + + .. versionchanged:: 0.7 + The ``encoding_errors`` and ``host_matching`` parameters were added. + + .. versionchanged:: 0.5 + The ``sort_parameters`` and ``sort_key`` paramters were added. + """ + + #: A dict of default converters to be used. + default_converters = ImmutableDict(DEFAULT_CONVERTERS) + + #: The type of lock to use when updating. + #: + #: .. versionadded:: 1.0 + lock_class = Lock + + def __init__( + self, + rules: t.Iterable[RuleFactory] | None = None, + default_subdomain: str = "", + charset: str | None = None, + strict_slashes: bool = True, + merge_slashes: bool = True, + redirect_defaults: bool = True, + converters: t.Mapping[str, type[BaseConverter]] | None = None, + sort_parameters: bool = False, + sort_key: t.Callable[[t.Any], t.Any] | None = None, + encoding_errors: str | None = None, + host_matching: bool = False, + ) -> None: + self._matcher = StateMachineMatcher(merge_slashes) + self._rules_by_endpoint: dict[str, list[Rule]] = {} + self._remap = True + self._remap_lock = self.lock_class() + + self.default_subdomain = default_subdomain + + if charset is not None: + warnings.warn( + "The 'charset' parameter is deprecated and will be" + " removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + else: + charset = "utf-8" + + self.charset = charset + + if encoding_errors is not None: + warnings.warn( + "The 'encoding_errors' parameter is deprecated and will be" + " removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + else: + encoding_errors = "replace" + + self.encoding_errors = encoding_errors + self.strict_slashes = strict_slashes + self.merge_slashes = merge_slashes + self.redirect_defaults = redirect_defaults + self.host_matching = host_matching + + self.converters = self.default_converters.copy() + if converters: + self.converters.update(converters) + + self.sort_parameters = sort_parameters + self.sort_key = sort_key + + for rulefactory in rules or (): + self.add(rulefactory) + + def is_endpoint_expecting(self, endpoint: str, *arguments: str) -> bool: + """Iterate over all rules and check if the endpoint expects + the arguments provided. This is for example useful if you have + some URLs that expect a language code and others that do not and + you want to wrap the builder a bit so that the current language + code is automatically added if not provided but endpoints expect + it. + + :param endpoint: the endpoint to check. + :param arguments: this function accepts one or more arguments + as positional arguments. Each one of them is + checked. + """ + self.update() + arguments = set(arguments) + for rule in self._rules_by_endpoint[endpoint]: + if arguments.issubset(rule.arguments): + return True + return False + + @property + def _rules(self) -> list[Rule]: + return [rule for rules in self._rules_by_endpoint.values() for rule in rules] + + def iter_rules(self, endpoint: str | None = None) -> t.Iterator[Rule]: + """Iterate over all rules or the rules of an endpoint. + + :param endpoint: if provided only the rules for that endpoint + are returned. 
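+
+        For example, reusing the doctest map from :meth:`MapAdapter.match`
+        below::
+
+            for rule in m.iter_rules("downloads/show"):
+                print(rule.rule)  # /downloads/<int:id>
+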
+ :return: an iterator + """ + self.update() + if endpoint is not None: + return iter(self._rules_by_endpoint[endpoint]) + return iter(self._rules) + + def add(self, rulefactory: RuleFactory) -> None: + """Add a new rule or factory to the map and bind it. Requires that the + rule is not bound to another map. + + :param rulefactory: a :class:`Rule` or :class:`RuleFactory` + """ + for rule in rulefactory.get_rules(self): + rule.bind(self) + if not rule.build_only: + self._matcher.add(rule) + self._rules_by_endpoint.setdefault(rule.endpoint, []).append(rule) + self._remap = True + + def bind( + self, + server_name: str, + script_name: str | None = None, + subdomain: str | None = None, + url_scheme: str = "http", + default_method: str = "GET", + path_info: str | None = None, + query_args: t.Mapping[str, t.Any] | str | None = None, + ) -> MapAdapter: + """Return a new :class:`MapAdapter` with the details specified to the + call. Note that `script_name` will default to ``'/'`` if not further + specified or `None`. The `server_name` at least is a requirement + because the HTTP RFC requires absolute URLs for redirects and so all + redirect exceptions raised by Werkzeug will contain the full canonical + URL. + + If no path_info is passed to :meth:`match` it will use the default path + info passed to bind. While this doesn't really make sense for + manual bind calls, it's useful if you bind a map to a WSGI + environment which already contains the path info. + + `subdomain` will default to the `default_subdomain` for this map if + no defined. If there is no `default_subdomain` you cannot use the + subdomain feature. + + .. versionchanged:: 1.0 + If ``url_scheme`` is ``ws`` or ``wss``, only WebSocket rules + will match. + + .. versionchanged:: 0.15 + ``path_info`` defaults to ``'/'`` if ``None``. + + .. versionchanged:: 0.8 + ``query_args`` can be a string. + + .. versionchanged:: 0.7 + Added ``query_args``. + """ + server_name = server_name.lower() + if self.host_matching: + if subdomain is not None: + raise RuntimeError("host matching enabled and a subdomain was provided") + elif subdomain is None: + subdomain = self.default_subdomain + if script_name is None: + script_name = "/" + if path_info is None: + path_info = "/" + + # Port isn't part of IDNA, and might push a name over the 63 octet limit. + server_name, port_sep, port = server_name.partition(":") + + try: + server_name = server_name.encode("idna").decode("ascii") + except UnicodeError as e: + raise BadHost() from e + + return MapAdapter( + self, + f"{server_name}{port_sep}{port}", + script_name, + subdomain, + url_scheme, + path_info, + default_method, + query_args, + ) + + def bind_to_environ( + self, + environ: WSGIEnvironment | Request, + server_name: str | None = None, + subdomain: str | None = None, + ) -> MapAdapter: + """Like :meth:`bind` but you can pass it an WSGI environment and it + will fetch the information from that dictionary. Note that because of + limitations in the protocol there is no way to get the current + subdomain and real `server_name` from the environment. If you don't + provide it, Werkzeug will use `SERVER_NAME` and `SERVER_PORT` (or + `HTTP_HOST` if provided) as used `server_name` with disabled subdomain + feature. + + If `subdomain` is `None` but an environment and a server name is + provided it will calculate the current subdomain automatically. 
+ Example: `server_name` is ``'example.com'`` and the `SERVER_NAME` + in the wsgi `environ` is ``'staging.dev.example.com'`` the calculated + subdomain will be ``'staging.dev'``. + + If the object passed as environ has an environ attribute, the value of + this attribute is used instead. This allows you to pass request + objects. Additionally `PATH_INFO` added as a default of the + :class:`MapAdapter` so that you don't have to pass the path info to + the match method. + + .. versionchanged:: 1.0.0 + If the passed server name specifies port 443, it will match + if the incoming scheme is ``https`` without a port. + + .. versionchanged:: 1.0.0 + A warning is shown when the passed server name does not + match the incoming WSGI server name. + + .. versionchanged:: 0.8 + This will no longer raise a ValueError when an unexpected server + name was passed. + + .. versionchanged:: 0.5 + previously this method accepted a bogus `calculate_subdomain` + parameter that did not have any effect. It was removed because + of that. + + :param environ: a WSGI environment. + :param server_name: an optional server name hint (see above). + :param subdomain: optionally the current subdomain (see above). + """ + env = _get_environ(environ) + wsgi_server_name = get_host(env).lower() + scheme = env["wsgi.url_scheme"] + upgrade = any( + v.strip() == "upgrade" + for v in env.get("HTTP_CONNECTION", "").lower().split(",") + ) + + if upgrade and env.get("HTTP_UPGRADE", "").lower() == "websocket": + scheme = "wss" if scheme == "https" else "ws" + + if server_name is None: + server_name = wsgi_server_name + else: + server_name = server_name.lower() + + # strip standard port to match get_host() + if scheme in {"http", "ws"} and server_name.endswith(":80"): + server_name = server_name[:-3] + elif scheme in {"https", "wss"} and server_name.endswith(":443"): + server_name = server_name[:-4] + + if subdomain is None and not self.host_matching: + cur_server_name = wsgi_server_name.split(".") + real_server_name = server_name.split(".") + offset = -len(real_server_name) + + if cur_server_name[offset:] != real_server_name: + # This can happen even with valid configs if the server was + # accessed directly by IP address under some situations. + # Instead of raising an exception like in Werkzeug 0.7 or + # earlier we go by an invalid subdomain which will result + # in a 404 error on matching. + warnings.warn( + f"Current server name {wsgi_server_name!r} doesn't match configured" + f" server name {server_name!r}", + stacklevel=2, + ) + subdomain = "<invalid>" + else: + subdomain = ".".join(filter(None, cur_server_name[:offset])) + + def _get_wsgi_string(name: str) -> str | None: + val = env.get(name) + if val is not None: + return _wsgi_decoding_dance(val, self.charset) + return None + + script_name = _get_wsgi_string("SCRIPT_NAME") + path_info = _get_wsgi_string("PATH_INFO") + query_args = _get_wsgi_string("QUERY_STRING") + return Map.bind( + self, + server_name, + script_name, + subdomain, + scheme, + env["REQUEST_METHOD"], + path_info, + query_args=query_args, + ) + + def update(self) -> None: + """Called before matching and building to keep the compiled rules + in the correct order after things changed. 
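+
+        It is cheap to call repeatedly; the ``_remap`` flag makes it a
+        no-op unless rules were added since the last update.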
+ """ + if not self._remap: + return + + with self._remap_lock: + if not self._remap: + return + + self._matcher.update() + for rules in self._rules_by_endpoint.values(): + rules.sort(key=lambda x: x.build_compare_key()) + self._remap = False + + def __repr__(self) -> str: + rules = self.iter_rules() + return f"{type(self).__name__}({pformat(list(rules))})" + + +class MapAdapter: + + """Returned by :meth:`Map.bind` or :meth:`Map.bind_to_environ` and does + the URL matching and building based on runtime information. + """ + + def __init__( + self, + map: Map, + server_name: str, + script_name: str, + subdomain: str | None, + url_scheme: str, + path_info: str, + default_method: str, + query_args: t.Mapping[str, t.Any] | str | None = None, + ): + self.map = map + self.server_name = server_name + + if not script_name.endswith("/"): + script_name += "/" + + self.script_name = script_name + self.subdomain = subdomain + self.url_scheme = url_scheme + self.path_info = path_info + self.default_method = default_method + self.query_args = query_args + self.websocket = self.url_scheme in {"ws", "wss"} + + def dispatch( + self, + view_func: t.Callable[[str, t.Mapping[str, t.Any]], WSGIApplication], + path_info: str | None = None, + method: str | None = None, + catch_http_exceptions: bool = False, + ) -> WSGIApplication: + """Does the complete dispatching process. `view_func` is called with + the endpoint and a dict with the values for the view. It should + look up the view function, call it, and return a response object + or WSGI application. http exceptions are not caught by default + so that applications can display nicer error messages by just + catching them by hand. If you want to stick with the default + error messages you can pass it ``catch_http_exceptions=True`` and + it will catch the http exceptions. + + Here a small example for the dispatch usage:: + + from werkzeug.wrappers import Request, Response + from werkzeug.wsgi import responder + from werkzeug.routing import Map, Rule + + def on_index(request): + return Response('Hello from the index') + + url_map = Map([Rule('/', endpoint='index')]) + views = {'index': on_index} + + @responder + def application(environ, start_response): + request = Request(environ) + urls = url_map.bind_to_environ(environ) + return urls.dispatch(lambda e, v: views[e](request, **v), + catch_http_exceptions=True) + + Keep in mind that this method might return exception objects, too, so + use :class:`Response.force_type` to get a response object. + + :param view_func: a function that is called with the endpoint as + first argument and the value dict as second. Has + to dispatch to the actual view function with this + information. (see above) + :param path_info: the path info to use for matching. Overrides the + path info specified on binding. + :param method: the HTTP method used for matching. Overrides the + method specified on binding. + :param catch_http_exceptions: set to `True` to catch any of the + werkzeug :class:`HTTPException`\\s. + """ + try: + try: + endpoint, args = self.match(path_info, method) + except RequestRedirect as e: + return e + return view_func(endpoint, args) + except HTTPException as e: + if catch_http_exceptions: + return e + raise + + @t.overload + def match( # type: ignore + self, + path_info: str | None = None, + method: str | None = None, + return_rule: t.Literal[False] = False, + query_args: t.Mapping[str, t.Any] | str | None = None, + websocket: bool | None = None, + ) -> tuple[str, t.Mapping[str, t.Any]]: + ... 
+ + @t.overload + def match( + self, + path_info: str | None = None, + method: str | None = None, + return_rule: t.Literal[True] = True, + query_args: t.Mapping[str, t.Any] | str | None = None, + websocket: bool | None = None, + ) -> tuple[Rule, t.Mapping[str, t.Any]]: + ... + + def match( + self, + path_info: str | None = None, + method: str | None = None, + return_rule: bool = False, + query_args: t.Mapping[str, t.Any] | str | None = None, + websocket: bool | None = None, + ) -> tuple[str | Rule, t.Mapping[str, t.Any]]: + """The usage is simple: you just pass the match method the current + path info as well as the method (which defaults to `GET`). The + following things can then happen: + + - you receive a `NotFound` exception that indicates that no URL is + matching. A `NotFound` exception is also a WSGI application you + can call to get a default page not found page (happens to be the + same object as `werkzeug.exceptions.NotFound`) + + - you receive a `MethodNotAllowed` exception that indicates that there + is a match for this URL but not for the current request method. + This is useful for RESTful applications. + + - you receive a `RequestRedirect` exception with a `new_url` + attribute. This exception is used to notify you about a request + Werkzeug requests from your WSGI application. This is for example the + case if you request ``/foo`` although the correct URL is ``/foo/`` + You can use the `RequestRedirect` instance as response-like object + similar to all other subclasses of `HTTPException`. + + - you receive a ``WebsocketMismatch`` exception if the only + match is a WebSocket rule but the bind is an HTTP request, or + if the match is an HTTP rule but the bind is a WebSocket + request. + + - you get a tuple in the form ``(endpoint, arguments)`` if there is + a match (unless `return_rule` is True, in which case you get a tuple + in the form ``(rule, arguments)``) + + If the path info is not passed to the match method the default path + info of the map is used (defaults to the root URL if not defined + explicitly). + + All of the exceptions raised are subclasses of `HTTPException` so they + can be used as WSGI responses. They will all render generic error or + redirect pages. + + Here is a small example for matching: + + >>> m = Map([ + ... Rule('/', endpoint='index'), + ... Rule('/downloads/', endpoint='downloads/index'), + ... Rule('/downloads/<int:id>', endpoint='downloads/show') + ... ]) + >>> urls = m.bind("example.com", "/") + >>> urls.match("/", "GET") + ('index', {}) + >>> urls.match("/downloads/42") + ('downloads/show', {'id': 42}) + + And here is what happens on redirect and missing URLs: + + >>> urls.match("/downloads") + Traceback (most recent call last): + ... + RequestRedirect: http://example.com/downloads/ + >>> urls.match("/missing") + Traceback (most recent call last): + ... + NotFound: 404 Not Found + + :param path_info: the path info to use for matching. Overrides the + path info specified on binding. + :param method: the HTTP method used for matching. Overrides the + method specified on binding. + :param return_rule: return the rule that matched instead of just the + endpoint (defaults to `False`). + :param query_args: optional query arguments that are used for + automatic redirects as string or dictionary. It's + currently not possible to use the query arguments + for URL matching. + :param websocket: Match WebSocket instead of HTTP requests. A + websocket request has a ``ws`` or ``wss`` + :attr:`url_scheme`. This overrides that detection. + + .. 
versionadded:: 1.0 + Added ``websocket``. + + .. versionchanged:: 0.8 + ``query_args`` can be a string. + + .. versionadded:: 0.7 + Added ``query_args``. + + .. versionadded:: 0.6 + Added ``return_rule``. + """ + self.map.update() + if path_info is None: + path_info = self.path_info + if query_args is None: + query_args = self.query_args or {} + method = (method or self.default_method).upper() + + if websocket is None: + websocket = self.websocket + + domain_part = self.server_name + + if not self.map.host_matching and self.subdomain is not None: + domain_part = self.subdomain + + path_part = f"/{path_info.lstrip('/')}" if path_info else "" + + try: + result = self.map._matcher.match(domain_part, path_part, method, websocket) + except RequestPath as e: + # safe = https://url.spec.whatwg.org/#url-path-segment-string + new_path = quote( + e.path_info, safe="!$&'()*+,/:;=@", encoding=self.map.charset + ) + raise RequestRedirect( + self.make_redirect_url(new_path, query_args) + ) from None + except RequestAliasRedirect as e: + raise RequestRedirect( + self.make_alias_redirect_url( + f"{domain_part}|{path_part}", + e.endpoint, + e.matched_values, + method, + query_args, + ) + ) from None + except NoMatch as e: + if e.have_match_for: + raise MethodNotAllowed(valid_methods=list(e.have_match_for)) from None + + if e.websocket_mismatch: + raise WebsocketMismatch() from None + + raise NotFound() from None + else: + rule, rv = result + + if self.map.redirect_defaults: + redirect_url = self.get_default_redirect(rule, method, rv, query_args) + if redirect_url is not None: + raise RequestRedirect(redirect_url) + + if rule.redirect_to is not None: + if isinstance(rule.redirect_to, str): + + def _handle_match(match: t.Match[str]) -> str: + value = rv[match.group(1)] + return rule._converters[match.group(1)].to_url(value) + + redirect_url = _simple_rule_re.sub(_handle_match, rule.redirect_to) + else: + redirect_url = rule.redirect_to(self, **rv) + + if self.subdomain: + netloc = f"{self.subdomain}.{self.server_name}" + else: + netloc = self.server_name + + raise RequestRedirect( + urljoin( + f"{self.url_scheme or 'http'}://{netloc}{self.script_name}", + redirect_url, + ) + ) + + if return_rule: + return rule, rv + else: + return rule.endpoint, rv + + def test(self, path_info: str | None = None, method: str | None = None) -> bool: + """Test if a rule would match. Works like `match` but returns `True` + if the URL matches, or `False` if it does not exist. + + :param path_info: the path info to use for matching. Overrides the + path info specified on binding. + :param method: the HTTP method used for matching. Overrides the + method specified on binding. + """ + try: + self.match(path_info, method) + except RequestRedirect: + pass + except HTTPException: + return False + return True + + def allowed_methods(self, path_info: str | None = None) -> t.Iterable[str]: + """Returns the valid methods that match for a given path. + + .. versionadded:: 0.7 + """ + try: + self.match(path_info, method="--") + except MethodNotAllowed as e: + return e.valid_methods # type: ignore + except HTTPException: + pass + return [] + + def get_host(self, domain_part: str | None) -> str: + """Figures out the full host name for the given domain part. The + domain part is a subdomain in case host matching is disabled or + a full host name. 
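+
+        For example, with host matching disabled, ``server_name``
+        ``'example.com'`` and ``domain_part`` ``'kb'``, this returns
+        ``'kb.example.com'``.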
+ """ + if self.map.host_matching: + if domain_part is None: + return self.server_name + + return domain_part + + if domain_part is None: + subdomain = self.subdomain + else: + subdomain = domain_part + + if subdomain: + return f"{subdomain}.{self.server_name}" + else: + return self.server_name + + def get_default_redirect( + self, + rule: Rule, + method: str, + values: t.MutableMapping[str, t.Any], + query_args: t.Mapping[str, t.Any] | str, + ) -> str | None: + """A helper that returns the URL to redirect to if it finds one. + This is used for default redirecting only. + + :internal: + """ + assert self.map.redirect_defaults + for r in self.map._rules_by_endpoint[rule.endpoint]: + # every rule that comes after this one, including ourself + # has a lower priority for the defaults. We order the ones + # with the highest priority up for building. + if r is rule: + break + if r.provides_defaults_for(rule) and r.suitable_for(values, method): + values.update(r.defaults) # type: ignore + domain_part, path = r.build(values) # type: ignore + return self.make_redirect_url(path, query_args, domain_part=domain_part) + return None + + def encode_query_args(self, query_args: t.Mapping[str, t.Any] | str) -> str: + if not isinstance(query_args, str): + return _urlencode(query_args, encoding=self.map.charset) + return query_args + + def make_redirect_url( + self, + path_info: str, + query_args: t.Mapping[str, t.Any] | str | None = None, + domain_part: str | None = None, + ) -> str: + """Creates a redirect URL. + + :internal: + """ + if query_args is None: + query_args = self.query_args + + if query_args: + query_str = self.encode_query_args(query_args) + else: + query_str = None + + scheme = self.url_scheme or "http" + host = self.get_host(domain_part) + path = "/".join((self.script_name.strip("/"), path_info.lstrip("/"))) + return urlunsplit((scheme, host, path, query_str, None)) + + def make_alias_redirect_url( + self, + path: str, + endpoint: str, + values: t.Mapping[str, t.Any], + method: str, + query_args: t.Mapping[str, t.Any] | str, + ) -> str: + """Internally called to make an alias redirect URL.""" + url = self.build( + endpoint, values, method, append_unknown=False, force_external=True + ) + if query_args: + url += f"?{self.encode_query_args(query_args)}" + assert url != path, "detected invalid alias setting. No canonical URL found" + return url + + def _partial_build( + self, + endpoint: str, + values: t.Mapping[str, t.Any], + method: str | None, + append_unknown: bool, + ) -> tuple[str, str, bool] | None: + """Helper for :meth:`build`. Returns subdomain and path for the + rule that accepts this endpoint, values and method. + + :internal: + """ + # in case the method is none, try with the default method first + if method is None: + rv = self._partial_build( + endpoint, values, self.default_method, append_unknown + ) + if rv is not None: + return rv + + # Default method did not match or a specific method is passed. + # Check all for first match with matching host. If no matching + # host is found, go with first result. 
+ first_match = None + + for rule in self.map._rules_by_endpoint.get(endpoint, ()): + if rule.suitable_for(values, method): + build_rv = rule.build(values, append_unknown) + + if build_rv is not None: + rv = (build_rv[0], build_rv[1], rule.websocket) + if self.map.host_matching: + if rv[0] == self.server_name: + return rv + elif first_match is None: + first_match = rv + else: + return rv + + return first_match + + def build( + self, + endpoint: str, + values: t.Mapping[str, t.Any] | None = None, + method: str | None = None, + force_external: bool = False, + append_unknown: bool = True, + url_scheme: str | None = None, + ) -> str: + """Building URLs works pretty much the other way round. Instead of + `match` you call `build` and pass it the endpoint and a dict of + arguments for the placeholders. + + The `build` function also accepts an argument called `force_external` + which, if you set it to `True` will force external URLs. Per default + external URLs (include the server name) will only be used if the + target URL is on a different subdomain. + + >>> m = Map([ + ... Rule('/', endpoint='index'), + ... Rule('/downloads/', endpoint='downloads/index'), + ... Rule('/downloads/<int:id>', endpoint='downloads/show') + ... ]) + >>> urls = m.bind("example.com", "/") + >>> urls.build("index", {}) + '/' + >>> urls.build("downloads/show", {'id': 42}) + '/downloads/42' + >>> urls.build("downloads/show", {'id': 42}, force_external=True) + 'http://example.com/downloads/42' + + Because URLs cannot contain non ASCII data you will always get + bytes back. Non ASCII characters are urlencoded with the + charset defined on the map instance. + + Additional values are converted to strings and appended to the URL as + URL querystring parameters: + + >>> urls.build("index", {'q': 'My Searchstring'}) + '/?q=My+Searchstring' + + When processing those additional values, lists are furthermore + interpreted as multiple values (as per + :py:class:`werkzeug.datastructures.MultiDict`): + + >>> urls.build("index", {'q': ['a', 'b', 'c']}) + '/?q=a&q=b&q=c' + + Passing a ``MultiDict`` will also add multiple values: + + >>> urls.build("index", MultiDict((('p', 'z'), ('q', 'a'), ('q', 'b')))) + '/?p=z&q=a&q=b' + + If a rule does not exist when building a `BuildError` exception is + raised. + + The build method accepts an argument called `method` which allows you + to specify the method you want to have an URL built for if you have + different methods for the same endpoint specified. + + :param endpoint: the endpoint of the URL to build. + :param values: the values for the URL to build. Unhandled values are + appended to the URL as query parameters. + :param method: the HTTP method for the rule if there are different + URLs for different methods on the same endpoint. + :param force_external: enforce full canonical external URLs. If the URL + scheme is not provided, this will generate + a protocol-relative URL. + :param append_unknown: unknown parameters are appended to the generated + URL as query string argument. Disable this + if you want the builder to ignore those. + :param url_scheme: Scheme to use in place of the bound + :attr:`url_scheme`. + + .. versionchanged:: 2.0 + Added the ``url_scheme`` parameter. + + .. versionadded:: 0.6 + Added the ``append_unknown`` parameter. 
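+
+        A short sketch of ``url_scheme``, continuing the doctest above:
+
+        >>> urls.build("downloads/show", {'id': 42},
+        ...            force_external=True, url_scheme="https")
+        'https://example.com/downloads/42'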
+ """ + self.map.update() + + if values: + if isinstance(values, MultiDict): + values = { + k: (v[0] if len(v) == 1 else v) + for k, v in dict.items(values) + if len(v) != 0 + } + else: # plain dict + values = {k: v for k, v in values.items() if v is not None} + else: + values = {} + + rv = self._partial_build(endpoint, values, method, append_unknown) + if rv is None: + raise BuildError(endpoint, values, method, self) + + domain_part, path, websocket = rv + host = self.get_host(domain_part) + + if url_scheme is None: + url_scheme = self.url_scheme + + # Always build WebSocket routes with the scheme (browsers + # require full URLs). If bound to a WebSocket, ensure that HTTP + # routes are built with an HTTP scheme. + secure = url_scheme in {"https", "wss"} + + if websocket: + force_external = True + url_scheme = "wss" if secure else "ws" + elif url_scheme: + url_scheme = "https" if secure else "http" + + # shortcut this. + if not force_external and ( + (self.map.host_matching and host == self.server_name) + or (not self.map.host_matching and domain_part == self.subdomain) + ): + return f"{self.script_name.rstrip('/')}/{path.lstrip('/')}" + + scheme = f"{url_scheme}:" if url_scheme else "" + return f"{scheme}//{host}{self.script_name[:-1]}/{path.lstrip('/')}" diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/routing/matcher.py b/backend/test/lib/python3.8/site-packages/werkzeug/routing/matcher.py new file mode 100644 index 0000000000000000000000000000000000000000..0d1210a67da5f1c3eaf572a703c86c1d47c0bbab --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/routing/matcher.py @@ -0,0 +1,202 @@ +from __future__ import annotations + +import re +import typing as t +from dataclasses import dataclass +from dataclasses import field + +from .converters import ValidationError +from .exceptions import NoMatch +from .exceptions import RequestAliasRedirect +from .exceptions import RequestPath +from .rules import Rule +from .rules import RulePart + + +class SlashRequired(Exception): + pass + + +@dataclass +class State: + """A representation of a rule state. + + This includes the *rules* that correspond to the state and the + possible *static* and *dynamic* transitions to the next state. 
+ """ + + dynamic: list[tuple[RulePart, State]] = field(default_factory=list) + rules: list[Rule] = field(default_factory=list) + static: dict[str, State] = field(default_factory=dict) + + +class StateMachineMatcher: + def __init__(self, merge_slashes: bool) -> None: + self._root = State() + self.merge_slashes = merge_slashes + + def add(self, rule: Rule) -> None: + state = self._root + for part in rule._parts: + if part.static: + state.static.setdefault(part.content, State()) + state = state.static[part.content] + else: + for test_part, new_state in state.dynamic: + if test_part == part: + state = new_state + break + else: + new_state = State() + state.dynamic.append((part, new_state)) + state = new_state + state.rules.append(rule) + + def update(self) -> None: + # For every state the dynamic transitions should be sorted by + # the weight of the transition + state = self._root + + def _update_state(state: State) -> None: + state.dynamic.sort(key=lambda entry: entry[0].weight) + for new_state in state.static.values(): + _update_state(new_state) + for _, new_state in state.dynamic: + _update_state(new_state) + + _update_state(state) + + def match( + self, domain: str, path: str, method: str, websocket: bool + ) -> tuple[Rule, t.MutableMapping[str, t.Any]]: + # To match to a rule we need to start at the root state and + # try to follow the transitions until we find a match, or find + # there is no transition to follow. + + have_match_for = set() + websocket_mismatch = False + + def _match( + state: State, parts: list[str], values: list[str] + ) -> tuple[Rule, list[str]] | None: + # This function is meant to be called recursively, and will attempt + # to match the head part to the state's transitions. + nonlocal have_match_for, websocket_mismatch + + # The base case is when all parts have been matched via + # transitions. Hence if there is a rule with methods & + # websocket that work return it and the dynamic values + # extracted. + if parts == []: + for rule in state.rules: + if rule.methods is not None and method not in rule.methods: + have_match_for.update(rule.methods) + elif rule.websocket != websocket: + websocket_mismatch = True + else: + return rule, values + + # Test if there is a match with this path with a + # trailing slash, if so raise an exception to report + # that matching is possible with an additional slash + if "" in state.static: + for rule in state.static[""].rules: + if websocket == rule.websocket and ( + rule.methods is None or method in rule.methods + ): + if rule.strict_slashes: + raise SlashRequired() + else: + return rule, values + return None + + part = parts[0] + # To match this part try the static transitions first + if part in state.static: + rv = _match(state.static[part], parts[1:], values) + if rv is not None: + return rv + # No match via the static transitions, so try the dynamic + # ones. + for test_part, new_state in state.dynamic: + target = part + remaining = parts[1:] + # A final part indicates a transition that always + # consumes the remaining parts i.e. transitions to a + # final state. + if test_part.final: + target = "/".join(parts) + remaining = [] + match = re.compile(test_part.content).match(target) + if match is not None: + if test_part.suffixed: + # If a part_isolating=False part has a slash suffix, remove the + # suffix from the match and check for the slash redirect next. 
+ suffix = match.groups()[-1] + if suffix == "/": + remaining = [""] + + converter_groups = sorted( + match.groupdict().items(), key=lambda entry: entry[0] + ) + groups = [ + value + for key, value in converter_groups + if key[:11] == "__werkzeug_" + ] + rv = _match(new_state, remaining, values + groups) + if rv is not None: + return rv + + # If there is no match and the only part left is a + # trailing slash ("") consider rules that aren't + # strict-slashes as these should match if there is a final + # slash part. + if parts == [""]: + for rule in state.rules: + if rule.strict_slashes: + continue + if rule.methods is not None and method not in rule.methods: + have_match_for.update(rule.methods) + elif rule.websocket != websocket: + websocket_mismatch = True + else: + return rule, values + + return None + + try: + rv = _match(self._root, [domain, *path.split("/")], []) + except SlashRequired: + raise RequestPath(f"{path}/") from None + + if self.merge_slashes and rv is None: + # Try to match again, but with slashes merged + path = re.sub("/{2,}?", "/", path) + try: + rv = _match(self._root, [domain, *path.split("/")], []) + except SlashRequired: + raise RequestPath(f"{path}/") from None + if rv is None: + raise NoMatch(have_match_for, websocket_mismatch) + else: + raise RequestPath(f"{path}") + elif rv is not None: + rule, values = rv + + result = {} + for name, value in zip(rule._converters.keys(), values): + try: + value = rule._converters[name].to_python(value) + except ValidationError: + raise NoMatch(have_match_for, websocket_mismatch) from None + result[str(name)] = value + if rule.defaults: + result.update(rule.defaults) + + if rule.alias and rule.map.redirect_defaults: + raise RequestAliasRedirect(result, rule.endpoint) + + return rule, result + + raise NoMatch(have_match_for, websocket_mismatch) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/routing/rules.py b/backend/test/lib/python3.8/site-packages/werkzeug/routing/rules.py new file mode 100644 index 0000000000000000000000000000000000000000..904a0225847d2abaccfa64fe5bf0d8c9324fe231 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/routing/rules.py @@ -0,0 +1,914 @@ +from __future__ import annotations + +import ast +import re +import typing as t +from dataclasses import dataclass +from string import Template +from types import CodeType +from urllib.parse import quote + +from ..datastructures import iter_multi_items +from ..urls import _urlencode +from .converters import ValidationError + +if t.TYPE_CHECKING: + from .converters import BaseConverter + from .map import Map + + +class Weighting(t.NamedTuple): + number_static_weights: int + static_weights: list[tuple[int, int]] + number_argument_weights: int + argument_weights: list[int] + + +@dataclass +class RulePart: + """A part of a rule. + + Rules can be represented by parts as delimited by `/` with + instances of this class representing those parts. The *content* is + either the raw content if *static* or a regex string to match + against. The *weight* can be used to order parts when matching. + + """ + + content: str + final: bool + static: bool + suffixed: bool + weight: Weighting + + +_part_re = re.compile( + r""" + (?: + (?P<slash>/) # a slash + | + (?P<static>[^</]+) # static rule data + | + (?: + < + (?: + (?P<converter>[a-zA-Z_][a-zA-Z0-9_]*) # converter name + (?:\((?P<arguments>.*?)\))? # converter arguments + : # variable delimiter + )? 
+ (?P<variable>[a-zA-Z_][a-zA-Z0-9_]*) # variable name + > + ) + ) + """, + re.VERBOSE, +) + +_simple_rule_re = re.compile(r"<([^>]+)>") +_converter_args_re = re.compile( + r""" + ((?P<name>\w+)\s*=\s*)? + (?P<value> + True|False| + \d+.\d+| + \d+.| + \d+| + [\w\d_.]+| + [urUR]?(?P<stringval>"[^"]*?"|'[^']*') + )\s*, + """, + re.VERBOSE, +) + + +_PYTHON_CONSTANTS = {"None": None, "True": True, "False": False} + + +def _find(value: str, target: str, pos: int) -> int: + """Find the *target* in *value* after *pos*. + + Returns the *value* length if *target* isn't found. + """ + try: + return value.index(target, pos) + except ValueError: + return len(value) + + +def _pythonize(value: str) -> None | bool | int | float | str: + if value in _PYTHON_CONSTANTS: + return _PYTHON_CONSTANTS[value] + for convert in int, float: + try: + return convert(value) # type: ignore + except ValueError: + pass + if value[:1] == value[-1:] and value[0] in "\"'": + value = value[1:-1] + return str(value) + + +def parse_converter_args(argstr: str) -> tuple[t.Tuple, dict[str, t.Any]]: + argstr += "," + args = [] + kwargs = {} + + for item in _converter_args_re.finditer(argstr): + value = item.group("stringval") + if value is None: + value = item.group("value") + value = _pythonize(value) + if not item.group("name"): + args.append(value) + else: + name = item.group("name") + kwargs[name] = value + + return tuple(args), kwargs + + +class RuleFactory: + """As soon as you have more complex URL setups it's a good idea to use rule + factories to avoid repetitive tasks. Some of them are builtin, others can + be added by subclassing `RuleFactory` and overriding `get_rules`. + """ + + def get_rules(self, map: Map) -> t.Iterable[Rule]: + """Subclasses of `RuleFactory` have to override this method and return + an iterable of rules.""" + raise NotImplementedError() + + +class Subdomain(RuleFactory): + """All URLs provided by this factory have the subdomain set to a + specific domain. For example if you want to use the subdomain for + the current language this can be a good setup:: + + url_map = Map([ + Rule('/', endpoint='#select_language'), + Subdomain('<string(length=2):lang_code>', [ + Rule('/', endpoint='index'), + Rule('/about', endpoint='about'), + Rule('/help', endpoint='help') + ]) + ]) + + All the rules except for the ``'#select_language'`` endpoint will now + listen on a two letter long subdomain that holds the language code + for the current request. + """ + + def __init__(self, subdomain: str, rules: t.Iterable[RuleFactory]) -> None: + self.subdomain = subdomain + self.rules = rules + + def get_rules(self, map: Map) -> t.Iterator[Rule]: + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + rule = rule.empty() + rule.subdomain = self.subdomain + yield rule + + +class Submount(RuleFactory): + """Like `Subdomain` but prefixes the URL rule with a given string:: + + url_map = Map([ + Rule('/', endpoint='index'), + Submount('/blog', [ + Rule('/', endpoint='blog/index'), + Rule('/entry/<entry_slug>', endpoint='blog/show') + ]) + ]) + + Now the rule ``'blog/show'`` matches ``/blog/entry/<entry_slug>``. 
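+
+ Because factories can be nested, submounts may themselves contain
+ submounts. A sketch of a deeper hierarchy (names are illustrative)::
+
+ url_map = Map([
+ Submount('/api', [
+ Submount('/v1', [
+ Rule('/users/', endpoint='api.v1.users')
+ ])
+ ])
+ ])
+
+ Here ``'api.v1.users'`` matches ``/api/v1/users/``.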
+ """ + + def __init__(self, path: str, rules: t.Iterable[RuleFactory]) -> None: + self.path = path.rstrip("/") + self.rules = rules + + def get_rules(self, map: Map) -> t.Iterator[Rule]: + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + rule = rule.empty() + rule.rule = self.path + rule.rule + yield rule + + +class EndpointPrefix(RuleFactory): + """Prefixes all endpoints (which must be strings for this factory) with + another string. This can be useful for sub applications:: + + url_map = Map([ + Rule('/', endpoint='index'), + EndpointPrefix('blog/', [Submount('/blog', [ + Rule('/', endpoint='index'), + Rule('/entry/<entry_slug>', endpoint='show') + ])]) + ]) + """ + + def __init__(self, prefix: str, rules: t.Iterable[RuleFactory]) -> None: + self.prefix = prefix + self.rules = rules + + def get_rules(self, map: Map) -> t.Iterator[Rule]: + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + rule = rule.empty() + rule.endpoint = self.prefix + rule.endpoint + yield rule + + +class RuleTemplate: + """Returns copies of the rules wrapped and expands string templates in + the endpoint, rule, defaults or subdomain sections. + + Here a small example for such a rule template:: + + from werkzeug.routing import Map, Rule, RuleTemplate + + resource = RuleTemplate([ + Rule('/$name/', endpoint='$name.list'), + Rule('/$name/<int:id>', endpoint='$name.show') + ]) + + url_map = Map([resource(name='user'), resource(name='page')]) + + When a rule template is called the keyword arguments are used to + replace the placeholders in all the string parameters. + """ + + def __init__(self, rules: t.Iterable[Rule]) -> None: + self.rules = list(rules) + + def __call__(self, *args: t.Any, **kwargs: t.Any) -> RuleTemplateFactory: + return RuleTemplateFactory(self.rules, dict(*args, **kwargs)) + + +class RuleTemplateFactory(RuleFactory): + """A factory that fills in template variables into rules. Used by + `RuleTemplate` internally. + + :internal: + """ + + def __init__( + self, rules: t.Iterable[RuleFactory], context: dict[str, t.Any] + ) -> None: + self.rules = rules + self.context = context + + def get_rules(self, map: Map) -> t.Iterator[Rule]: + for rulefactory in self.rules: + for rule in rulefactory.get_rules(map): + new_defaults = subdomain = None + if rule.defaults: + new_defaults = {} + for key, value in rule.defaults.items(): + if isinstance(value, str): + value = Template(value).substitute(self.context) + new_defaults[key] = value + if rule.subdomain is not None: + subdomain = Template(rule.subdomain).substitute(self.context) + new_endpoint = rule.endpoint + if isinstance(new_endpoint, str): + new_endpoint = Template(new_endpoint).substitute(self.context) + yield Rule( + Template(rule.rule).substitute(self.context), + new_defaults, + subdomain, + rule.methods, + rule.build_only, + new_endpoint, + rule.strict_slashes, + ) + + +def _prefix_names(src: str) -> ast.stmt: + """ast parse and prefix names with `.` to avoid collision with user vars""" + tree = ast.parse(src).body[0] + if isinstance(tree, ast.Expr): + tree = tree.value # type: ignore + for node in ast.walk(tree): + if isinstance(node, ast.Name): + node.id = f".{node.id}" + return tree + + +_CALL_CONVERTER_CODE_FMT = "self._converters[{elem!r}].to_url()" +_IF_KWARGS_URL_ENCODE_CODE = """\ +if kwargs: + params = self._encode_query_vars(kwargs) + q = "?" 
if params else "" +else: + q = params = "" +""" +_IF_KWARGS_URL_ENCODE_AST = _prefix_names(_IF_KWARGS_URL_ENCODE_CODE) +_URL_ENCODE_AST_NAMES = (_prefix_names("q"), _prefix_names("params")) + + +class Rule(RuleFactory): + """A Rule represents one URL pattern. There are some options for `Rule` + that change the way it behaves and are passed to the `Rule` constructor. + Note that besides the rule-string all arguments *must* be keyword arguments + in order to not break the application on Werkzeug upgrades. + + `string` + Rule strings basically are just normal URL paths with placeholders in + the format ``<converter(arguments):name>`` where the converter and the + arguments are optional. If no converter is defined the `default` + converter is used which means `string` in the normal configuration. + + URL rules that end with a slash are branch URLs, others are leaves. + If you have `strict_slashes` enabled (which is the default), all + branch URLs that are matched without a trailing slash will trigger a + redirect to the same URL with the missing slash appended. + + The converters are defined on the `Map`. + + `endpoint` + The endpoint for this rule. This can be anything. A reference to a + function, a string, a number etc. The preferred way is using a string + because the endpoint is used for URL generation. + + `defaults` + An optional dict with defaults for other rules with the same endpoint. + This is a bit tricky but useful if you want to have unique URLs:: + + url_map = Map([ + Rule('/all/', defaults={'page': 1}, endpoint='all_entries'), + Rule('/all/page/<int:page>', endpoint='all_entries') + ]) + + If a user now visits ``http://example.com/all/page/1`` they will be + redirected to ``http://example.com/all/``. If `redirect_defaults` is + disabled on the `Map` instance this will only affect the URL + generation. + + `subdomain` + The subdomain rule string for this rule. If not specified the rule + only matches for the `default_subdomain` of the map. If the map is + not bound to a subdomain this feature is disabled. + + Can be useful if you want to have user profiles on different subdomains + and all subdomains are forwarded to your application:: + + url_map = Map([ + Rule('/', subdomain='<username>', endpoint='user/homepage'), + Rule('/stats', subdomain='<username>', endpoint='user/stats') + ]) + + `methods` + A sequence of http methods this rule applies to. If not specified, all + methods are allowed. For example this can be useful if you want different + endpoints for `POST` and `GET`. If methods are defined and the path + matches but the method matched against is not in this list or in the + list of another rule for that path the error raised is of the type + `MethodNotAllowed` rather than `NotFound`. If `GET` is present in the + list of methods and `HEAD` is not, `HEAD` is added automatically. + + `strict_slashes` + Override the `Map` setting for `strict_slashes` only for this rule. If + not specified the `Map` setting is used. + + `merge_slashes` + Override :attr:`Map.merge_slashes` for this rule. + + `build_only` + Set this to True and the rule will never match but will create a URL + that can be build. This is useful if you have resources on a subdomain + or folder that are not handled by the WSGI application (like static data) + + `redirect_to` + If given this must be either a string or callable. 
In case of a + callable it's called with the url adapter that triggered the match and + the values of the URL as keyword arguments and has to return the target + for the redirect, otherwise it has to be a string with placeholders in + rule syntax:: + + def foo_with_slug(adapter, id): + # ask the database for the slug for the old id. this of + # course has nothing to do with werkzeug. + return f'foo/{Foo.get_slug_for_id(id)}' + + url_map = Map([ + Rule('/foo/<slug>', endpoint='foo'), + Rule('/some/old/url/<slug>', redirect_to='foo/<slug>'), + Rule('/other/old/url/<int:id>', redirect_to=foo_with_slug) + ]) + + When the rule is matched the routing system will raise a + `RequestRedirect` exception with the target for the redirect. + + Keep in mind that the URL will be joined against the URL root of the + script so don't use a leading slash on the target URL unless you + really mean root of that domain. + + `alias` + If enabled this rule serves as an alias for another rule with the same + endpoint and arguments. + + `host` + If provided and the URL map has host matching enabled this can be + used to provide a match rule for the whole host. This also means + that the subdomain feature is disabled. + + `websocket` + If ``True``, this rule is only matches for WebSocket (``ws://``, + ``wss://``) requests. By default, rules will only match for HTTP + requests. + + .. versionchanged:: 2.1 + Percent-encoded newlines (``%0a``), which are decoded by WSGI + servers, are considered when routing instead of terminating the + match early. + + .. versionadded:: 1.0 + Added ``websocket``. + + .. versionadded:: 1.0 + Added ``merge_slashes``. + + .. versionadded:: 0.7 + Added ``alias`` and ``host``. + + .. versionchanged:: 0.6.1 + ``HEAD`` is added to ``methods`` if ``GET`` is present. + """ + + def __init__( + self, + string: str, + defaults: t.Mapping[str, t.Any] | None = None, + subdomain: str | None = None, + methods: t.Iterable[str] | None = None, + build_only: bool = False, + endpoint: str | None = None, + strict_slashes: bool | None = None, + merge_slashes: bool | None = None, + redirect_to: str | t.Callable[..., str] | None = None, + alias: bool = False, + host: str | None = None, + websocket: bool = False, + ) -> None: + if not string.startswith("/"): + raise ValueError(f"URL rule '{string}' must start with a slash.") + + self.rule = string + self.is_leaf = not string.endswith("/") + self.is_branch = string.endswith("/") + + self.map: Map = None # type: ignore + self.strict_slashes = strict_slashes + self.merge_slashes = merge_slashes + self.subdomain = subdomain + self.host = host + self.defaults = defaults + self.build_only = build_only + self.alias = alias + self.websocket = websocket + + if methods is not None: + if isinstance(methods, str): + raise TypeError("'methods' should be a list of strings.") + + methods = {x.upper() for x in methods} + + if "HEAD" not in methods and "GET" in methods: + methods.add("HEAD") + + if websocket and methods - {"GET", "HEAD", "OPTIONS"}: + raise ValueError( + "WebSocket rules can only use 'GET', 'HEAD', and 'OPTIONS' methods." + ) + + self.methods = methods + self.endpoint: str = endpoint # type: ignore + self.redirect_to = redirect_to + + if defaults: + self.arguments = set(map(str, defaults)) + else: + self.arguments = set() + + self._converters: dict[str, BaseConverter] = {} + self._trace: list[tuple[bool, str]] = [] + self._parts: list[RulePart] = [] + + def empty(self) -> Rule: + """ + Return an unbound copy of this rule. 
+ + This can be useful if want to reuse an already bound URL for another + map. See ``get_empty_kwargs`` to override what keyword arguments are + provided to the new copy. + """ + return type(self)(self.rule, **self.get_empty_kwargs()) + + def get_empty_kwargs(self) -> t.Mapping[str, t.Any]: + """ + Provides kwargs for instantiating empty copy with empty() + + Use this method to provide custom keyword arguments to the subclass of + ``Rule`` when calling ``some_rule.empty()``. Helpful when the subclass + has custom keyword arguments that are needed at instantiation. + + Must return a ``dict`` that will be provided as kwargs to the new + instance of ``Rule``, following the initial ``self.rule`` value which + is always provided as the first, required positional argument. + """ + defaults = None + if self.defaults: + defaults = dict(self.defaults) + return dict( + defaults=defaults, + subdomain=self.subdomain, + methods=self.methods, + build_only=self.build_only, + endpoint=self.endpoint, + strict_slashes=self.strict_slashes, + redirect_to=self.redirect_to, + alias=self.alias, + host=self.host, + ) + + def get_rules(self, map: Map) -> t.Iterator[Rule]: + yield self + + def refresh(self) -> None: + """Rebinds and refreshes the URL. Call this if you modified the + rule in place. + + :internal: + """ + self.bind(self.map, rebind=True) + + def bind(self, map: Map, rebind: bool = False) -> None: + """Bind the url to a map and create a regular expression based on + the information from the rule itself and the defaults from the map. + + :internal: + """ + if self.map is not None and not rebind: + raise RuntimeError(f"url rule {self!r} already bound to map {self.map!r}") + self.map = map + if self.strict_slashes is None: + self.strict_slashes = map.strict_slashes + if self.merge_slashes is None: + self.merge_slashes = map.merge_slashes + if self.subdomain is None: + self.subdomain = map.default_subdomain + self.compile() + + def get_converter( + self, + variable_name: str, + converter_name: str, + args: t.Tuple, + kwargs: t.Mapping[str, t.Any], + ) -> BaseConverter: + """Looks up the converter for the given parameter. + + .. 
versionadded:: 0.9 + """ + if converter_name not in self.map.converters: + raise LookupError(f"the converter {converter_name!r} does not exist") + return self.map.converters[converter_name](self.map, *args, **kwargs) + + def _encode_query_vars(self, query_vars: t.Mapping[str, t.Any]) -> str: + items: t.Iterable[tuple[str, str]] = iter_multi_items(query_vars) + + if self.map.sort_parameters: + items = sorted(items, key=self.map.sort_key) + + return _urlencode(items, encoding=self.map.charset) + + def _parse_rule(self, rule: str) -> t.Iterable[RulePart]: + content = "" + static = True + argument_weights = [] + static_weights: list[tuple[int, int]] = [] + final = False + convertor_number = 0 + + pos = 0 + while pos < len(rule): + match = _part_re.match(rule, pos) + if match is None: + raise ValueError(f"malformed url rule: {rule!r}") + + data = match.groupdict() + if data["static"] is not None: + static_weights.append((len(static_weights), -len(data["static"]))) + self._trace.append((False, data["static"])) + content += data["static"] if static else re.escape(data["static"]) + + if data["variable"] is not None: + if static: + # Switching content to represent regex, hence the need to escape + content = re.escape(content) + static = False + c_args, c_kwargs = parse_converter_args(data["arguments"] or "") + convobj = self.get_converter( + data["variable"], data["converter"] or "default", c_args, c_kwargs + ) + self._converters[data["variable"]] = convobj + self.arguments.add(data["variable"]) + if not convobj.part_isolating: + final = True + content += f"(?P<__werkzeug_{convertor_number}>{convobj.regex})" + convertor_number += 1 + argument_weights.append(convobj.weight) + self._trace.append((True, data["variable"])) + + if data["slash"] is not None: + self._trace.append((False, "/")) + if final: + content += "/" + else: + if not static: + content += r"\Z" + weight = Weighting( + -len(static_weights), + static_weights, + -len(argument_weights), + argument_weights, + ) + yield RulePart( + content=content, + final=final, + static=static, + suffixed=False, + weight=weight, + ) + content = "" + static = True + argument_weights = [] + static_weights = [] + final = False + convertor_number = 0 + + pos = match.end() + + suffixed = False + if final and content[-1] == "/": + # If a converter is part_isolating=False (matches slashes) and ends with a + # slash, augment the regex to support slash redirects. 
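+ # For illustration (hypothetical rule): "/files/<path:name>/" ends its
+ # final part with "/", which is rewritten to "(?<!/)(/?)" so that
+ # "/files/a/b" and "/files/a/b/" both match, with the optional-slash
+ # group recording whether the trailing slash was present.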
+ suffixed = True + content = content[:-1] + "(?<!/)(/?)" + if not static: + content += r"\Z" + weight = Weighting( + -len(static_weights), + static_weights, + -len(argument_weights), + argument_weights, + ) + yield RulePart( + content=content, + final=final, + static=static, + suffixed=suffixed, + weight=weight, + ) + if suffixed: + yield RulePart( + content="", final=False, static=True, suffixed=False, weight=weight + ) + + def compile(self) -> None: + """Compiles the regular expression and stores it.""" + assert self.map is not None, "rule not bound" + + if self.map.host_matching: + domain_rule = self.host or "" + else: + domain_rule = self.subdomain or "" + self._parts = [] + self._trace = [] + self._converters = {} + if domain_rule == "": + self._parts = [ + RulePart( + content="", + final=False, + static=True, + suffixed=False, + weight=Weighting(0, [], 0, []), + ) + ] + else: + self._parts.extend(self._parse_rule(domain_rule)) + self._trace.append((False, "|")) + rule = self.rule + if self.merge_slashes: + rule = re.sub("/{2,}?", "/", self.rule) + self._parts.extend(self._parse_rule(rule)) + + self._build: t.Callable[..., tuple[str, str]] + self._build = self._compile_builder(False).__get__(self, None) + self._build_unknown: t.Callable[..., tuple[str, str]] + self._build_unknown = self._compile_builder(True).__get__(self, None) + + @staticmethod + def _get_func_code(code: CodeType, name: str) -> t.Callable[..., tuple[str, str]]: + globs: dict[str, t.Any] = {} + locs: dict[str, t.Any] = {} + exec(code, globs, locs) + return locs[name] # type: ignore + + def _compile_builder( + self, append_unknown: bool = True + ) -> t.Callable[..., tuple[str, str]]: + defaults = self.defaults or {} + dom_ops: list[tuple[bool, str]] = [] + url_ops: list[tuple[bool, str]] = [] + + opl = dom_ops + for is_dynamic, data in self._trace: + if data == "|" and opl is dom_ops: + opl = url_ops + continue + # this seems like a silly case to ever come up but: + # if a default is given for a value that appears in the rule, + # resolve it to a constant ahead of time + if is_dynamic and data in defaults: + data = self._converters[data].to_url(defaults[data]) + opl.append((False, data)) + elif not is_dynamic: + # safe = https://url.spec.whatwg.org/#url-path-segment-string + opl.append( + ( + False, + quote(data, safe="!$&'()*+,/:;=@", encoding=self.map.charset), + ) + ) + else: + opl.append((True, data)) + + def _convert(elem: str) -> ast.stmt: + ret = _prefix_names(_CALL_CONVERTER_CODE_FMT.format(elem=elem)) + ret.args = [ast.Name(str(elem), ast.Load())] # type: ignore # str for py2 + return ret + + def _parts(ops: list[tuple[bool, str]]) -> list[ast.AST]: + parts = [ + _convert(elem) if is_dynamic else ast.Constant(elem) + for is_dynamic, elem in ops + ] + parts = parts or [ast.Constant("")] + # constant fold + ret = [parts[0]] + for p in parts[1:]: + if isinstance(p, ast.Constant) and isinstance(ret[-1], ast.Constant): + ret[-1] = ast.Constant(ret[-1].value + p.value) + else: + ret.append(p) + return ret + + dom_parts = _parts(dom_ops) + url_parts = _parts(url_ops) + if not append_unknown: + body = [] + else: + body = [_IF_KWARGS_URL_ENCODE_AST] + url_parts.extend(_URL_ENCODE_AST_NAMES) + + def _join(parts: list[ast.AST]) -> ast.AST: + if len(parts) == 1: # shortcut + return parts[0] + return ast.JoinedStr(parts) + + body.append( + ast.Return(ast.Tuple([_join(dom_parts), _join(url_parts)], ast.Load())) + ) + + pargs = [ + elem + for is_dynamic, elem in dom_ops + url_ops + if is_dynamic and elem not in defaults + 
] + kargs = [str(k) for k in defaults] + + func_ast: ast.FunctionDef = _prefix_names("def _(): pass") # type: ignore + func_ast.name = f"<builder:{self.rule!r}>" + func_ast.args.args.append(ast.arg(".self", None)) + for arg in pargs + kargs: + func_ast.args.args.append(ast.arg(arg, None)) + func_ast.args.kwarg = ast.arg(".kwargs", None) + for _ in kargs: + func_ast.args.defaults.append(ast.Constant("")) + func_ast.body = body + + # Use `ast.parse` instead of `ast.Module` for better portability, since the + # signature of `ast.Module` can change. + module = ast.parse("") + module.body = [func_ast] + + # mark everything as on line 1, offset 0 + # less error-prone than `ast.fix_missing_locations` + # bad line numbers cause an assert to fail in debug builds + for node in ast.walk(module): + if "lineno" in node._attributes: + node.lineno = 1 + if "end_lineno" in node._attributes: + node.end_lineno = node.lineno + if "col_offset" in node._attributes: + node.col_offset = 0 + if "end_col_offset" in node._attributes: + node.end_col_offset = node.col_offset + + code = compile(module, "<werkzeug routing>", "exec") + return self._get_func_code(code, func_ast.name) + + def build( + self, values: t.Mapping[str, t.Any], append_unknown: bool = True + ) -> tuple[str, str] | None: + """Assembles the relative url for that rule and the subdomain. + If building doesn't work for some reasons `None` is returned. + + :internal: + """ + try: + if append_unknown: + return self._build_unknown(**values) + else: + return self._build(**values) + except ValidationError: + return None + + def provides_defaults_for(self, rule: Rule) -> bool: + """Check if this rule has defaults for a given rule. + + :internal: + """ + return bool( + not self.build_only + and self.defaults + and self.endpoint == rule.endpoint + and self != rule + and self.arguments == rule.arguments + ) + + def suitable_for( + self, values: t.Mapping[str, t.Any], method: str | None = None + ) -> bool: + """Check if the dict of values has enough data for url generation. + + :internal: + """ + # if a method was given explicitly and that method is not supported + # by this rule, this rule is not suitable. + if ( + method is not None + and self.methods is not None + and method not in self.methods + ): + return False + + defaults = self.defaults or () + + # all arguments required must be either in the defaults dict or + # the value dictionary otherwise it's not suitable + for key in self.arguments: + if key not in defaults and key not in values: + return False + + # in case defaults are given we ensure that either the value was + # skipped or the value is the same as the default value. + if defaults: + for key, value in defaults.items(): + if key in values and value != values[key]: + return False + + return True + + def build_compare_key(self) -> tuple[int, int, int]: + """The build compare key for sorting. 
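+
+ Sorting ascending on this key places non-alias rules first and,
+ among those, rules with more arguments and more defaults first;
+ e.g. a non-alias rule with two arguments and no defaults yields
+ ``(0, -2, 0)``.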
+ + :internal: + """ + return (1 if self.alias else 0, -len(self.arguments), -len(self.defaults or ())) + + def __eq__(self, other: object) -> bool: + return isinstance(other, type(self)) and self._trace == other._trace + + __hash__ = None # type: ignore + + def __str__(self) -> str: + return self.rule + + def __repr__(self) -> str: + if self.map is None: + return f"<{type(self).__name__} (unbound)>" + parts = [] + for is_dynamic, data in self._trace: + if is_dynamic: + parts.append(f"<{data}>") + else: + parts.append(data) + parts = "".join(parts).lstrip("|") + methods = f" ({', '.join(self.methods)})" if self.methods is not None else "" + return f"<{type(self).__name__} {parts!r}{methods} -> {self.endpoint}>" diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__init__.py b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ed99971344f116db34e27e4aa92e3bc5cfead299 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/http.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/http.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6983b225db8b5d0ea84780782e5f1743194a78c3 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/http.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/multipart.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/multipart.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e1ee6f8bdcc9f5b3fbef915447a4167169752f4c Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/multipart.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/request.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/request.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..07d20c3f6f1851fa6fd38dffbcb9a9967aac774b Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/request.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/response.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/response.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..860bc68e4b948b927a2a05e2ab8a0ac4d84b9df3 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/response.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/utils.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/utils.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c8abccb47e42f586e1597ec5476f978bd8dce06f Binary files /dev/null and 
b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/__pycache__/utils.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/sansio/http.py b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/http.py new file mode 100644 index 0000000000000000000000000000000000000000..21a6197203883bc392b907a434fc0c3c84013116 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/http.py @@ -0,0 +1,202 @@ +from __future__ import annotations + +import re +import typing as t +import warnings +from datetime import datetime + +from .._internal import _dt_as_utc +from ..http import generate_etag +from ..http import parse_date +from ..http import parse_etags +from ..http import parse_if_range_header +from ..http import unquote_etag + +_etag_re = re.compile(r'([Ww]/)?(?:"(.*?)"|(.*?))(?:\s*,\s*|$)') + + +def is_resource_modified( + http_range: str | None = None, + http_if_range: str | None = None, + http_if_modified_since: str | None = None, + http_if_none_match: str | None = None, + http_if_match: str | None = None, + etag: str | None = None, + data: bytes | None = None, + last_modified: datetime | str | None = None, + ignore_if_range: bool = True, +) -> bool: + """Convenience method for conditional requests. + :param http_range: Range HTTP header + :param http_if_range: If-Range HTTP header + :param http_if_modified_since: If-Modified-Since HTTP header + :param http_if_none_match: If-None-Match HTTP header + :param http_if_match: If-Match HTTP header + :param etag: the etag for the response for comparison. + :param data: or alternatively the data of the response to automatically + generate an etag using :func:`generate_etag`. + :param last_modified: an optional date of the last modification. + :param ignore_if_range: If `False`, `If-Range` header will be taken into + account. + :return: `True` if the resource was modified, otherwise `False`. + + .. versionadded:: 2.2 + """ + if etag is None and data is not None: + etag = generate_etag(data) + elif data is not None: + raise TypeError("both data and etag given") + + unmodified = False + if isinstance(last_modified, str): + last_modified = parse_date(last_modified) + + # HTTP doesn't use microsecond, remove it to avoid false positive + # comparisons. Mark naive datetimes as UTC. + if last_modified is not None: + last_modified = _dt_as_utc(last_modified.replace(microsecond=0)) + + if_range = None + if not ignore_if_range and http_range is not None: + # https://tools.ietf.org/html/rfc7233#section-3.2 + # A server MUST ignore an If-Range header field received in a request + # that does not contain a Range header field. 
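+ # For illustration: a request that sends "If-Range" without any
+ # "Range" header never reaches parse_if_range_header below, so its
+ # "If-Range" value has no effect on the result.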
+ if_range = parse_if_range_header(http_if_range) + + if if_range is not None and if_range.date is not None: + modified_since: datetime | None = if_range.date + else: + modified_since = parse_date(http_if_modified_since) + + if modified_since and last_modified and last_modified <= modified_since: + unmodified = True + + if etag: + etag, _ = unquote_etag(etag) + etag = t.cast(str, etag) + + if if_range is not None and if_range.etag is not None: + unmodified = parse_etags(if_range.etag).contains(etag) + else: + if_none_match = parse_etags(http_if_none_match) + if if_none_match: + # https://tools.ietf.org/html/rfc7232#section-3.2 + # "A recipient MUST use the weak comparison function when comparing + # entity-tags for If-None-Match" + unmodified = if_none_match.contains_weak(etag) + + # https://tools.ietf.org/html/rfc7232#section-3.1 + # "Origin server MUST use the strong comparison function when + # comparing entity-tags for If-Match" + if_match = parse_etags(http_if_match) + if if_match: + unmodified = not if_match.is_strong(etag) + + return not unmodified + + +_cookie_re = re.compile( + r""" + ([^=;]*) + (?:\s*=\s* + ( + "(?:[^\\"]|\\.)*" + | + .*? + ) + )? + \s*;\s* + """, + flags=re.ASCII | re.VERBOSE, +) +_cookie_unslash_re = re.compile(rb"\\([0-3][0-7]{2}|.)") + + +def _cookie_unslash_replace(m: t.Match[bytes]) -> bytes: + v = m.group(1) + + if len(v) == 1: + return v + + return int(v, 8).to_bytes(1, "big") + + +def parse_cookie( + cookie: str | None = None, + charset: str | None = None, + errors: str | None = None, + cls: type[ds.MultiDict] | None = None, +) -> ds.MultiDict[str, str]: + """Parse a cookie from a string. + + The same key can be provided multiple times, the values are stored + in-order. The default :class:`MultiDict` will have the first value + first, and all values can be retrieved with + :meth:`MultiDict.getlist`. + + :param cookie: The cookie header as a string. + :param cls: A dict-like class to store the parsed cookies in. + Defaults to :class:`MultiDict`. + + .. versionchanged:: 2.3 + Passing bytes, and the ``charset`` and ``errors`` parameters, are deprecated and + will be removed in Werkzeug 3.0. + + .. versionadded:: 2.2 + """ + if cls is None: + cls = ds.MultiDict + + if isinstance(cookie, bytes): + warnings.warn( + "The 'cookie' parameter must be a string. Passing bytes is deprecated and" + " will not be supported in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + cookie = cookie.decode() + + if charset is not None: + warnings.warn( + "The 'charset' parameter is deprecated and will be removed in Werkzeug 3.0", + DeprecationWarning, + stacklevel=2, + ) + else: + charset = "utf-8" + + if errors is not None: + warnings.warn( + "The 'errors' parameter is deprecated and will be removed in Werkzeug 3.0", + DeprecationWarning, + stacklevel=2, + ) + else: + errors = "replace" + + if not cookie: + return cls() + + cookie = f"{cookie};" + out = [] + + for ck, cv in _cookie_re.findall(cookie): + ck = ck.strip() + cv = cv.strip() + + if not ck: + continue + + if len(cv) >= 2 and cv[0] == cv[-1] == '"': + # Work with bytes here, since a UTF-8 character could be multiple bytes. + cv = _cookie_unslash_re.sub( + _cookie_unslash_replace, cv[1:-1].encode() + ).decode(charset, errors) + + out.append((ck, cv)) + + return cls(out) + + +# circular dependencies +from .. 
import datastructures as ds diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/sansio/multipart.py b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/multipart.py new file mode 100644 index 0000000000000000000000000000000000000000..380993af72794afa4eac3c2770f5805b38f497fe --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/multipart.py @@ -0,0 +1,313 @@ +from __future__ import annotations + +import re +import typing as t +from dataclasses import dataclass +from enum import auto +from enum import Enum + +from ..datastructures import Headers +from ..exceptions import RequestEntityTooLarge +from ..http import parse_options_header + + +class Event: + pass + + +@dataclass(frozen=True) +class Preamble(Event): + data: bytes + + +@dataclass(frozen=True) +class Field(Event): + name: str + headers: Headers + + +@dataclass(frozen=True) +class File(Event): + name: str + filename: str + headers: Headers + + +@dataclass(frozen=True) +class Data(Event): + data: bytes + more_data: bool + + +@dataclass(frozen=True) +class Epilogue(Event): + data: bytes + + +class NeedData(Event): + pass + + +NEED_DATA = NeedData() + + +class State(Enum): + PREAMBLE = auto() + PART = auto() + DATA = auto() + DATA_START = auto() + EPILOGUE = auto() + COMPLETE = auto() + + +# Multipart line breaks MUST be CRLF (\r\n) by RFC-7578, except that +# many implementations break this and either use CR or LF alone. +LINE_BREAK = b"(?:\r\n|\n|\r)" +BLANK_LINE_RE = re.compile(b"(?:\r\n\r\n|\r\r|\n\n)", re.MULTILINE) +LINE_BREAK_RE = re.compile(LINE_BREAK, re.MULTILINE) +# Header values can be continued via a space or tab after the linebreak, as +# per RFC2231 +HEADER_CONTINUATION_RE = re.compile(b"%s[ \t]" % LINE_BREAK, re.MULTILINE) +# This must be long enough to contain any line breaks plus any +# additional boundary markers (--) such that they will be found in a +# subsequent search +SEARCH_EXTRA_LENGTH = 8 + + +class MultipartDecoder: + """Decodes a multipart message as bytes into Python events. + + The part data is returned as available to allow the caller to save + the data from memory to disk, if desired. + """ + + def __init__( + self, + boundary: bytes, + max_form_memory_size: int | None = None, + *, + max_parts: int | None = None, + ) -> None: + self.buffer = bytearray() + self.complete = False + self.max_form_memory_size = max_form_memory_size + self.max_parts = max_parts + self.state = State.PREAMBLE + self.boundary = boundary + + # Note in the below \h i.e. horizontal whitespace is used + # as [^\S\n\r] as \h isn't supported in python. + + # The preamble must end with a boundary where the boundary is + # prefixed by a line break, RFC2046. Except that many + # implementations including Werkzeug's tests omit the line + # break prefix. In addition the first boundary could be the + # epilogue boundary (for empty form-data) hence the matching + # group to understand if it is an epilogue boundary. + self.preamble_re = re.compile( + rb"%s?--%s(--[^\S\n\r]*%s?|[^\S\n\r]*%s)" + % (LINE_BREAK, re.escape(boundary), LINE_BREAK, LINE_BREAK), + re.MULTILINE, + ) + # A boundary must include a line break prefix and suffix, and + # may include trailing whitespace. In addition the boundary + # could be the epilogue boundary hence the matching group to + # understand if it is an epilogue boundary. 
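+ # For illustration, with boundary b"xyz" this matches e.g.
+ # b"\r\n--xyz\r\n" before a further part and b"\r\n--xyz--\r\n" for
+ # the epilogue, where group(1) starts with b"--".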
+ self.boundary_re = re.compile( + rb"%s--%s(--[^\S\n\r]*%s?|[^\S\n\r]*%s)" + % (LINE_BREAK, re.escape(boundary), LINE_BREAK, LINE_BREAK), + re.MULTILINE, + ) + self._search_position = 0 + self._parts_decoded = 0 + + def last_newline(self, data: bytes) -> int: + try: + last_nl = data.rindex(b"\n") + except ValueError: + last_nl = len(data) + try: + last_cr = data.rindex(b"\r") + except ValueError: + last_cr = len(data) + + return min(last_nl, last_cr) + + def receive_data(self, data: bytes | None) -> None: + if data is None: + self.complete = True + elif ( + self.max_form_memory_size is not None + and len(self.buffer) + len(data) > self.max_form_memory_size + ): + raise RequestEntityTooLarge() + else: + self.buffer.extend(data) + + def next_event(self) -> Event: + event: Event = NEED_DATA + + if self.state == State.PREAMBLE: + match = self.preamble_re.search(self.buffer, self._search_position) + if match is not None: + if match.group(1).startswith(b"--"): + self.state = State.EPILOGUE + else: + self.state = State.PART + data = bytes(self.buffer[: match.start()]) + del self.buffer[: match.end()] + event = Preamble(data=data) + self._search_position = 0 + else: + # Update the search start position to be equal to the + # current buffer length (already searched) minus a + # safe buffer for part of the search target. + self._search_position = max( + 0, len(self.buffer) - len(self.boundary) - SEARCH_EXTRA_LENGTH + ) + + elif self.state == State.PART: + match = BLANK_LINE_RE.search(self.buffer, self._search_position) + if match is not None: + headers = self._parse_headers(self.buffer[: match.start()]) + # The final header ends with a single CRLF, however a + # blank line indicates the start of the + # body. Therefore the end is after the first CRLF. + headers_end = (match.start() + match.end()) // 2 + del self.buffer[:headers_end] + + if "content-disposition" not in headers: + raise ValueError("Missing Content-Disposition header") + + disposition, extra = parse_options_header( + headers["content-disposition"] + ) + name = t.cast(str, extra.get("name")) + filename = extra.get("filename") + if filename is not None: + event = File( + filename=filename, + headers=headers, + name=name, + ) + else: + event = Field( + headers=headers, + name=name, + ) + self.state = State.DATA_START + self._search_position = 0 + self._parts_decoded += 1 + + if self.max_parts is not None and self._parts_decoded > self.max_parts: + raise RequestEntityTooLarge() + else: + # Update the search start position to be equal to the + # current buffer length (already searched) minus a + # safe buffer for part of the search target. 
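+ # For illustration: only the tail of the buffer is re-scanned on
+ # the next call, so a blank line split across two receive_data()
+ # calls is still found without re-searching everything already seen.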
+ self._search_position = max(0, len(self.buffer) - SEARCH_EXTRA_LENGTH) + + elif self.state == State.DATA_START: + data, del_index, more_data = self._parse_data(self.buffer, start=True) + del self.buffer[:del_index] + event = Data(data=data, more_data=more_data) + if more_data: + self.state = State.DATA + + elif self.state == State.DATA: + data, del_index, more_data = self._parse_data(self.buffer, start=False) + del self.buffer[:del_index] + if data or not more_data: + event = Data(data=data, more_data=more_data) + + elif self.state == State.EPILOGUE and self.complete: + event = Epilogue(data=bytes(self.buffer)) + del self.buffer[:] + self.state = State.COMPLETE + + if self.complete and isinstance(event, NeedData): + raise ValueError(f"Invalid form-data cannot parse beyond {self.state}") + + return event + + def _parse_headers(self, data: bytes) -> Headers: + headers: list[tuple[str, str]] = [] + # Merge the continued headers into one line + data = HEADER_CONTINUATION_RE.sub(b" ", data) + # Now there is one header per line + for line in data.splitlines(): + line = line.strip() + + if line != b"": + name, _, value = line.decode().partition(":") + headers.append((name.strip(), value.strip())) + return Headers(headers) + + def _parse_data(self, data: bytes, *, start: bool) -> tuple[bytes, int, bool]: + # Body parts must start with CRLF (or CR or LF) + if start: + match = LINE_BREAK_RE.match(data) + data_start = t.cast(t.Match[bytes], match).end() + else: + data_start = 0 + + if self.buffer.find(b"--" + self.boundary) == -1: + # No complete boundary in the buffer, but there may be + # a partial boundary at the end. As the boundary + # starts with either a nl or cr find the earliest and + # return up to that as data. + data_end = del_index = self.last_newline(data[data_start:]) + data_start + more_data = True + else: + match = self.boundary_re.search(data) + if match is not None: + if match.group(1).startswith(b"--"): + self.state = State.EPILOGUE + else: + self.state = State.PART + data_end = match.start() + del_index = match.end() + else: + data_end = del_index = self.last_newline(data[data_start:]) + data_start + more_data = match is None + + return bytes(data[data_start:data_end]), del_index, more_data + + +class MultipartEncoder: + def __init__(self, boundary: bytes) -> None: + self.boundary = boundary + self.state = State.PREAMBLE + + def send_event(self, event: Event) -> bytes: + if isinstance(event, Preamble) and self.state == State.PREAMBLE: + self.state = State.PART + return event.data + elif isinstance(event, (Field, File)) and self.state in { + State.PREAMBLE, + State.PART, + State.DATA, + }: + data = b"\r\n--" + self.boundary + b"\r\n" + data += b'Content-Disposition: form-data; name="%s"' % event.name.encode() + if isinstance(event, File): + data += b'; filename="%s"' % event.filename.encode() + data += b"\r\n" + for name, value in t.cast(Field, event).headers: + if name.lower() != "content-disposition": + data += f"{name}: {value}\r\n".encode() + self.state = State.DATA_START + return data + elif isinstance(event, Data) and self.state == State.DATA_START: + self.state = State.DATA + if len(event.data) > 0: + return b"\r\n" + event.data + else: + return event.data + elif isinstance(event, Data) and self.state == State.DATA: + return event.data + elif isinstance(event, Epilogue): + self.state = State.COMPLETE + return b"\r\n--" + self.boundary + b"--\r\n" + event.data + else: + raise ValueError(f"Cannot generate {event} in state: {self.state}") diff --git 
a/backend/test/lib/python3.8/site-packages/werkzeug/sansio/request.py b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/request.py new file mode 100644 index 0000000000000000000000000000000000000000..0bcda90b2ca14ceac5b6eb8647bea197ccbec459 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/request.py @@ -0,0 +1,659 @@ +from __future__ import annotations + +import typing as t +import warnings +from datetime import datetime +from urllib.parse import parse_qsl + +from ..datastructures import Accept +from ..datastructures import Authorization +from ..datastructures import CharsetAccept +from ..datastructures import ETags +from ..datastructures import Headers +from ..datastructures import HeaderSet +from ..datastructures import IfRange +from ..datastructures import ImmutableList +from ..datastructures import ImmutableMultiDict +from ..datastructures import LanguageAccept +from ..datastructures import MIMEAccept +from ..datastructures import MultiDict +from ..datastructures import Range +from ..datastructures import RequestCacheControl +from ..http import parse_accept_header +from ..http import parse_cache_control_header +from ..http import parse_date +from ..http import parse_etags +from ..http import parse_if_range_header +from ..http import parse_list_header +from ..http import parse_options_header +from ..http import parse_range_header +from ..http import parse_set_header +from ..user_agent import UserAgent +from ..utils import cached_property +from ..utils import header_property +from .http import parse_cookie +from .utils import get_content_length +from .utils import get_current_url +from .utils import get_host + + +class Request: + """Represents the non-IO parts of a HTTP request, including the + method, URL info, and headers. + + This class is not meant for general use. It should only be used when + implementing WSGI, ASGI, or another HTTP application spec. Werkzeug + provides a WSGI implementation at :cls:`werkzeug.wrappers.Request`. + + :param method: The method the request was made with, such as + ``GET``. + :param scheme: The URL scheme of the protocol the request used, such + as ``https`` or ``wss``. + :param server: The address of the server. ``(host, port)``, + ``(path, None)`` for unix sockets, or ``None`` if not known. + :param root_path: The prefix that the application is mounted under. + This is prepended to generated URLs, but is not part of route + matching. + :param path: The path part of the URL after ``root_path``. + :param query_string: The part of the URL after the "?". + :param headers: The headers received with the request. + :param remote_addr: The address of the client sending the request. + + .. versionadded:: 2.0 + """ + + _charset: str + + @property + def charset(self) -> str: + """The charset used to decode body, form, and cookie data. Defaults to UTF-8. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. Request data must always be UTF-8. + """ + warnings.warn( + "The 'charset' attribute is deprecated and will not be used in Werkzeug" + " 2.4. Interpreting bytes as text in body, form, and cookie data will" + " always use UTF-8.", + DeprecationWarning, + stacklevel=2, + ) + return self._charset + + @charset.setter + def charset(self, value: str) -> None: + warnings.warn( + "The 'charset' attribute is deprecated and will not be used in Werkzeug" + " 2.4. 
Interpreting bytes as text in body, form, and cookie data will" + " always use UTF-8.", + DeprecationWarning, + stacklevel=2, + ) + self._charset = value + + _encoding_errors: str + + @property + def encoding_errors(self) -> str: + """How errors when decoding bytes are handled. Defaults to "replace". + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. + """ + warnings.warn( + "The 'encoding_errors' attribute is deprecated and will not be used in" + " Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + return self._encoding_errors + + @encoding_errors.setter + def encoding_errors(self, value: str) -> None: + warnings.warn( + "The 'encoding_errors' attribute is deprecated and will not be used in" + " Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + self._encoding_errors = value + + _url_charset: str + + @property + def url_charset(self) -> str: + """The charset to use when decoding percent-encoded bytes in :attr:`args`. + Defaults to the value of :attr:`charset`, which defaults to UTF-8. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. Percent-encoded bytes must always be UTF-8. + + .. versionadded:: 0.6 + """ + warnings.warn( + "The 'url_charset' attribute is deprecated and will not be used in" + " Werkzeug 3.0. Percent-encoded bytes must always be UTF-8.", + DeprecationWarning, + stacklevel=2, + ) + return self._url_charset + + @url_charset.setter + def url_charset(self, value: str) -> None: + warnings.warn( + "The 'url_charset' attribute is deprecated and will not be used in" + " Werkzeug 3.0. Percent-encoded bytes must always be UTF-8.", + DeprecationWarning, + stacklevel=2, + ) + self._url_charset = value + + #: the class to use for `args` and `form`. The default is an + #: :class:`~werkzeug.datastructures.ImmutableMultiDict` which supports + #: multiple values per key. alternatively it makes sense to use an + #: :class:`~werkzeug.datastructures.ImmutableOrderedMultiDict` which + #: preserves order or a :class:`~werkzeug.datastructures.ImmutableDict` + #: which is the fastest but only remembers the last key. It is also + #: possible to use mutable structures, but this is not recommended. + #: + #: .. versionadded:: 0.6 + parameter_storage_class: type[MultiDict] = ImmutableMultiDict + + #: The type to be used for dict values from the incoming WSGI + #: environment. (For example for :attr:`cookies`.) By default an + #: :class:`~werkzeug.datastructures.ImmutableMultiDict` is used. + #: + #: .. versionchanged:: 1.0.0 + #: Changed to ``ImmutableMultiDict`` to support multiple values. + #: + #: .. versionadded:: 0.6 + dict_storage_class: type[MultiDict] = ImmutableMultiDict + + #: the type to be used for list values from the incoming WSGI environment. + #: By default an :class:`~werkzeug.datastructures.ImmutableList` is used + #: (for example for :attr:`access_list`). + #: + #: .. versionadded:: 0.6 + list_storage_class: type[t.List] = ImmutableList + + user_agent_class: type[UserAgent] = UserAgent + """The class used and returned by the :attr:`user_agent` property to + parse the header. Defaults to + :class:`~werkzeug.user_agent.UserAgent`, which does no parsing. An + extension can provide a subclass that uses a parser to provide other + data. + + .. versionadded:: 2.0 + """ + + #: Valid host names when handling requests. By default all hosts are + #: trusted, which means that whatever the client says the host is + #: will be accepted. 
+ #: + #: Because ``Host`` and ``X-Forwarded-Host`` headers can be set to + #: any value by a malicious client, it is recommended to either set + #: this property or implement similar validation in the proxy (if + #: the application is being run behind one). + #: + #: .. versionadded:: 0.9 + trusted_hosts: list[str] | None = None + + def __init__( + self, + method: str, + scheme: str, + server: tuple[str, int | None] | None, + root_path: str, + path: str, + query_string: bytes, + headers: Headers, + remote_addr: str | None, + ) -> None: + if not isinstance(type(self).charset, property): + warnings.warn( + "The 'charset' attribute is deprecated and will not be used in Werkzeug" + " 2.4. Interpreting bytes as text in body, form, and cookie data will" + " always use UTF-8.", + DeprecationWarning, + stacklevel=2, + ) + self._charset = self.charset + else: + self._charset = "utf-8" + + if not isinstance(type(self).encoding_errors, property): + warnings.warn( + "The 'encoding_errors' attribute is deprecated and will not be used in" + " Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + self._encoding_errors = self.encoding_errors + else: + self._encoding_errors = "replace" + + if not isinstance(type(self).url_charset, property): + warnings.warn( + "The 'url_charset' attribute is deprecated and will not be used in" + " Werkzeug 3.0. Percent-encoded bytes must always be UTF-8.", + DeprecationWarning, + stacklevel=2, + ) + self._url_charset = self.url_charset + else: + self._url_charset = self._charset + + #: The method the request was made with, such as ``GET``. + self.method = method.upper() + #: The URL scheme of the protocol the request used, such as + #: ``https`` or ``wss``. + self.scheme = scheme + #: The address of the server. ``(host, port)``, ``(path, None)`` + #: for unix sockets, or ``None`` if not known. + self.server = server + #: The prefix that the application is mounted under, without a + #: trailing slash. :attr:`path` comes after this. + self.root_path = root_path.rstrip("/") + #: The path part of the URL after :attr:`root_path`. This is the + #: path used for routing within the application. + self.path = "/" + path.lstrip("/") + #: The part of the URL after the "?". This is the raw value, use + #: :attr:`args` for the parsed values. + self.query_string = query_string + #: The headers received with the request. + self.headers = headers + #: The address of the client sending the request. + self.remote_addr = remote_addr + + def __repr__(self) -> str: + try: + url = self.url + except Exception as e: + url = f"(invalid URL: {e})" + + return f"<{type(self).__name__} {url!r} [{self.method}]>" + + @cached_property + def args(self) -> MultiDict[str, str]: + """The parsed URL parameters (the part in the URL after the question + mark). + + By default an + :class:`~werkzeug.datastructures.ImmutableMultiDict` + is returned from this function. This can be changed by setting + :attr:`parameter_storage_class` to a different type. This might + be necessary if the order of the form data is important. + + .. versionchanged:: 2.3 + Invalid bytes remain percent encoded. + """ + return self.parameter_storage_class( + parse_qsl( + self.query_string.decode(), + keep_blank_values=True, + encoding=self._url_charset, + errors="werkzeug.url_quote", + ) + ) + + @cached_property + def access_route(self) -> list[str]: + """If a forwarded header exists this is a list of all ip addresses + from the client ip to the last proxy server. 
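+
+ For example, a request carrying ``X-Forwarded-For: client, proxy1,
+ proxy2`` yields ``['client', 'proxy1', 'proxy2']``; the values are
+ taken as sent, so they are only trustworthy behind a proxy you
+ control.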
+ """ + if "X-Forwarded-For" in self.headers: + return self.list_storage_class( + parse_list_header(self.headers["X-Forwarded-For"]) + ) + elif self.remote_addr is not None: + return self.list_storage_class([self.remote_addr]) + return self.list_storage_class() + + @cached_property + def full_path(self) -> str: + """Requested path, including the query string.""" + return f"{self.path}?{self.query_string.decode()}" + + @property + def is_secure(self) -> bool: + """``True`` if the request was made with a secure protocol + (HTTPS or WSS). + """ + return self.scheme in {"https", "wss"} + + @cached_property + def url(self) -> str: + """The full request URL with the scheme, host, root path, path, + and query string.""" + return get_current_url( + self.scheme, self.host, self.root_path, self.path, self.query_string + ) + + @cached_property + def base_url(self) -> str: + """Like :attr:`url` but without the query string.""" + return get_current_url(self.scheme, self.host, self.root_path, self.path) + + @cached_property + def root_url(self) -> str: + """The request URL scheme, host, and root path. This is the root + that the application is accessed from. + """ + return get_current_url(self.scheme, self.host, self.root_path) + + @cached_property + def host_url(self) -> str: + """The request URL scheme and host only.""" + return get_current_url(self.scheme, self.host) + + @cached_property + def host(self) -> str: + """The host name the request was made to, including the port if + it's non-standard. Validated with :attr:`trusted_hosts`. + """ + return get_host( + self.scheme, self.headers.get("host"), self.server, self.trusted_hosts + ) + + @cached_property + def cookies(self) -> ImmutableMultiDict[str, str]: + """A :class:`dict` with the contents of all cookies transmitted with + the request.""" + wsgi_combined_cookie = ";".join(self.headers.getlist("Cookie")) + charset = self._charset if self._charset != "utf-8" else None + errors = self._encoding_errors if self._encoding_errors != "replace" else None + return parse_cookie( # type: ignore + wsgi_combined_cookie, + charset=charset, + errors=errors, + cls=self.dict_storage_class, + ) + + # Common Descriptors + + content_type = header_property[str]( + "Content-Type", + doc="""The Content-Type entity-header field indicates the media + type of the entity-body sent to the recipient or, in the case of + the HEAD method, the media type that would have been sent had + the request been a GET.""", + read_only=True, + ) + + @cached_property + def content_length(self) -> int | None: + """The Content-Length entity-header field indicates the size of the + entity-body in bytes or, in the case of the HEAD method, the size of + the entity-body that would have been sent had the request been a + GET. + """ + return get_content_length( + http_content_length=self.headers.get("Content-Length"), + http_transfer_encoding=self.headers.get("Transfer-Encoding"), + ) + + content_encoding = header_property[str]( + "Content-Encoding", + doc="""The Content-Encoding entity-header field is used as a + modifier to the media-type. When present, its value indicates + what additional content codings have been applied to the + entity-body, and thus what decoding mechanisms must be applied + in order to obtain the media-type referenced by the Content-Type + header field. + + .. 
versionadded:: 0.9""", + read_only=True, + ) + content_md5 = header_property[str]( + "Content-MD5", + doc="""The Content-MD5 entity-header field, as defined in + RFC 1864, is an MD5 digest of the entity-body for the purpose of + providing an end-to-end message integrity check (MIC) of the + entity-body. (Note: a MIC is good for detecting accidental + modification of the entity-body in transit, but is not proof + against malicious attacks.) + + .. versionadded:: 0.9""", + read_only=True, + ) + referrer = header_property[str]( + "Referer", + doc="""The Referer[sic] request-header field allows the client + to specify, for the server's benefit, the address (URI) of the + resource from which the Request-URI was obtained (the + "referrer", although the header field is misspelled).""", + read_only=True, + ) + date = header_property( + "Date", + None, + parse_date, + doc="""The Date general-header field represents the date and + time at which the message was originated, having the same + semantics as orig-date in RFC 822. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """, + read_only=True, + ) + max_forwards = header_property( + "Max-Forwards", + None, + int, + doc="""The Max-Forwards request-header field provides a + mechanism with the TRACE and OPTIONS methods to limit the number + of proxies or gateways that can forward the request to the next + inbound server.""", + read_only=True, + ) + + def _parse_content_type(self) -> None: + if not hasattr(self, "_parsed_content_type"): + self._parsed_content_type = parse_options_header( + self.headers.get("Content-Type", "") + ) + + @property + def mimetype(self) -> str: + """Like :attr:`content_type`, but without parameters (eg, without + charset, type etc.) and always lowercase. For example if the content + type is ``text/HTML; charset=utf-8`` the mimetype would be + ``'text/html'``. + """ + self._parse_content_type() + return self._parsed_content_type[0].lower() + + @property + def mimetype_params(self) -> dict[str, str]: + """The mimetype parameters as dict. For example if the content + type is ``text/html; charset=utf-8`` the params would be + ``{'charset': 'utf-8'}``. + """ + self._parse_content_type() + return self._parsed_content_type[1] + + @cached_property + def pragma(self) -> HeaderSet: + """The Pragma general-header field is used to include + implementation-specific directives that might apply to any recipient + along the request/response chain. All pragma directives specify + optional behavior from the viewpoint of the protocol; however, some + systems MAY require that behavior be consistent with the directives. + """ + return parse_set_header(self.headers.get("Pragma", "")) + + # Accept + + @cached_property + def accept_mimetypes(self) -> MIMEAccept: + """List of mimetypes this client supports as + :class:`~werkzeug.datastructures.MIMEAccept` object. + """ + return parse_accept_header(self.headers.get("Accept"), MIMEAccept) + + @cached_property + def accept_charsets(self) -> CharsetAccept: + """List of charsets this client supports as + :class:`~werkzeug.datastructures.CharsetAccept` object. + """ + return parse_accept_header(self.headers.get("Accept-Charset"), CharsetAccept) + + @cached_property + def accept_encodings(self) -> Accept: + """List of encodings this client accepts. Encodings in a HTTP term + are compression encodings such as gzip. For charsets have a look at + :attr:`accept_charset`. 
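+
+        For example (illustrative):
+
+        .. code-block:: python
+
+            # Accept-Encoding: gzip, br;q=0.8
+            request.accept_encodings.best_match(["gzip", "br"])  # 'gzip'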
+ """ + return parse_accept_header(self.headers.get("Accept-Encoding")) + + @cached_property + def accept_languages(self) -> LanguageAccept: + """List of languages this client accepts as + :class:`~werkzeug.datastructures.LanguageAccept` object. + + .. versionchanged 0.5 + In previous versions this was a regular + :class:`~werkzeug.datastructures.Accept` object. + """ + return parse_accept_header(self.headers.get("Accept-Language"), LanguageAccept) + + # ETag + + @cached_property + def cache_control(self) -> RequestCacheControl: + """A :class:`~werkzeug.datastructures.RequestCacheControl` object + for the incoming cache control headers. + """ + cache_control = self.headers.get("Cache-Control") + return parse_cache_control_header(cache_control, None, RequestCacheControl) + + @cached_property + def if_match(self) -> ETags: + """An object containing all the etags in the `If-Match` header. + + :rtype: :class:`~werkzeug.datastructures.ETags` + """ + return parse_etags(self.headers.get("If-Match")) + + @cached_property + def if_none_match(self) -> ETags: + """An object containing all the etags in the `If-None-Match` header. + + :rtype: :class:`~werkzeug.datastructures.ETags` + """ + return parse_etags(self.headers.get("If-None-Match")) + + @cached_property + def if_modified_since(self) -> datetime | None: + """The parsed `If-Modified-Since` header as a datetime object. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """ + return parse_date(self.headers.get("If-Modified-Since")) + + @cached_property + def if_unmodified_since(self) -> datetime | None: + """The parsed `If-Unmodified-Since` header as a datetime object. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """ + return parse_date(self.headers.get("If-Unmodified-Since")) + + @cached_property + def if_range(self) -> IfRange: + """The parsed ``If-Range`` header. + + .. versionchanged:: 2.0 + ``IfRange.date`` is timezone-aware. + + .. versionadded:: 0.7 + """ + return parse_if_range_header(self.headers.get("If-Range")) + + @cached_property + def range(self) -> Range | None: + """The parsed `Range` header. + + .. versionadded:: 0.7 + + :rtype: :class:`~werkzeug.datastructures.Range` + """ + return parse_range_header(self.headers.get("Range")) + + # User Agent + + @cached_property + def user_agent(self) -> UserAgent: + """The user agent. Use ``user_agent.string`` to get the header + value. Set :attr:`user_agent_class` to a subclass of + :class:`~werkzeug.user_agent.UserAgent` to provide parsing for + the other properties or other extended data. + + .. versionchanged:: 2.1 + The built-in parser was removed. Set ``user_agent_class`` to a ``UserAgent`` + subclass to parse data from the string. + """ + return self.user_agent_class(self.headers.get("User-Agent", "")) + + # Authorization + + @cached_property + def authorization(self) -> Authorization | None: + """The ``Authorization`` header parsed into an :class:`.Authorization` object. + ``None`` if the header is not present. + + .. versionchanged:: 2.3 + :class:`Authorization` is no longer a ``dict``. The ``token`` attribute + was added for auth schemes that use a token instead of parameters. + """ + return Authorization.from_header(self.headers.get("Authorization")) + + # CORS + + origin = header_property[str]( + "Origin", + doc=( + "The host that the request originated from. Set" + " :attr:`~CORSResponseMixin.access_control_allow_origin` on" + " the response to indicate which origins are allowed." 
+ ), + read_only=True, + ) + + access_control_request_headers = header_property( + "Access-Control-Request-Headers", + load_func=parse_set_header, + doc=( + "Sent with a preflight request to indicate which headers" + " will be sent with the cross origin request. Set" + " :attr:`~CORSResponseMixin.access_control_allow_headers`" + " on the response to indicate which headers are allowed." + ), + read_only=True, + ) + + access_control_request_method = header_property[str]( + "Access-Control-Request-Method", + doc=( + "Sent with a preflight request to indicate which method" + " will be used for the cross origin request. Set" + " :attr:`~CORSResponseMixin.access_control_allow_methods`" + " on the response to indicate which methods are allowed." + ), + read_only=True, + ) + + @property + def is_json(self) -> bool: + """Check if the mimetype indicates JSON data, either + :mimetype:`application/json` or :mimetype:`application/*+json`. + """ + mt = self.mimetype + return ( + mt == "application/json" + or mt.startswith("application/") + and mt.endswith("+json") + ) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/sansio/response.py b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/response.py new file mode 100644 index 0000000000000000000000000000000000000000..e5c1df743def56c16e8145e34184cf14e982bc9a --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/response.py @@ -0,0 +1,789 @@ +from __future__ import annotations + +import typing as t +import warnings +from datetime import datetime +from datetime import timedelta +from datetime import timezone +from http import HTTPStatus + +from ..datastructures import Headers +from ..datastructures import HeaderSet +from ..http import dump_cookie +from ..http import HTTP_STATUS_CODES +from ..utils import get_content_type +from werkzeug.datastructures import CallbackDict +from werkzeug.datastructures import ContentRange +from werkzeug.datastructures import ContentSecurityPolicy +from werkzeug.datastructures import ResponseCacheControl +from werkzeug.datastructures import WWWAuthenticate +from werkzeug.http import COEP +from werkzeug.http import COOP +from werkzeug.http import dump_age +from werkzeug.http import dump_header +from werkzeug.http import dump_options_header +from werkzeug.http import http_date +from werkzeug.http import parse_age +from werkzeug.http import parse_cache_control_header +from werkzeug.http import parse_content_range_header +from werkzeug.http import parse_csp_header +from werkzeug.http import parse_date +from werkzeug.http import parse_options_header +from werkzeug.http import parse_set_header +from werkzeug.http import quote_etag +from werkzeug.http import unquote_etag +from werkzeug.utils import header_property + + +def _set_property(name: str, doc: str | None = None) -> property: + def fget(self: Response) -> HeaderSet: + def on_update(header_set: HeaderSet) -> None: + if not header_set and name in self.headers: + del self.headers[name] + elif header_set: + self.headers[name] = header_set.to_header() + + return parse_set_header(self.headers.get(name), on_update) + + def fset( + self: Response, + value: None | (str | dict[str, str | int] | t.Iterable[str]), + ) -> None: + if not value: + del self.headers[name] + elif isinstance(value, str): + self.headers[name] = value + else: + self.headers[name] = dump_header(value) + + return property(fget, fset, doc=doc) + + +class Response: + """Represents the non-IO parts of an HTTP response, specifically the + status and headers but not the body. 
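+
+    A minimal sketch of typical use:
+
+    .. code-block:: python
+
+        resp = Response(status=404, headers=[("X-Demo", "1")])
+        resp.status       # '404 NOT FOUND'
+        resp.status_code  # 404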
+ + This class is not meant for general use. It should only be used when + implementing WSGI, ASGI, or another HTTP application spec. Werkzeug + provides a WSGI implementation at :cls:`werkzeug.wrappers.Response`. + + :param status: The status code for the response. Either an int, in + which case the default status message is added, or a string in + the form ``{code} {message}``, like ``404 Not Found``. Defaults + to 200. + :param headers: A :class:`~werkzeug.datastructures.Headers` object, + or a list of ``(key, value)`` tuples that will be converted to a + ``Headers`` object. + :param mimetype: The mime type (content type without charset or + other parameters) of the response. If the value starts with + ``text/`` (or matches some other special cases), the charset + will be added to create the ``content_type``. + :param content_type: The full content type of the response. + Overrides building the value from ``mimetype``. + + .. versionadded:: 2.0 + """ + + _charset: str + + @property + def charset(self) -> str: + """The charset used to encode body and cookie data. Defaults to UTF-8. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. Response data must always be UTF-8. + """ + warnings.warn( + "The 'charset' attribute is deprecated and will not be used in Werkzeug" + " 2.4. Text in body and cookie data will always use UTF-8.", + DeprecationWarning, + stacklevel=2, + ) + return self._charset + + @charset.setter + def charset(self, value: str) -> None: + warnings.warn( + "The 'charset' attribute is deprecated and will not be used in Werkzeug" + " 2.4. Text in body and cookie data will always use UTF-8.", + DeprecationWarning, + stacklevel=2, + ) + self._charset = value + + #: the default status if none is provided. + default_status = 200 + + #: the default mimetype if none is provided. + default_mimetype: str | None = "text/plain" + + #: Warn if a cookie header exceeds this size. The default, 4093, should be + #: safely `supported by most browsers <cookie_>`_. A cookie larger than + #: this size will still be sent, but it may be ignored or handled + #: incorrectly by some browsers. Set to 0 to disable this check. + #: + #: .. versionadded:: 0.13 + #: + #: .. _`cookie`: http://browsercookielimits.squawky.net/ + max_cookie_size = 4093 + + # A :class:`Headers` object representing the response headers. + headers: Headers + + def __init__( + self, + status: int | str | HTTPStatus | None = None, + headers: t.Mapping[str, str | t.Iterable[str]] + | t.Iterable[tuple[str, str]] + | None = None, + mimetype: str | None = None, + content_type: str | None = None, + ) -> None: + if not isinstance(type(self).charset, property): + warnings.warn( + "The 'charset' attribute is deprecated and will not be used in Werkzeug" + " 2.4. 
Text in body and cookie data will always use UTF-8.", + DeprecationWarning, + stacklevel=2, + ) + self._charset = self.charset + else: + self._charset = "utf-8" + + if isinstance(headers, Headers): + self.headers = headers + elif not headers: + self.headers = Headers() + else: + self.headers = Headers(headers) + + if content_type is None: + if mimetype is None and "content-type" not in self.headers: + mimetype = self.default_mimetype + if mimetype is not None: + mimetype = get_content_type(mimetype, self._charset) + content_type = mimetype + if content_type is not None: + self.headers["Content-Type"] = content_type + if status is None: + status = self.default_status + self.status = status # type: ignore + + def __repr__(self) -> str: + return f"<{type(self).__name__} [{self.status}]>" + + @property + def status_code(self) -> int: + """The HTTP status code as a number.""" + return self._status_code + + @status_code.setter + def status_code(self, code: int) -> None: + self.status = code # type: ignore + + @property + def status(self) -> str: + """The HTTP status code as a string.""" + return self._status + + @status.setter + def status(self, value: str | int | HTTPStatus) -> None: + self._status, self._status_code = self._clean_status(value) + + def _clean_status(self, value: str | int | HTTPStatus) -> tuple[str, int]: + if isinstance(value, (int, HTTPStatus)): + status_code = int(value) + else: + value = value.strip() + + if not value: + raise ValueError("Empty status argument") + + code_str, sep, _ = value.partition(" ") + + try: + status_code = int(code_str) + except ValueError: + # only message + return f"0 {value}", 0 + + if sep: + # code and message + return value, status_code + + # only code, look up message + try: + status = f"{status_code} {HTTP_STATUS_CODES[status_code].upper()}" + except KeyError: + status = f"{status_code} UNKNOWN" + + return status, status_code + + def set_cookie( + self, + key: str, + value: str = "", + max_age: timedelta | int | None = None, + expires: str | datetime | int | float | None = None, + path: str | None = "/", + domain: str | None = None, + secure: bool = False, + httponly: bool = False, + samesite: str | None = None, + ) -> None: + """Sets a cookie. + + A warning is raised if the size of the cookie header exceeds + :attr:`max_cookie_size`, but the header will still be set. + + :param key: the key (name) of the cookie to be set. + :param value: the value of the cookie. + :param max_age: should be a number of seconds, or `None` (default) if + the cookie should last only as long as the client's + browser session. + :param expires: should be a `datetime` object or UNIX timestamp. + :param path: limits the cookie to a given path, per default it will + span the whole domain. + :param domain: if you want to set a cross-domain cookie. For example, + ``domain="example.com"`` will set a cookie that is + readable by the domain ``www.example.com``, + ``foo.example.com`` etc. Otherwise, a cookie will only + be readable by the domain that set it. + :param secure: If ``True``, the cookie will only be available + via HTTPS. + :param httponly: Disallow JavaScript access to the cookie. + :param samesite: Limit the scope of the cookie to only be + attached to requests that are "same-site". 
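+
+        For example (illustrative values):
+
+        .. code-block:: python
+
+            response.set_cookie(
+                "session", "opaque-token", max_age=3600,
+                secure=True, httponly=True, samesite="Lax",
+            )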
+ """ + charset = self._charset if self._charset != "utf-8" else None + self.headers.add( + "Set-Cookie", + dump_cookie( + key, + value=value, + max_age=max_age, + expires=expires, + path=path, + domain=domain, + secure=secure, + httponly=httponly, + charset=charset, + max_size=self.max_cookie_size, + samesite=samesite, + ), + ) + + def delete_cookie( + self, + key: str, + path: str | None = "/", + domain: str | None = None, + secure: bool = False, + httponly: bool = False, + samesite: str | None = None, + ) -> None: + """Delete a cookie. Fails silently if key doesn't exist. + + :param key: the key (name) of the cookie to be deleted. + :param path: if the cookie that should be deleted was limited to a + path, the path has to be defined here. + :param domain: if the cookie that should be deleted was limited to a + domain, that domain has to be defined here. + :param secure: If ``True``, the cookie will only be available + via HTTPS. + :param httponly: Disallow JavaScript access to the cookie. + :param samesite: Limit the scope of the cookie to only be + attached to requests that are "same-site". + """ + self.set_cookie( + key, + expires=0, + max_age=0, + path=path, + domain=domain, + secure=secure, + httponly=httponly, + samesite=samesite, + ) + + @property + def is_json(self) -> bool: + """Check if the mimetype indicates JSON data, either + :mimetype:`application/json` or :mimetype:`application/*+json`. + """ + mt = self.mimetype + return mt is not None and ( + mt == "application/json" + or mt.startswith("application/") + and mt.endswith("+json") + ) + + # Common Descriptors + + @property + def mimetype(self) -> str | None: + """The mimetype (content type without charset etc.)""" + ct = self.headers.get("content-type") + + if ct: + return ct.split(";")[0].strip() + else: + return None + + @mimetype.setter + def mimetype(self, value: str) -> None: + self.headers["Content-Type"] = get_content_type(value, self._charset) + + @property + def mimetype_params(self) -> dict[str, str]: + """The mimetype parameters as dict. For example if the + content type is ``text/html; charset=utf-8`` the params would be + ``{'charset': 'utf-8'}``. + + .. versionadded:: 0.5 + """ + + def on_update(d: CallbackDict) -> None: + self.headers["Content-Type"] = dump_options_header(self.mimetype, d) + + d = parse_options_header(self.headers.get("content-type", ""))[1] + return CallbackDict(d, on_update) + + location = header_property[str]( + "Location", + doc="""The Location response-header field is used to redirect + the recipient to a location other than the Request-URI for + completion of the request or identification of a new + resource.""", + ) + age = header_property( + "Age", + None, + parse_age, + dump_age, # type: ignore + doc="""The Age response-header field conveys the sender's + estimate of the amount of time since the response (or its + revalidation) was generated at the origin server. 
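+
+        For example (illustrative):
+
+        .. code-block:: python
+
+            from datetime import timedelta
+
+            # Report that this response spent two minutes in a cache.
+            response.age = timedelta(minutes=2)  # sent as "Age: 120"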
+ + Age values are non-negative decimal integers, representing time + in seconds.""", + ) + content_type = header_property[str]( + "Content-Type", + doc="""The Content-Type entity-header field indicates the media + type of the entity-body sent to the recipient or, in the case of + the HEAD method, the media type that would have been sent had + the request been a GET.""", + ) + content_length = header_property( + "Content-Length", + None, + int, + str, + doc="""The Content-Length entity-header field indicates the size + of the entity-body, in decimal number of OCTETs, sent to the + recipient or, in the case of the HEAD method, the size of the + entity-body that would have been sent had the request been a + GET.""", + ) + content_location = header_property[str]( + "Content-Location", + doc="""The Content-Location entity-header field MAY be used to + supply the resource location for the entity enclosed in the + message when that entity is accessible from a location separate + from the requested resource's URI.""", + ) + content_encoding = header_property[str]( + "Content-Encoding", + doc="""The Content-Encoding entity-header field is used as a + modifier to the media-type. When present, its value indicates + what additional content codings have been applied to the + entity-body, and thus what decoding mechanisms must be applied + in order to obtain the media-type referenced by the Content-Type + header field.""", + ) + content_md5 = header_property[str]( + "Content-MD5", + doc="""The Content-MD5 entity-header field, as defined in + RFC 1864, is an MD5 digest of the entity-body for the purpose of + providing an end-to-end message integrity check (MIC) of the + entity-body. (Note: a MIC is good for detecting accidental + modification of the entity-body in transit, but is not proof + against malicious attacks.)""", + ) + date = header_property( + "Date", + None, + parse_date, + http_date, + doc="""The Date general-header field represents the date and + time at which the message was originated, having the same + semantics as orig-date in RFC 822. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """, + ) + expires = header_property( + "Expires", + None, + parse_date, + http_date, + doc="""The Expires entity-header field gives the date/time after + which the response is considered stale. A stale cache entry may + not normally be returned by a cache. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """, + ) + last_modified = header_property( + "Last-Modified", + None, + parse_date, + http_date, + doc="""The Last-Modified entity-header field indicates the date + and time at which the origin server believes the variant was + last modified. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. + """, + ) + + @property + def retry_after(self) -> datetime | None: + """The Retry-After response-header field can be used with a + 503 (Service Unavailable) response to indicate how long the + service is expected to be unavailable to the requesting client. + + Time in seconds until expiration or date. + + .. versionchanged:: 2.0 + The datetime object is timezone-aware. 
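+
+        For example (illustrative):
+
+        .. code-block:: python
+
+            from datetime import datetime, timedelta, timezone
+
+            # Relative: retry in 30 seconds.
+            response.retry_after = 30
+            # Absolute: a timezone-aware datetime works as well.
+            response.retry_after = datetime.now(timezone.utc) + timedelta(seconds=30)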
+ """ + value = self.headers.get("retry-after") + if value is None: + return None + + try: + seconds = int(value) + except ValueError: + return parse_date(value) + + return datetime.now(timezone.utc) + timedelta(seconds=seconds) + + @retry_after.setter + def retry_after(self, value: datetime | int | str | None) -> None: + if value is None: + if "retry-after" in self.headers: + del self.headers["retry-after"] + return + elif isinstance(value, datetime): + value = http_date(value) + else: + value = str(value) + self.headers["Retry-After"] = value + + vary = _set_property( + "Vary", + doc="""The Vary field value indicates the set of request-header + fields that fully determines, while the response is fresh, + whether a cache is permitted to use the response to reply to a + subsequent request without revalidation.""", + ) + content_language = _set_property( + "Content-Language", + doc="""The Content-Language entity-header field describes the + natural language(s) of the intended audience for the enclosed + entity. Note that this might not be equivalent to all the + languages used within the entity-body.""", + ) + allow = _set_property( + "Allow", + doc="""The Allow entity-header field lists the set of methods + supported by the resource identified by the Request-URI. The + purpose of this field is strictly to inform the recipient of + valid methods associated with the resource. An Allow header + field MUST be present in a 405 (Method Not Allowed) + response.""", + ) + + # ETag + + @property + def cache_control(self) -> ResponseCacheControl: + """The Cache-Control general-header field is used to specify + directives that MUST be obeyed by all caching mechanisms along the + request/response chain. + """ + + def on_update(cache_control: ResponseCacheControl) -> None: + if not cache_control and "cache-control" in self.headers: + del self.headers["cache-control"] + elif cache_control: + self.headers["Cache-Control"] = cache_control.to_header() + + return parse_cache_control_header( + self.headers.get("cache-control"), on_update, ResponseCacheControl + ) + + def set_etag(self, etag: str, weak: bool = False) -> None: + """Set the etag, and override the old one if there was one.""" + self.headers["ETag"] = quote_etag(etag, weak) + + def get_etag(self) -> tuple[str, bool] | tuple[None, None]: + """Return a tuple in the form ``(etag, is_weak)``. If there is no + ETag the return value is ``(None, None)``. + """ + return unquote_etag(self.headers.get("ETag")) + + accept_ranges = header_property[str]( + "Accept-Ranges", + doc="""The `Accept-Ranges` header. Even though the name would + indicate that multiple values are supported, it must be one + string token only. + + The values ``'bytes'`` and ``'none'`` are common. + + .. versionadded:: 0.7""", + ) + + @property + def content_range(self) -> ContentRange: + """The ``Content-Range`` header as a + :class:`~werkzeug.datastructures.ContentRange` object. Available + even if the header is not set. + + .. versionadded:: 0.7 + """ + + def on_update(rng: ContentRange) -> None: + if not rng: + del self.headers["content-range"] + else: + self.headers["Content-Range"] = rng.to_header() + + rv = parse_content_range_header(self.headers.get("content-range"), on_update) + # always provide a content range object to make the descriptor + # more user friendly. It provides an unset() method that can be + # used to remove the header quickly. 
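+        # For example, rv.set(0, 500, length=1000) serializes as
+        # "Content-Range: bytes 0-499/1000" (the stop value is exclusive).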
+ if rv is None: + rv = ContentRange(None, None, None, on_update=on_update) + return rv + + @content_range.setter + def content_range(self, value: ContentRange | str | None) -> None: + if not value: + del self.headers["content-range"] + elif isinstance(value, str): + self.headers["Content-Range"] = value + else: + self.headers["Content-Range"] = value.to_header() + + # Authorization + + @property + def www_authenticate(self) -> WWWAuthenticate: + """The ``WWW-Authenticate`` header parsed into a :class:`.WWWAuthenticate` + object. Modifying the object will modify the header value. + + This header is not set by default. To set this header, assign an instance of + :class:`.WWWAuthenticate` to this attribute. + + .. code-block:: python + + response.www_authenticate = WWWAuthenticate( + "basic", {"realm": "Authentication Required"} + ) + + Multiple values for this header can be sent to give the client multiple options. + Assign a list to set multiple headers. However, modifying the items in the list + will not automatically update the header values, and accessing this attribute + will only ever return the first value. + + To unset this header, assign ``None`` or use ``del``. + + .. versionchanged:: 2.3 + This attribute can be assigned to to set the header. A list can be assigned + to set multiple header values. Use ``del`` to unset the header. + + .. versionchanged:: 2.3 + :class:`WWWAuthenticate` is no longer a ``dict``. The ``token`` attribute + was added for auth challenges that use a token instead of parameters. + """ + value = WWWAuthenticate.from_header(self.headers.get("WWW-Authenticate")) + + if value is None: + value = WWWAuthenticate("basic") + + def on_update(value: WWWAuthenticate) -> None: + self.www_authenticate = value + + value._on_update = on_update + return value + + @www_authenticate.setter + def www_authenticate( + self, value: WWWAuthenticate | list[WWWAuthenticate] | None + ) -> None: + if not value: # None or empty list + del self.www_authenticate + elif isinstance(value, list): + # Clear any existing header by setting the first item. + self.headers.set("WWW-Authenticate", value[0].to_header()) + + for item in value[1:]: + # Add additional header lines for additional items. + self.headers.add("WWW-Authenticate", item.to_header()) + else: + self.headers.set("WWW-Authenticate", value.to_header()) + + def on_update(value: WWWAuthenticate) -> None: + self.www_authenticate = value + + # When setting a single value, allow updating it directly. + value._on_update = on_update + + @www_authenticate.deleter + def www_authenticate(self) -> None: + if "WWW-Authenticate" in self.headers: + del self.headers["WWW-Authenticate"] + + # CSP + + @property + def content_security_policy(self) -> ContentSecurityPolicy: + """The ``Content-Security-Policy`` header as a + :class:`~werkzeug.datastructures.ContentSecurityPolicy` object. Available + even if the header is not set. + + The Content-Security-Policy header adds an additional layer of + security to help detect and mitigate certain types of attacks. 
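+
+        For example (illustrative policy values):
+
+        .. code-block:: python
+
+            csp = response.content_security_policy
+            csp.default_src = "'self'"
+            csp.img_src = "'self' data:"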
+ """ + + def on_update(csp: ContentSecurityPolicy) -> None: + if not csp: + del self.headers["content-security-policy"] + else: + self.headers["Content-Security-Policy"] = csp.to_header() + + rv = parse_csp_header(self.headers.get("content-security-policy"), on_update) + if rv is None: + rv = ContentSecurityPolicy(None, on_update=on_update) + return rv + + @content_security_policy.setter + def content_security_policy( + self, value: ContentSecurityPolicy | str | None + ) -> None: + if not value: + del self.headers["content-security-policy"] + elif isinstance(value, str): + self.headers["Content-Security-Policy"] = value + else: + self.headers["Content-Security-Policy"] = value.to_header() + + @property + def content_security_policy_report_only(self) -> ContentSecurityPolicy: + """The ``Content-Security-policy-report-only`` header as a + :class:`~werkzeug.datastructures.ContentSecurityPolicy` object. Available + even if the header is not set. + + The Content-Security-Policy-Report-Only header adds a csp policy + that is not enforced but is reported thereby helping detect + certain types of attacks. + """ + + def on_update(csp: ContentSecurityPolicy) -> None: + if not csp: + del self.headers["content-security-policy-report-only"] + else: + self.headers["Content-Security-policy-report-only"] = csp.to_header() + + rv = parse_csp_header( + self.headers.get("content-security-policy-report-only"), on_update + ) + if rv is None: + rv = ContentSecurityPolicy(None, on_update=on_update) + return rv + + @content_security_policy_report_only.setter + def content_security_policy_report_only( + self, value: ContentSecurityPolicy | str | None + ) -> None: + if not value: + del self.headers["content-security-policy-report-only"] + elif isinstance(value, str): + self.headers["Content-Security-policy-report-only"] = value + else: + self.headers["Content-Security-policy-report-only"] = value.to_header() + + # CORS + + @property + def access_control_allow_credentials(self) -> bool: + """Whether credentials can be shared by the browser to + JavaScript code. As part of the preflight request it indicates + whether credentials can be used on the cross origin request. 
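+
+        For example (illustrative origin):
+
+        .. code-block:: python
+
+            response.access_control_allow_origin = "https://app.example.com"
+            response.access_control_allow_credentials = True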
+ """ + return "Access-Control-Allow-Credentials" in self.headers + + @access_control_allow_credentials.setter + def access_control_allow_credentials(self, value: bool | None) -> None: + if value is True: + self.headers["Access-Control-Allow-Credentials"] = "true" + else: + self.headers.pop("Access-Control-Allow-Credentials", None) + + access_control_allow_headers = header_property( + "Access-Control-Allow-Headers", + load_func=parse_set_header, + dump_func=dump_header, + doc="Which headers can be sent with the cross origin request.", + ) + + access_control_allow_methods = header_property( + "Access-Control-Allow-Methods", + load_func=parse_set_header, + dump_func=dump_header, + doc="Which methods can be used for the cross origin request.", + ) + + access_control_allow_origin = header_property[str]( + "Access-Control-Allow-Origin", + doc="The origin or '*' for any origin that may make cross origin requests.", + ) + + access_control_expose_headers = header_property( + "Access-Control-Expose-Headers", + load_func=parse_set_header, + dump_func=dump_header, + doc="Which headers can be shared by the browser to JavaScript code.", + ) + + access_control_max_age = header_property( + "Access-Control-Max-Age", + load_func=int, + dump_func=str, + doc="The maximum age in seconds the access control settings can be cached for.", + ) + + cross_origin_opener_policy = header_property[COOP]( + "Cross-Origin-Opener-Policy", + load_func=lambda value: COOP(value), + dump_func=lambda value: value.value, + default=COOP.UNSAFE_NONE, + doc="""Allows control over sharing of browsing context group with cross-origin + documents. Values must be a member of the :class:`werkzeug.http.COOP` enum.""", + ) + + cross_origin_embedder_policy = header_property[COEP]( + "Cross-Origin-Embedder-Policy", + load_func=lambda value: COEP(value), + dump_func=lambda value: value.value, + default=COEP.UNSAFE_NONE, + doc="""Prevents a document from loading any cross-origin resources that do not + explicitly grant the document permission. Values must be a member of the + :class:`werkzeug.http.COEP` enum.""", + ) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/sansio/utils.py b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..48ec1bfa077d706a1d732ea17593f4d9cd00f593 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/sansio/utils.py @@ -0,0 +1,159 @@ +from __future__ import annotations + +import typing as t +from urllib.parse import quote + +from .._internal import _plain_int +from ..exceptions import SecurityError +from ..urls import uri_to_iri + + +def host_is_trusted(hostname: str, trusted_list: t.Iterable[str]) -> bool: + """Check if a host matches a list of trusted names. + + :param hostname: The name to check. + :param trusted_list: A list of valid names to match. If a name + starts with a dot it will match all subdomains. + + .. 
versionadded:: 0.9 + """ + if not hostname: + return False + + try: + hostname = hostname.partition(":")[0].encode("idna").decode("ascii") + except UnicodeEncodeError: + return False + + if isinstance(trusted_list, str): + trusted_list = [trusted_list] + + for ref in trusted_list: + if ref.startswith("."): + ref = ref[1:] + suffix_match = True + else: + suffix_match = False + + try: + ref = ref.partition(":")[0].encode("idna").decode("ascii") + except UnicodeEncodeError: + return False + + if ref == hostname or (suffix_match and hostname.endswith(f".{ref}")): + return True + + return False + + +def get_host( + scheme: str, + host_header: str | None, + server: tuple[str, int | None] | None = None, + trusted_hosts: t.Iterable[str] | None = None, +) -> str: + """Return the host for the given parameters. + + This first checks the ``host_header``. If it's not present, then + ``server`` is used. The host will only contain the port if it is + different than the standard port for the protocol. + + Optionally, verify that the host is trusted using + :func:`host_is_trusted` and raise a + :exc:`~werkzeug.exceptions.SecurityError` if it is not. + + :param scheme: The protocol the request used, like ``"https"``. + :param host_header: The ``Host`` header value. + :param server: Address of the server. ``(host, port)``, or + ``(path, None)`` for unix sockets. + :param trusted_hosts: A list of trusted host names. + + :return: Host, with port if necessary. + :raise ~werkzeug.exceptions.SecurityError: If the host is not + trusted. + """ + host = "" + + if host_header is not None: + host = host_header + elif server is not None: + host = server[0] + + if server[1] is not None: + host = f"{host}:{server[1]}" + + if scheme in {"http", "ws"} and host.endswith(":80"): + host = host[:-3] + elif scheme in {"https", "wss"} and host.endswith(":443"): + host = host[:-4] + + if trusted_hosts is not None: + if not host_is_trusted(host, trusted_hosts): + raise SecurityError(f"Host {host!r} is not trusted.") + + return host + + +def get_current_url( + scheme: str, + host: str, + root_path: str | None = None, + path: str | None = None, + query_string: bytes | None = None, +) -> str: + """Recreate the URL for a request. If an optional part isn't + provided, it and subsequent parts are not included in the URL. + + The URL is an IRI, not a URI, so it may contain Unicode characters. + Use :func:`~werkzeug.urls.iri_to_uri` to convert it to ASCII. + + :param scheme: The protocol the request used, like ``"https"``. + :param host: The host the request was made to. See :func:`get_host`. + :param root_path: Prefix that the application is mounted under. This + is prepended to ``path``. + :param path: The path part of the URL after ``root_path``. + :param query_string: The portion of the URL after the "?". 
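+
+    For example (illustrative values):
+
+    .. code-block:: python
+
+        get_current_url("https", "example.com", "/app", "page", b"x=1")
+        # 'https://example.com/app/page?x=1'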
+ """ + url = [scheme, "://", host] + + if root_path is None: + url.append("/") + return uri_to_iri("".join(url)) + + # safe = https://url.spec.whatwg.org/#url-path-segment-string + # as well as percent for things that are already quoted + url.append(quote(root_path.rstrip("/"), safe="!$&'()*+,/:;=@%")) + url.append("/") + + if path is None: + return uri_to_iri("".join(url)) + + url.append(quote(path.lstrip("/"), safe="!$&'()*+,/:;=@%")) + + if query_string: + url.append("?") + url.append(quote(query_string, safe="!$&'()*+,/:;=?@%")) + + return uri_to_iri("".join(url)) + + +def get_content_length( + http_content_length: str | None = None, + http_transfer_encoding: str | None = None, +) -> int | None: + """Return the ``Content-Length`` header value as an int. If the header is not given + or the ``Transfer-Encoding`` header is ``chunked``, ``None`` is returned to indicate + a streaming request. If the value is not an integer, or negative, 0 is returned. + + :param http_content_length: The Content-Length HTTP header. + :param http_transfer_encoding: The Transfer-Encoding HTTP header. + + .. versionadded:: 2.2 + """ + if http_transfer_encoding == "chunked" or http_content_length is None: + return None + + try: + return max(0, _plain_int(http_content_length)) + except ValueError: + return 0 diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/security.py b/backend/test/lib/python3.8/site-packages/werkzeug/security.py new file mode 100644 index 0000000000000000000000000000000000000000..282c4fd8c35acc53e126307eece4ba71a669e948 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/security.py @@ -0,0 +1,172 @@ +from __future__ import annotations + +import hashlib +import hmac +import os +import posixpath +import secrets +import warnings + +SALT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" +DEFAULT_PBKDF2_ITERATIONS = 600000 + +_os_alt_seps: list[str] = list( + sep for sep in [os.sep, os.path.altsep] if sep is not None and sep != "/" +) + + +def gen_salt(length: int) -> str: + """Generate a random string of SALT_CHARS with specified ``length``.""" + if length <= 0: + raise ValueError("Salt length must be at least 1.") + + return "".join(secrets.choice(SALT_CHARS) for _ in range(length)) + + +def _hash_internal(method: str, salt: str, password: str) -> tuple[str, str]: + if method == "plain": + warnings.warn( + "The 'plain' password method is deprecated and will be removed in" + " Werkzeug 3.0. 
Migrate to the 'scrypt' method.", + stacklevel=3, + ) + return password, method + + method, *args = method.split(":") + salt = salt.encode("utf-8") + password = password.encode("utf-8") + + if method == "scrypt": + if not args: + n = 2**15 + r = 8 + p = 1 + else: + try: + n, r, p = map(int, args) + except ValueError: + raise ValueError("'scrypt' takes 3 arguments.") from None + + maxmem = 132 * n * r * p # ideally 128, but some extra seems needed + return ( + hashlib.scrypt(password, salt=salt, n=n, r=r, p=p, maxmem=maxmem).hex(), + f"scrypt:{n}:{r}:{p}", + ) + elif method == "pbkdf2": + len_args = len(args) + + if len_args == 0: + hash_name = "sha256" + iterations = DEFAULT_PBKDF2_ITERATIONS + elif len_args == 1: + hash_name = args[0] + iterations = DEFAULT_PBKDF2_ITERATIONS + elif len_args == 2: + hash_name = args[0] + iterations = int(args[1]) + else: + raise ValueError("'pbkdf2' takes 2 arguments.") + + return ( + hashlib.pbkdf2_hmac(hash_name, password, salt, iterations).hex(), + f"pbkdf2:{hash_name}:{iterations}", + ) + else: + warnings.warn( + f"The '{method}' password method is deprecated and will be removed in" + " Werkzeug 3.0. Migrate to the 'scrypt' method.", + stacklevel=3, + ) + return hmac.new(salt, password, method).hexdigest(), method + + +def generate_password_hash( + password: str, method: str = "pbkdf2", salt_length: int = 16 +) -> str: + """Securely hash a password for storage. A password can be compared to a stored hash + using :func:`check_password_hash`. + + The following methods are supported: + + - ``scrypt``, more secure but not available on PyPy. The parameters are ``n``, + ``r``, and ``p``, the default is ``scrypt:32768:8:1``. See + :func:`hashlib.scrypt`. + - ``pbkdf2``, the default. The parameters are ``hash_method`` and ``iterations``, + the default is ``pbkdf2:sha256:600000``. See :func:`hashlib.pbkdf2_hmac`. + + Default parameters may be updated to reflect current guidelines, and methods may be + deprecated and removed if they are no longer considered secure. To migrate old + hashes, you may generate a new hash when checking an old hash, or you may contact + users with a link to reset their password. + + :param password: The plaintext password. + :param method: The key derivation function and parameters. + :param salt_length: The number of characters to generate for the salt. + + .. versionchanged:: 2.3 + Scrypt support was added. + + .. versionchanged:: 2.3 + The default iterations for pbkdf2 was increased to 600,000. + + .. versionchanged:: 2.3 + All plain hashes are deprecated and will not be supported in Werkzeug 3.0. + """ + salt = gen_salt(salt_length) + h, actual_method = _hash_internal(method, salt, password) + return f"{actual_method}${salt}${h}" + + +def check_password_hash(pwhash: str, password: str) -> bool: + """Securely check that the given stored password hash, previously generated using + :func:`generate_password_hash`, matches the given password. + + Methods may be deprecated and removed if they are no longer considered secure. To + migrate old hashes, you may generate a new hash when checking an old hash, or you + may contact users with a link to reset their password. + + :param pwhash: The hashed password. + :param password: The plaintext password. + + .. versionchanged:: 2.3 + All plain hashes are deprecated and will not be supported in Werkzeug 3.0. 
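+
+    A minimal round trip (illustrative):
+
+    .. code-block:: python
+
+        pwhash = generate_password_hash("secret")
+        check_password_hash(pwhash, "secret")  # True
+        check_password_hash(pwhash, "wrong")   # False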
+ """ + try: + method, salt, hashval = pwhash.split("$", 2) + except ValueError: + return False + + return hmac.compare_digest(_hash_internal(method, salt, password)[0], hashval) + + +def safe_join(directory: str, *pathnames: str) -> str | None: + """Safely join zero or more untrusted path components to a base + directory to avoid escaping the base directory. + + :param directory: The trusted base directory. + :param pathnames: The untrusted path components relative to the + base directory. + :return: A safe path, otherwise ``None``. + """ + if not directory: + # Ensure we end up with ./path if directory="" is given, + # otherwise the first untrusted part could become trusted. + directory = "." + + parts = [directory] + + for filename in pathnames: + if filename != "": + filename = posixpath.normpath(filename) + + if ( + any(sep in filename for sep in _os_alt_seps) + or os.path.isabs(filename) + or filename == ".." + or filename.startswith("../") + ): + return None + + parts.append(filename) + + return posixpath.join(*parts) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/serving.py b/backend/test/lib/python3.8/site-packages/werkzeug/serving.py new file mode 100644 index 0000000000000000000000000000000000000000..c031dc45edc236a9ba4fb26420f9880dd8b1f81d --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/serving.py @@ -0,0 +1,1107 @@ +"""A WSGI and HTTP server for use **during development only**. This +server is convenient to use, but is not designed to be particularly +stable, secure, or efficient. Use a dedicate WSGI server and HTTP +server when deploying to production. + +It provides features like interactive debugging and code reloading. Use +``run_simple`` to start the server. Put this in a ``run.py`` script: + +.. code-block:: python + + from myapp import create_app + from werkzeug import run_simple +""" +from __future__ import annotations + +import errno +import io +import os +import selectors +import socket +import socketserver +import sys +import typing as t +from datetime import datetime as dt +from datetime import timedelta +from datetime import timezone +from http.server import BaseHTTPRequestHandler +from http.server import HTTPServer +from urllib.parse import unquote +from urllib.parse import urlsplit + +from ._internal import _log +from ._internal import _wsgi_encoding_dance +from .exceptions import InternalServerError +from .urls import uri_to_iri + +try: + import ssl +except ImportError: + + class _SslDummy: + def __getattr__(self, name: str) -> t.Any: + raise RuntimeError( # noqa: B904 + "SSL is unavailable because this Python runtime was not" + " compiled with SSL/TLS support." 
+ ) + + ssl = _SslDummy() # type: ignore + +_log_add_style = True + +if os.name == "nt": + try: + __import__("colorama") + except ImportError: + _log_add_style = False + +can_fork = hasattr(os, "fork") + +if can_fork: + ForkingMixIn = socketserver.ForkingMixIn +else: + + class ForkingMixIn: # type: ignore + pass + + +try: + af_unix = socket.AF_UNIX +except AttributeError: + af_unix = None # type: ignore + +LISTEN_QUEUE = 128 + +_TSSLContextArg = t.Optional[ + t.Union["ssl.SSLContext", t.Tuple[str, t.Optional[str]], t.Literal["adhoc"]] +] + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + from cryptography.hazmat.primitives.asymmetric.rsa import ( + RSAPrivateKeyWithSerialization, + ) + from cryptography.x509 import Certificate + + +class DechunkedInput(io.RawIOBase): + """An input stream that handles Transfer-Encoding 'chunked'""" + + def __init__(self, rfile: t.IO[bytes]) -> None: + self._rfile = rfile + self._done = False + self._len = 0 + + def readable(self) -> bool: + return True + + def read_chunk_len(self) -> int: + try: + line = self._rfile.readline().decode("latin1") + _len = int(line.strip(), 16) + except ValueError as e: + raise OSError("Invalid chunk header") from e + if _len < 0: + raise OSError("Negative chunk length not allowed") + return _len + + def readinto(self, buf: bytearray) -> int: # type: ignore + read = 0 + while not self._done and read < len(buf): + if self._len == 0: + # This is the first chunk or we fully consumed the previous + # one. Read the next length of the next chunk + self._len = self.read_chunk_len() + + if self._len == 0: + # Found the final chunk of size 0. The stream is now exhausted, + # but there is still a final newline that should be consumed + self._done = True + + if self._len > 0: + # There is data (left) in this chunk, so append it to the + # buffer. If this operation fully consumes the chunk, this will + # reset self._len to 0. + n = min(len(buf), self._len) + + # If (read + chunk size) becomes more than len(buf), buf will + # grow beyond the original size and read more data than + # required. So only read as much data as can fit in buf. + if read + n > len(buf): + buf[read:] = self._rfile.read(len(buf) - read) + self._len -= len(buf) - read + read = len(buf) + else: + buf[read : read + n] = self._rfile.read(n) + self._len -= n + read += n + + if self._len == 0: + # Skip the terminating newline of a chunk that has been fully + # consumed. This also applies to the 0-sized final chunk + terminator = self._rfile.readline() + if terminator not in (b"\n", b"\r\n", b"\r"): + raise OSError("Missing chunk terminating newline") + + return read + + +class WSGIRequestHandler(BaseHTTPRequestHandler): + """A request handler that implements WSGI dispatching.""" + + server: BaseWSGIServer + + @property + def server_version(self) -> str: # type: ignore + from . import __version__ + + return f"Werkzeug/{__version__}" + + def make_environ(self) -> WSGIEnvironment: + request_url = urlsplit(self.path) + url_scheme = "http" if self.server.ssl_context is None else "https" + + if not self.client_address: + self.client_address = ("<local>", 0) + elif isinstance(self.client_address, str): + self.client_address = (self.client_address, 0) + + # If there was no scheme but the path started with two slashes, + # the first segment may have been incorrectly parsed as the + # netloc, prepend it to the path again. 
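+        # (For example, urlsplit("//foo/bar") yields netloc "foo" and
+        # path "/bar", which is rejoined as "/foo/bar" below.)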
+ if not request_url.scheme and request_url.netloc: + path_info = f"/{request_url.netloc}{request_url.path}" + else: + path_info = request_url.path + + path_info = unquote(path_info) + + environ: WSGIEnvironment = { + "wsgi.version": (1, 0), + "wsgi.url_scheme": url_scheme, + "wsgi.input": self.rfile, + "wsgi.errors": sys.stderr, + "wsgi.multithread": self.server.multithread, + "wsgi.multiprocess": self.server.multiprocess, + "wsgi.run_once": False, + "werkzeug.socket": self.connection, + "SERVER_SOFTWARE": self.server_version, + "REQUEST_METHOD": self.command, + "SCRIPT_NAME": "", + "PATH_INFO": _wsgi_encoding_dance(path_info), + "QUERY_STRING": _wsgi_encoding_dance(request_url.query), + # Non-standard, added by mod_wsgi, uWSGI + "REQUEST_URI": _wsgi_encoding_dance(self.path), + # Non-standard, added by gunicorn + "RAW_URI": _wsgi_encoding_dance(self.path), + "REMOTE_ADDR": self.address_string(), + "REMOTE_PORT": self.port_integer(), + "SERVER_NAME": self.server.server_address[0], + "SERVER_PORT": str(self.server.server_address[1]), + "SERVER_PROTOCOL": self.request_version, + } + + for key, value in self.headers.items(): + if "_" in key: + continue + + key = key.upper().replace("-", "_") + value = value.replace("\r\n", "") + if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): + key = f"HTTP_{key}" + if key in environ: + value = f"{environ[key]},{value}" + environ[key] = value + + if environ.get("HTTP_TRANSFER_ENCODING", "").strip().lower() == "chunked": + environ["wsgi.input_terminated"] = True + environ["wsgi.input"] = DechunkedInput(environ["wsgi.input"]) + + # Per RFC 2616, if the URL is absolute, use that as the host. + # We're using "has a scheme" to indicate an absolute URL. + if request_url.scheme and request_url.netloc: + environ["HTTP_HOST"] = request_url.netloc + + try: + # binary_form=False gives nicer information, but wouldn't be compatible with + # what Nginx or Apache could return. + peer_cert = self.connection.getpeercert(binary_form=True) + if peer_cert is not None: + # Nginx and Apache use PEM format. + environ["SSL_CLIENT_CERT"] = ssl.DER_cert_to_PEM_cert(peer_cert) + except ValueError: + # SSL handshake hasn't finished. + self.server.log("error", "Cannot fetch SSL peer certificate info") + except AttributeError: + # Not using TLS, the socket will not have getpeercert(). + pass + + return environ + + def run_wsgi(self) -> None: + if self.headers.get("Expect", "").lower().strip() == "100-continue": + self.wfile.write(b"HTTP/1.1 100 Continue\r\n\r\n") + + self.environ = environ = self.make_environ() + status_set: str | None = None + headers_set: list[tuple[str, str]] | None = None + status_sent: str | None = None + headers_sent: list[tuple[str, str]] | None = None + chunk_response: bool = False + + def write(data: bytes) -> None: + nonlocal status_sent, headers_sent, chunk_response + assert status_set is not None, "write() before start_response" + assert headers_set is not None, "write() before start_response" + if status_sent is None: + status_sent = status_set + headers_sent = headers_set + try: + code_str, msg = status_sent.split(None, 1) + except ValueError: + code_str, msg = status_sent, "" + code = int(code_str) + self.send_response(code, msg) + header_keys = set() + for key, value in headers_sent: + self.send_header(key, value) + header_keys.add(key.lower()) + + # Use chunked transfer encoding if there is no content + # length. Do not use for 1xx and 204 responses. 
304 + # responses and HEAD requests are also excluded, which + # is the more conservative behavior and matches other + # parts of the code. + # https://httpwg.org/specs/rfc7230.html#rfc.section.3.3.1 + if ( + not ( + "content-length" in header_keys + or environ["REQUEST_METHOD"] == "HEAD" + or (100 <= code < 200) + or code in {204, 304} + ) + and self.protocol_version >= "HTTP/1.1" + ): + chunk_response = True + self.send_header("Transfer-Encoding", "chunked") + + # Always close the connection. This disables HTTP/1.1 + # keep-alive connections. They aren't handled well by + # Python's http.server because it doesn't know how to + # drain the stream before the next request line. + self.send_header("Connection", "close") + self.end_headers() + + assert isinstance(data, bytes), "applications must write bytes" + + if data: + if chunk_response: + self.wfile.write(hex(len(data))[2:].encode()) + self.wfile.write(b"\r\n") + + self.wfile.write(data) + + if chunk_response: + self.wfile.write(b"\r\n") + + self.wfile.flush() + + def start_response(status, headers, exc_info=None): # type: ignore + nonlocal status_set, headers_set + if exc_info: + try: + if headers_sent: + raise exc_info[1].with_traceback(exc_info[2]) + finally: + exc_info = None + elif headers_set: + raise AssertionError("Headers already set") + status_set = status + headers_set = headers + return write + + def execute(app: WSGIApplication) -> None: + application_iter = app(environ, start_response) + try: + for data in application_iter: + write(data) + if not headers_sent: + write(b"") + if chunk_response: + self.wfile.write(b"0\r\n\r\n") + finally: + # Check for any remaining data in the read socket, and discard it. This + # will read past request.max_content_length, but lets the client see a + # 413 response instead of a connection reset failure. If we supported + # keep-alive connections, this naive approach would break by reading the + # next request line. Since we know that write (above) closes every + # connection we can read everything. + selector = selectors.DefaultSelector() + selector.register(self.connection, selectors.EVENT_READ) + total_size = 0 + total_reads = 0 + + # A timeout of 0 tends to fail because a client needs a small amount of + # time to continue sending its data. + while selector.select(timeout=0.01): + # Only read 10MB into memory at a time. + data = self.rfile.read(10_000_000) + total_size += len(data) + total_reads += 1 + + # Stop reading on no data, >=10GB, or 1000 reads. If a client sends + # more than that, they'll get a connection reset failure. + if not data or total_size >= 10_000_000_000 or total_reads > 1000: + break + + selector.close() + + if hasattr(application_iter, "close"): + application_iter.close() + + try: + execute(self.server.app) + except (ConnectionError, socket.timeout) as e: + self.connection_dropped(e, environ) + except Exception as e: + if self.server.passthrough_errors: + raise + + if status_sent is not None and chunk_response: + self.close_connection = True + + try: + # if we haven't yet sent the headers but they are set + # we roll back to be able to set them again. 
+ if status_sent is None: + status_set = None + headers_set = None + execute(InternalServerError()) + except Exception: + pass + + from .debug.tbtools import DebugTraceback + + msg = DebugTraceback(e).render_traceback_text() + self.server.log("error", f"Error on request:\n{msg}") + + def handle(self) -> None: + """Handles a request ignoring dropped connections.""" + try: + super().handle() + except (ConnectionError, socket.timeout) as e: + self.connection_dropped(e) + except Exception as e: + if self.server.ssl_context is not None and is_ssl_error(e): + self.log_error("SSL error occurred: %s", e) + else: + raise + + def connection_dropped( + self, error: BaseException, environ: WSGIEnvironment | None = None + ) -> None: + """Called if the connection was closed by the client. By default + nothing happens. + """ + + def __getattr__(self, name: str) -> t.Any: + # All HTTP methods are handled by run_wsgi. + if name.startswith("do_"): + return self.run_wsgi + + # All other attributes are forwarded to the base class. + return getattr(super(), name) + + def address_string(self) -> str: + if getattr(self, "environ", None): + return self.environ["REMOTE_ADDR"] # type: ignore + + if not self.client_address: + return "<local>" + + return self.client_address[0] + + def port_integer(self) -> int: + return self.client_address[1] + + # Escape control characters. This is defined (but private) in Python 3.12. + _control_char_table = str.maketrans( + {c: rf"\x{c:02x}" for c in [*range(0x20), *range(0x7F, 0xA0)]} + ) + _control_char_table[ord("\\")] = r"\\" + + def log_request(self, code: int | str = "-", size: int | str = "-") -> None: + try: + path = uri_to_iri(self.path) + msg = f"{self.command} {path} {self.request_version}" + except AttributeError: + # path isn't set if the requestline was bad + msg = self.requestline + + # Escape control characters that may be in the decoded path. 
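+        # (For example, an ESC byte 0x1b in the path is logged as the
+        # literal four characters "\x1b" rather than being sent to the
+        # terminal.)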
+ msg = msg.translate(self._control_char_table) + code = str(code) + + if code[0] == "1": # 1xx - Informational + msg = _ansi_style(msg, "bold") + elif code == "200": # 2xx - Success + pass + elif code == "304": # 304 - Resource Not Modified + msg = _ansi_style(msg, "cyan") + elif code[0] == "3": # 3xx - Redirection + msg = _ansi_style(msg, "green") + elif code == "404": # 404 - Resource Not Found + msg = _ansi_style(msg, "yellow") + elif code[0] == "4": # 4xx - Client Error + msg = _ansi_style(msg, "bold", "red") + else: # 5xx, or any other response + msg = _ansi_style(msg, "bold", "magenta") + + self.log("info", '"%s" %s %s', msg, code, size) + + def log_error(self, format: str, *args: t.Any) -> None: + self.log("error", format, *args) + + def log_message(self, format: str, *args: t.Any) -> None: + self.log("info", format, *args) + + def log(self, type: str, message: str, *args: t.Any) -> None: + _log( + type, + f"{self.address_string()} - - [{self.log_date_time_string()}] {message}\n", + *args, + ) + + +def _ansi_style(value: str, *styles: str) -> str: + if not _log_add_style: + return value + + codes = { + "bold": 1, + "red": 31, + "green": 32, + "yellow": 33, + "magenta": 35, + "cyan": 36, + } + + for style in styles: + value = f"\x1b[{codes[style]}m{value}" + + return f"{value}\x1b[0m" + + +def generate_adhoc_ssl_pair( + cn: str | None = None, +) -> tuple[Certificate, RSAPrivateKeyWithSerialization]: + try: + from cryptography import x509 + from cryptography.x509.oid import NameOID + from cryptography.hazmat.backends import default_backend + from cryptography.hazmat.primitives import hashes + from cryptography.hazmat.primitives.asymmetric import rsa + except ImportError: + raise TypeError( + "Using ad-hoc certificates requires the cryptography library." + ) from None + + backend = default_backend() + pkey = rsa.generate_private_key( + public_exponent=65537, key_size=2048, backend=backend + ) + + # pretty damn sure that this is not actually accepted by anyone + if cn is None: + cn = "*" + + subject = x509.Name( + [ + x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Dummy Certificate"), + x509.NameAttribute(NameOID.COMMON_NAME, cn), + ] + ) + + backend = default_backend() + cert = ( + x509.CertificateBuilder() + .subject_name(subject) + .issuer_name(subject) + .public_key(pkey.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(dt.now(timezone.utc)) + .not_valid_after(dt.now(timezone.utc) + timedelta(days=365)) + .add_extension(x509.ExtendedKeyUsage([x509.OID_SERVER_AUTH]), critical=False) + .add_extension(x509.SubjectAlternativeName([x509.DNSName(cn)]), critical=False) + .sign(pkey, hashes.SHA256(), backend) + ) + return cert, pkey + + +def make_ssl_devcert( + base_path: str, host: str | None = None, cn: str | None = None +) -> tuple[str, str]: + """Creates an SSL key for development. This should be used instead of + the ``'adhoc'`` key which generates a new cert on each server start. + It accepts a path for where it should store the key and cert and + either a host or CN. If a host is given it will use the CN + ``*.host/CN=host``. + + For more information see :func:`run_simple`. + + .. versionadded:: 0.9 + + :param base_path: the path to the certificate and key. The extension + ``.crt`` is added for the certificate, ``.key`` is + added for the key. + :param host: the name of the host. This can be used as an alternative + for the `cn`. + :param cn: the `CN` to use. 
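+
+    A usage sketch (illustrative, not from the original docs; the path
+    and ``app`` are placeholders)::
+
+        cert_file, key_file = make_ssl_devcert("/tmp/devcert", host="localhost")
+        run_simple("localhost", 8443, app, ssl_context=(cert_file, key_file))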
+ """ + + if host is not None: + cn = f"*.{host}/CN={host}" + cert, pkey = generate_adhoc_ssl_pair(cn=cn) + + from cryptography.hazmat.primitives import serialization + + cert_file = f"{base_path}.crt" + pkey_file = f"{base_path}.key" + + with open(cert_file, "wb") as f: + f.write(cert.public_bytes(serialization.Encoding.PEM)) + with open(pkey_file, "wb") as f: + f.write( + pkey.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) + ) + + return cert_file, pkey_file + + +def generate_adhoc_ssl_context() -> ssl.SSLContext: + """Generates an adhoc SSL context for the development server.""" + import tempfile + import atexit + + cert, pkey = generate_adhoc_ssl_pair() + + from cryptography.hazmat.primitives import serialization + + cert_handle, cert_file = tempfile.mkstemp() + pkey_handle, pkey_file = tempfile.mkstemp() + atexit.register(os.remove, pkey_file) + atexit.register(os.remove, cert_file) + + os.write(cert_handle, cert.public_bytes(serialization.Encoding.PEM)) + os.write( + pkey_handle, + pkey.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ), + ) + + os.close(cert_handle) + os.close(pkey_handle) + ctx = load_ssl_context(cert_file, pkey_file) + return ctx + + +def load_ssl_context( + cert_file: str, pkey_file: str | None = None, protocol: int | None = None +) -> ssl.SSLContext: + """Loads SSL context from cert/private key files and optional protocol. + Many parameters are directly taken from the API of + :py:class:`ssl.SSLContext`. + + :param cert_file: Path of the certificate to use. + :param pkey_file: Path of the private key to use. If not given, the key + will be obtained from the certificate file. + :param protocol: A ``PROTOCOL`` constant from the :mod:`ssl` module. + Defaults to :data:`ssl.PROTOCOL_TLS_SERVER`. + """ + if protocol is None: + protocol = ssl.PROTOCOL_TLS_SERVER + + ctx = ssl.SSLContext(protocol) + ctx.load_cert_chain(cert_file, pkey_file) + return ctx + + +def is_ssl_error(error: Exception | None = None) -> bool: + """Checks if the given error (or the current one) is an SSL error.""" + if error is None: + error = t.cast(Exception, sys.exc_info()[1]) + return isinstance(error, ssl.SSLError) + + +def select_address_family(host: str, port: int) -> socket.AddressFamily: + """Return ``AF_INET4``, ``AF_INET6``, or ``AF_UNIX`` depending on + the host and port.""" + if host.startswith("unix://"): + return socket.AF_UNIX + elif ":" in host and hasattr(socket, "AF_INET6"): + return socket.AF_INET6 + return socket.AF_INET + + +def get_sockaddr( + host: str, port: int, family: socket.AddressFamily +) -> tuple[str, int] | str: + """Return a fully qualified socket address that can be passed to + :func:`socket.bind`.""" + if family == af_unix: + # Absolute path avoids IDNA encoding error when path starts with dot. + return os.path.abspath(host.partition("://")[2]) + try: + res = socket.getaddrinfo( + host, port, family, socket.SOCK_STREAM, socket.IPPROTO_TCP + ) + except socket.gaierror: + return host, port + return res[0][4] # type: ignore + + +def get_interface_ip(family: socket.AddressFamily) -> str: + """Get the IP address of an external interface. Used when binding to + 0.0.0.0 or ::1 to show a more useful URL. 
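+
+    No packets are sent: connecting a UDP socket only asks the OS which
+    local address it would use to reach the target.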
+
+    :meta private:
+    """
+    # arbitrary private address
+    host = "fd31:f903:5ab5:1::1" if family == socket.AF_INET6 else "10.253.155.219"
+
+    with socket.socket(family, socket.SOCK_DGRAM) as s:
+        try:
+            s.connect((host, 58162))
+        except OSError:
+            return "::1" if family == socket.AF_INET6 else "127.0.0.1"
+
+        return s.getsockname()[0]  # type: ignore
+
+
+class BaseWSGIServer(HTTPServer):
+    """A WSGI server that handles one request at a time.
+
+    Use :func:`make_server` to create a server instance.
+    """
+
+    multithread = False
+    multiprocess = False
+    request_queue_size = LISTEN_QUEUE
+    allow_reuse_address = True
+
+    def __init__(
+        self,
+        host: str,
+        port: int,
+        app: WSGIApplication,
+        handler: type[WSGIRequestHandler] | None = None,
+        passthrough_errors: bool = False,
+        ssl_context: _TSSLContextArg | None = None,
+        fd: int | None = None,
+    ) -> None:
+        if handler is None:
+            handler = WSGIRequestHandler
+
+        # If the handler doesn't directly set a protocol version and
+        # thread or process workers are used, then allow chunked
+        # responses and keep-alive connections by enabling HTTP/1.1.
+        if "protocol_version" not in vars(handler) and (
+            self.multithread or self.multiprocess
+        ):
+            handler.protocol_version = "HTTP/1.1"
+
+        self.host = host
+        self.port = port
+        self.app = app
+        self.passthrough_errors = passthrough_errors
+
+        self.address_family = address_family = select_address_family(host, port)
+        server_address = get_sockaddr(host, int(port), address_family)
+
+        # Remove a leftover Unix socket file from a previous run. Don't
+        # remove a file that was set up by run_simple.
+        if address_family == af_unix and fd is None:
+            server_address = t.cast(str, server_address)
+
+            if os.path.exists(server_address):
+                os.unlink(server_address)
+
+        # Bind and activate will be handled manually, it should only
+        # happen if we're not using a socket that was already set up.
+        super().__init__(
+            server_address,  # type: ignore[arg-type]
+            handler,
+            bind_and_activate=False,
+        )
+
+        if fd is None:
+            # No existing socket descriptor, do bind_and_activate=True.
+            try:
+                self.server_bind()
+                self.server_activate()
+            except OSError as e:
+                # Catch connection issues and show them without the traceback. Show
+                # extra instructions for address not found, and for macOS.
+                self.server_close()
+                print(e.strerror, file=sys.stderr)
+
+                if e.errno == errno.EADDRINUSE:
+                    print(
+                        f"Port {port} is in use by another program. Either identify and"
+                        " stop that program, or start the server with a different"
+                        " port.",
+                        file=sys.stderr,
+                    )
+
+                if sys.platform == "darwin" and port == 5000:
+                    print(
+                        "On macOS, try disabling the 'AirPlay Receiver' service"
+                        " from System Preferences -> Sharing.",
+                        file=sys.stderr,
+                    )
+
+                sys.exit(1)
+            except BaseException:
+                self.server_close()
+                raise
+        else:
+            # TCPServer automatically opens a socket even if bind_and_activate is False.
+            # Close it to silence a ResourceWarning.
+            self.server_close()
+
+            # Use the passed in socket directly.
+            self.socket = socket.fromfd(fd, address_family, socket.SOCK_STREAM)
+            self.server_address = self.socket.getsockname()
+
+        if address_family != af_unix:
+            # If port was 0, this will record the bound port.
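+            # (Binding to port 0 makes the OS pick a free ephemeral port;
+            # getsockname() reports the port that was actually assigned.)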
+ self.port = self.server_address[1] + + if ssl_context is not None: + if isinstance(ssl_context, tuple): + ssl_context = load_ssl_context(*ssl_context) + elif ssl_context == "adhoc": + ssl_context = generate_adhoc_ssl_context() + + self.socket = ssl_context.wrap_socket(self.socket, server_side=True) + self.ssl_context: ssl.SSLContext | None = ssl_context + else: + self.ssl_context = None + + def log(self, type: str, message: str, *args: t.Any) -> None: + _log(type, message, *args) + + def serve_forever(self, poll_interval: float = 0.5) -> None: + try: + super().serve_forever(poll_interval=poll_interval) + except KeyboardInterrupt: + pass + finally: + self.server_close() + + def handle_error( + self, request: t.Any, client_address: tuple[str, int] | str + ) -> None: + if self.passthrough_errors: + raise + + return super().handle_error(request, client_address) + + def log_startup(self) -> None: + """Show information about the address when starting the server.""" + dev_warning = ( + "WARNING: This is a development server. Do not use it in a production" + " deployment. Use a production WSGI server instead." + ) + dev_warning = _ansi_style(dev_warning, "bold", "red") + messages = [dev_warning] + + if self.address_family == af_unix: + messages.append(f" * Running on {self.host}") + else: + scheme = "http" if self.ssl_context is None else "https" + display_hostname = self.host + + if self.host in {"0.0.0.0", "::"}: + messages.append(f" * Running on all addresses ({self.host})") + + if self.host == "0.0.0.0": + localhost = "127.0.0.1" + display_hostname = get_interface_ip(socket.AF_INET) + else: + localhost = "[::1]" + display_hostname = get_interface_ip(socket.AF_INET6) + + messages.append(f" * Running on {scheme}://{localhost}:{self.port}") + + if ":" in display_hostname: + display_hostname = f"[{display_hostname}]" + + messages.append(f" * Running on {scheme}://{display_hostname}:{self.port}") + + _log("info", "\n".join(messages)) + + +class ThreadedWSGIServer(socketserver.ThreadingMixIn, BaseWSGIServer): + """A WSGI server that handles concurrent requests in separate + threads. + + Use :func:`make_server` to create a server instance. + """ + + multithread = True + daemon_threads = True + + +class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer): + """A WSGI server that handles concurrent requests in separate forked + processes. + + Use :func:`make_server` to create a server instance. + """ + + multiprocess = True + + def __init__( + self, + host: str, + port: int, + app: WSGIApplication, + processes: int = 40, + handler: type[WSGIRequestHandler] | None = None, + passthrough_errors: bool = False, + ssl_context: _TSSLContextArg | None = None, + fd: int | None = None, + ) -> None: + if not can_fork: + raise ValueError("Your platform does not support forking.") + + super().__init__(host, port, app, handler, passthrough_errors, ssl_context, fd) + self.max_children = processes + + +def make_server( + host: str, + port: int, + app: WSGIApplication, + threaded: bool = False, + processes: int = 1, + request_handler: type[WSGIRequestHandler] | None = None, + passthrough_errors: bool = False, + ssl_context: _TSSLContextArg | None = None, + fd: int | None = None, +) -> BaseWSGIServer: + """Create an appropriate WSGI server instance based on the value of + ``threaded`` and ``processes``. + + This is called from :func:`run_simple`, but can be used separately + to have access to the server object, such as to run it in a separate + thread. + + See :func:`run_simple` for parameter docs. 
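+
+    A sketch of running the server in a background thread (illustrative,
+    assuming ``app`` is a WSGI application)::
+
+        import threading
+
+        server = make_server("127.0.0.1", 0, app, threaded=True)
+        threading.Thread(target=server.serve_forever, daemon=True).start()
+        print(server.port)  # the ephemeral port that was bound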
+ """ + if threaded and processes > 1: + raise ValueError("Cannot have a multi-thread and multi-process server.") + + if threaded: + return ThreadedWSGIServer( + host, port, app, request_handler, passthrough_errors, ssl_context, fd=fd + ) + + if processes > 1: + return ForkingWSGIServer( + host, + port, + app, + processes, + request_handler, + passthrough_errors, + ssl_context, + fd=fd, + ) + + return BaseWSGIServer( + host, port, app, request_handler, passthrough_errors, ssl_context, fd=fd + ) + + +def is_running_from_reloader() -> bool: + """Check if the server is running as a subprocess within the + Werkzeug reloader. + + .. versionadded:: 0.10 + """ + return os.environ.get("WERKZEUG_RUN_MAIN") == "true" + + +def run_simple( + hostname: str, + port: int, + application: WSGIApplication, + use_reloader: bool = False, + use_debugger: bool = False, + use_evalex: bool = True, + extra_files: t.Iterable[str] | None = None, + exclude_patterns: t.Iterable[str] | None = None, + reloader_interval: int = 1, + reloader_type: str = "auto", + threaded: bool = False, + processes: int = 1, + request_handler: type[WSGIRequestHandler] | None = None, + static_files: dict[str, str | tuple[str, str]] | None = None, + passthrough_errors: bool = False, + ssl_context: _TSSLContextArg | None = None, +) -> None: + """Start a development server for a WSGI application. Various + optional features can be enabled. + + .. warning:: + + Do not use the development server when deploying to production. + It is intended for use only during local development. It is not + designed to be particularly efficient, stable, or secure. + + :param hostname: The host to bind to, for example ``'localhost'``. + Can be a domain, IPv4 or IPv6 address, or file path starting + with ``unix://`` for a Unix socket. + :param port: The port to bind to, for example ``8080``. Using ``0`` + tells the OS to pick a random free port. + :param application: The WSGI application to run. + :param use_reloader: Use a reloader process to restart the server + process when files are changed. + :param use_debugger: Use Werkzeug's debugger, which will show + formatted tracebacks on unhandled exceptions. + :param use_evalex: Make the debugger interactive. A Python terminal + can be opened for any frame in the traceback. Some protection is + provided by requiring a PIN, but this should never be enabled + on a publicly visible server. + :param extra_files: The reloader will watch these files for changes + in addition to Python modules. For example, watch a + configuration file. + :param exclude_patterns: The reloader will ignore changes to any + files matching these :mod:`fnmatch` patterns. For example, + ignore cache files. + :param reloader_interval: How often the reloader tries to check for + changes. + :param reloader_type: The reloader to use. The ``'stat'`` reloader + is built in, but may require significant CPU to watch files. The + ``'watchdog'`` reloader is much more efficient but requires + installing the ``watchdog`` package first. + :param threaded: Handle concurrent requests using threads. Cannot be + used with ``processes``. + :param processes: Handle concurrent requests using up to this number + of processes. Cannot be used with ``threaded``. + :param request_handler: Use a different + :class:`~BaseHTTPServer.BaseHTTPRequestHandler` subclass to + handle requests. + :param static_files: A dict mapping URL prefixes to directories to + serve static files from using + :class:`~werkzeug.middleware.SharedDataMiddleware`. 
+ :param passthrough_errors: Don't catch unhandled exceptions at the + server level, let the server crash instead. If ``use_debugger`` + is enabled, the debugger will still catch such errors. + :param ssl_context: Configure TLS to serve over HTTPS. Can be an + :class:`ssl.SSLContext` object, a ``(cert_file, key_file)`` + tuple to create a typical context, or the string ``'adhoc'`` to + generate a temporary self-signed certificate. + + .. versionchanged:: 2.1 + Instructions are shown for dealing with an "address already in + use" error. + + .. versionchanged:: 2.1 + Running on ``0.0.0.0`` or ``::`` shows the loopback IP in + addition to a real IP. + + .. versionchanged:: 2.1 + The command-line interface was removed. + + .. versionchanged:: 2.0 + Running on ``0.0.0.0`` or ``::`` shows a real IP address that + was bound as well as a warning not to run the development server + in production. + + .. versionchanged:: 2.0 + The ``exclude_patterns`` parameter was added. + + .. versionchanged:: 0.15 + Bind to a Unix socket by passing a ``hostname`` that starts with + ``unix://``. + + .. versionchanged:: 0.10 + Improved the reloader and added support for changing the backend + through the ``reloader_type`` parameter. + + .. versionchanged:: 0.9 + A command-line interface was added. + + .. versionchanged:: 0.8 + ``ssl_context`` can be a tuple of paths to the certificate and + private key files. + + .. versionchanged:: 0.6 + The ``ssl_context`` parameter was added. + + .. versionchanged:: 0.5 + The ``static_files`` and ``passthrough_errors`` parameters were + added. + """ + if not isinstance(port, int): + raise TypeError("port must be an integer") + + if static_files: + from .middleware.shared_data import SharedDataMiddleware + + application = SharedDataMiddleware(application, static_files) + + if use_debugger: + from .debug import DebuggedApplication + + application = DebuggedApplication(application, evalex=use_evalex) + + if not is_running_from_reloader(): + fd = None + else: + fd = int(os.environ["WERKZEUG_SERVER_FD"]) + + srv = make_server( + hostname, + port, + application, + threaded, + processes, + request_handler, + passthrough_errors, + ssl_context, + fd=fd, + ) + srv.socket.set_inheritable(True) + os.environ["WERKZEUG_SERVER_FD"] = str(srv.fileno()) + + if not is_running_from_reloader(): + srv.log_startup() + _log("info", _ansi_style("Press CTRL+C to quit", "yellow")) + + if use_reloader: + from ._reloader import run_with_reloader + + try: + run_with_reloader( + srv.serve_forever, + extra_files=extra_files, + exclude_patterns=exclude_patterns, + interval=reloader_interval, + reloader_type=reloader_type, + ) + finally: + srv.server_close() + else: + srv.serve_forever() diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/test.py b/backend/test/lib/python3.8/site-packages/werkzeug/test.py new file mode 100644 index 0000000000000000000000000000000000000000..968553f2b268819a04b81823558d82e049952a78 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/test.py @@ -0,0 +1,1545 @@ +from __future__ import annotations + +import dataclasses +import mimetypes +import sys +import typing as t +import warnings +from collections import defaultdict +from datetime import datetime +from io import BytesIO +from itertools import chain +from random import random +from tempfile import TemporaryFile +from time import time +from urllib.parse import unquote +from urllib.parse import urlsplit +from urllib.parse import urlunsplit + +from ._internal import _get_environ +from ._internal import 
_make_encode_wrapper +from ._internal import _wsgi_decoding_dance +from ._internal import _wsgi_encoding_dance +from .datastructures import Authorization +from .datastructures import CallbackDict +from .datastructures import CombinedMultiDict +from .datastructures import EnvironHeaders +from .datastructures import FileMultiDict +from .datastructures import Headers +from .datastructures import MultiDict +from .http import dump_cookie +from .http import dump_options_header +from .http import parse_cookie +from .http import parse_date +from .http import parse_options_header +from .sansio.multipart import Data +from .sansio.multipart import Epilogue +from .sansio.multipart import Field +from .sansio.multipart import File +from .sansio.multipart import MultipartEncoder +from .sansio.multipart import Preamble +from .urls import _urlencode +from .urls import iri_to_uri +from .utils import cached_property +from .utils import get_content_type +from .wrappers.request import Request +from .wrappers.response import Response +from .wsgi import ClosingIterator +from .wsgi import get_current_url + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + import typing_extensions as te + + +def stream_encode_multipart( + data: t.Mapping[str, t.Any], + use_tempfile: bool = True, + threshold: int = 1024 * 500, + boundary: str | None = None, + charset: str | None = None, +) -> tuple[t.IO[bytes], int, str]: + """Encode a dict of values (either strings or file descriptors or + :class:`FileStorage` objects.) into a multipart encoded string stored + in a file descriptor. + + .. versionchanged:: 2.3 + The ``charset`` parameter is deprecated and will be removed in Werkzeug 3.0 + """ + if charset is not None: + warnings.warn( + "The 'charset' parameter is deprecated and will be removed in Werkzeug 3.0", + DeprecationWarning, + stacklevel=2, + ) + else: + charset = "utf-8" + + if boundary is None: + boundary = f"---------------WerkzeugFormPart_{time()}{random()}" + + stream: t.IO[bytes] = BytesIO() + total_length = 0 + on_disk = False + write_binary: t.Callable[[bytes], int] + + if use_tempfile: + + def write_binary(s: bytes) -> int: + nonlocal stream, total_length, on_disk + + if on_disk: + return stream.write(s) + else: + length = len(s) + + if length + total_length <= threshold: + stream.write(s) + else: + new_stream = t.cast(t.IO[bytes], TemporaryFile("wb+")) + new_stream.write(stream.getvalue()) # type: ignore + new_stream.write(s) + stream = new_stream + on_disk = True + + total_length += length + return length + + else: + write_binary = stream.write + + encoder = MultipartEncoder(boundary.encode()) + write_binary(encoder.send_event(Preamble(data=b""))) + for key, value in _iter_data(data): + reader = getattr(value, "read", None) + if reader is not None: + filename = getattr(value, "filename", getattr(value, "name", None)) + content_type = getattr(value, "content_type", None) + if content_type is None: + content_type = ( + filename + and mimetypes.guess_type(filename)[0] + or "application/octet-stream" + ) + headers = value.headers + headers.update([("Content-Type", content_type)]) + if filename is None: + write_binary(encoder.send_event(Field(name=key, headers=headers))) + else: + write_binary( + encoder.send_event( + File(name=key, filename=filename, headers=headers) + ) + ) + while True: + chunk = reader(16384) + + if not chunk: + write_binary(encoder.send_event(Data(data=chunk, more_data=False))) + break + + 
write_binary(encoder.send_event(Data(data=chunk, more_data=True))) + else: + if not isinstance(value, str): + value = str(value) + write_binary(encoder.send_event(Field(name=key, headers=Headers()))) + write_binary( + encoder.send_event(Data(data=value.encode(charset), more_data=False)) + ) + + write_binary(encoder.send_event(Epilogue(data=b""))) + + length = stream.tell() + stream.seek(0) + return stream, length, boundary + + +def encode_multipart( + values: t.Mapping[str, t.Any], + boundary: str | None = None, + charset: str | None = None, +) -> tuple[str, bytes]: + """Like `stream_encode_multipart` but returns a tuple in the form + (``boundary``, ``data``) where data is bytes. + + .. versionchanged:: 2.3 + The ``charset`` parameter is deprecated and will be removed in Werkzeug 3.0 + """ + stream, length, boundary = stream_encode_multipart( + values, use_tempfile=False, boundary=boundary, charset=charset + ) + return boundary, stream.read() + + +def _iter_data(data: t.Mapping[str, t.Any]) -> t.Iterator[tuple[str, t.Any]]: + """Iterate over a mapping that might have a list of values, yielding + all key, value pairs. Almost like iter_multi_items but only allows + lists, not tuples, of values so tuples can be used for files. + """ + if isinstance(data, MultiDict): + yield from data.items(multi=True) + else: + for key, value in data.items(): + if isinstance(value, list): + for v in value: + yield key, v + else: + yield key, value + + +_TAnyMultiDict = t.TypeVar("_TAnyMultiDict", bound=MultiDict) + + +class EnvironBuilder: + """This class can be used to conveniently create a WSGI environment + for testing purposes. It can be used to quickly create WSGI environments + or request objects from arbitrary data. + + The signature of this class is also used in some other places as of + Werkzeug 0.5 (:func:`create_environ`, :meth:`Response.from_values`, + :meth:`Client.open`). Because of this most of the functionality is + available through the constructor alone. + + Files and regular form data can be manipulated independently of each + other with the :attr:`form` and :attr:`files` attributes, but are + passed with the same argument to the constructor: `data`. + + `data` can be any of these values: + + - a `str` or `bytes` object: The object is converted into an + :attr:`input_stream`, the :attr:`content_length` is set and you have to + provide a :attr:`content_type`. + - a `dict` or :class:`MultiDict`: The keys have to be strings. The values + have to be either any of the following objects, or a list of any of the + following objects: + + - a :class:`file`-like object: These are converted into + :class:`FileStorage` objects automatically. + - a `tuple`: The :meth:`~FileMultiDict.add_file` method is called + with the key and the unpacked `tuple` items as positional + arguments. + - a `str`: The string is set as form data for the associated key. + - a file-like object: The object content is loaded in memory and then + handled like a regular `str` or a `bytes`. + + :param path: the path of the request. In the WSGI environment this will + end up as `PATH_INFO`. If the `query_string` is not defined + and there is a question mark in the `path` everything after + it is used as query string. + :param base_url: the base URL is a URL that is used to extract the WSGI + URL scheme, host (server name + server port) and the + script root (`SCRIPT_NAME`). + :param query_string: an optional string or dict with URL parameters. + :param method: the HTTP method to use, defaults to `GET`. 
+ :param input_stream: an optional input stream. Do not specify this and + `data`. As soon as an input stream is set you can't + modify :attr:`args` and :attr:`files` unless you + set the :attr:`input_stream` to `None` again. + :param content_type: The content type for the request. As of 0.5 you + don't have to provide this when specifying files + and form data via `data`. + :param content_length: The content length for the request. You don't + have to specify this when providing data via + `data`. + :param errors_stream: an optional error stream that is used for + `wsgi.errors`. Defaults to :data:`stderr`. + :param multithread: controls `wsgi.multithread`. Defaults to `False`. + :param multiprocess: controls `wsgi.multiprocess`. Defaults to `False`. + :param run_once: controls `wsgi.run_once`. Defaults to `False`. + :param headers: an optional list or :class:`Headers` object of headers. + :param data: a string or dict of form data or a file-object. + See explanation above. + :param json: An object to be serialized and assigned to ``data``. + Defaults the content type to ``"application/json"``. + Serialized with the function assigned to :attr:`json_dumps`. + :param environ_base: an optional dict of environment defaults. + :param environ_overrides: an optional dict of environment overrides. + :param auth: An authorization object to use for the + ``Authorization`` header value. A ``(username, password)`` tuple + is a shortcut for ``Basic`` authorization. + + .. versionchanged:: 2.3 + The ``charset`` parameter is deprecated and will be removed in Werkzeug 3.0 + + .. versionchanged:: 2.1 + ``CONTENT_TYPE`` and ``CONTENT_LENGTH`` are not duplicated as + header keys in the environ. + + .. versionchanged:: 2.0 + ``REQUEST_URI`` and ``RAW_URI`` is the full raw URI including + the query string, not only the path. + + .. versionchanged:: 2.0 + The default :attr:`request_class` is ``Request`` instead of + ``BaseRequest``. + + .. versionadded:: 2.0 + Added the ``auth`` parameter. + + .. versionadded:: 0.15 + The ``json`` param and :meth:`json_dumps` method. + + .. versionadded:: 0.15 + The environ has keys ``REQUEST_URI`` and ``RAW_URI`` containing + the path before percent-decoding. This is not part of the WSGI + PEP, but many WSGI servers include it. + + .. versionchanged:: 0.6 + ``path`` and ``base_url`` can now be unicode strings that are + encoded with :func:`iri_to_uri`. + """ + + #: the server protocol to use. defaults to HTTP/1.1 + server_protocol = "HTTP/1.1" + + #: the wsgi version to use. defaults to (1, 0) + wsgi_version = (1, 0) + + #: The default request class used by :meth:`get_request`. + request_class = Request + + import json + + #: The serialization function used when ``json`` is passed. 
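+    #: Override this on a subclass to customize serialization, for
+    #: example to use a JSON encoder with extra type support (an
+    #: illustrative note, not from the original docs).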
+ json_dumps = staticmethod(json.dumps) + del json + + _args: MultiDict | None + _query_string: str | None + _input_stream: t.IO[bytes] | None + _form: MultiDict | None + _files: FileMultiDict | None + + def __init__( + self, + path: str = "/", + base_url: str | None = None, + query_string: t.Mapping[str, str] | str | None = None, + method: str = "GET", + input_stream: t.IO[bytes] | None = None, + content_type: str | None = None, + content_length: int | None = None, + errors_stream: t.IO[str] | None = None, + multithread: bool = False, + multiprocess: bool = False, + run_once: bool = False, + headers: Headers | t.Iterable[tuple[str, str]] | None = None, + data: None | (t.IO[bytes] | str | bytes | t.Mapping[str, t.Any]) = None, + environ_base: t.Mapping[str, t.Any] | None = None, + environ_overrides: t.Mapping[str, t.Any] | None = None, + charset: str | None = None, + mimetype: str | None = None, + json: t.Mapping[str, t.Any] | None = None, + auth: Authorization | tuple[str, str] | None = None, + ) -> None: + path_s = _make_encode_wrapper(path) + if query_string is not None and path_s("?") in path: + raise ValueError("Query string is defined in the path and as an argument") + request_uri = urlsplit(path) + if query_string is None and path_s("?") in path: + query_string = request_uri.query + + if charset is not None: + warnings.warn( + "The 'charset' parameter is deprecated and will be" + " removed in Werkzeug 3.0", + DeprecationWarning, + stacklevel=2, + ) + else: + charset = "utf-8" + + self.charset = charset + self.path = iri_to_uri(request_uri.path) + self.request_uri = path + if base_url is not None: + base_url = iri_to_uri( + base_url, charset=charset if charset != "utf-8" else None + ) + self.base_url = base_url # type: ignore + if isinstance(query_string, str): + self.query_string = query_string + else: + if query_string is None: + query_string = MultiDict() + elif not isinstance(query_string, MultiDict): + query_string = MultiDict(query_string) + self.args = query_string + self.method = method + if headers is None: + headers = Headers() + elif not isinstance(headers, Headers): + headers = Headers(headers) + self.headers = headers + if content_type is not None: + self.content_type = content_type + if errors_stream is None: + errors_stream = sys.stderr + self.errors_stream = errors_stream + self.multithread = multithread + self.multiprocess = multiprocess + self.run_once = run_once + self.environ_base = environ_base + self.environ_overrides = environ_overrides + self.input_stream = input_stream + self.content_length = content_length + self.closed = False + + if auth is not None: + if isinstance(auth, tuple): + auth = Authorization( + "basic", {"username": auth[0], "password": auth[1]} + ) + + self.headers.set("Authorization", auth.to_header()) + + if json is not None: + if data is not None: + raise TypeError("can't provide both json and data") + + data = self.json_dumps(json) + + if self.content_type is None: + self.content_type = "application/json" + + if data: + if input_stream is not None: + raise TypeError("can't provide input stream and data") + if hasattr(data, "read"): + data = data.read() + if isinstance(data, str): + data = data.encode(self.charset) + if isinstance(data, bytes): + self.input_stream = BytesIO(data) + if self.content_length is None: + self.content_length = len(data) + else: + for key, value in _iter_data(data): + if isinstance(value, (tuple, dict)) or hasattr(value, "read"): + self._add_file_from_data(key, value) + else: + 
self.form.setlistdefault(key).append(value) + + if mimetype is not None: + self.mimetype = mimetype + + @classmethod + def from_environ(cls, environ: WSGIEnvironment, **kwargs: t.Any) -> EnvironBuilder: + """Turn an environ dict back into a builder. Any extra kwargs + override the args extracted from the environ. + + .. versionchanged:: 2.0 + Path and query values are passed through the WSGI decoding + dance to avoid double encoding. + + .. versionadded:: 0.15 + """ + headers = Headers(EnvironHeaders(environ)) + out = { + "path": _wsgi_decoding_dance(environ["PATH_INFO"]), + "base_url": cls._make_base_url( + environ["wsgi.url_scheme"], + headers.pop("Host"), + _wsgi_decoding_dance(environ["SCRIPT_NAME"]), + ), + "query_string": _wsgi_decoding_dance(environ["QUERY_STRING"]), + "method": environ["REQUEST_METHOD"], + "input_stream": environ["wsgi.input"], + "content_type": headers.pop("Content-Type", None), + "content_length": headers.pop("Content-Length", None), + "errors_stream": environ["wsgi.errors"], + "multithread": environ["wsgi.multithread"], + "multiprocess": environ["wsgi.multiprocess"], + "run_once": environ["wsgi.run_once"], + "headers": headers, + } + out.update(kwargs) + return cls(**out) + + def _add_file_from_data( + self, + key: str, + value: (t.IO[bytes] | tuple[t.IO[bytes], str] | tuple[t.IO[bytes], str, str]), + ) -> None: + """Called in the EnvironBuilder to add files from the data dict.""" + if isinstance(value, tuple): + self.files.add_file(key, *value) + else: + self.files.add_file(key, value) + + @staticmethod + def _make_base_url(scheme: str, host: str, script_root: str) -> str: + return urlunsplit((scheme, host, script_root, "", "")).rstrip("/") + "/" + + @property + def base_url(self) -> str: + """The base URL is used to extract the URL scheme, host name, + port, and root path. + """ + return self._make_base_url(self.url_scheme, self.host, self.script_root) + + @base_url.setter + def base_url(self, value: str | None) -> None: + if value is None: + scheme = "http" + netloc = "localhost" + script_root = "" + else: + scheme, netloc, script_root, qs, anchor = urlsplit(value) + if qs or anchor: + raise ValueError("base url must not contain a query string or fragment") + self.script_root = script_root.rstrip("/") + self.host = netloc + self.url_scheme = scheme + + @property + def content_type(self) -> str | None: + """The content type for the request. Reflected from and to + the :attr:`headers`. Do not set if you set :attr:`files` or + :attr:`form` for auto detection. + """ + ct = self.headers.get("Content-Type") + if ct is None and not self._input_stream: + if self._files: + return "multipart/form-data" + if self._form: + return "application/x-www-form-urlencoded" + return None + return ct + + @content_type.setter + def content_type(self, value: str | None) -> None: + if value is None: + self.headers.pop("Content-Type", None) + else: + self.headers["Content-Type"] = value + + @property + def mimetype(self) -> str | None: + """The mimetype (content type without charset etc.) + + .. versionadded:: 0.14 + """ + ct = self.content_type + return ct.split(";")[0].strip() if ct else None + + @mimetype.setter + def mimetype(self, value: str) -> None: + self.content_type = get_content_type(value, self.charset) + + @property + def mimetype_params(self) -> t.Mapping[str, str]: + """The mimetype parameters as dict. For example if the + content type is ``text/html; charset=utf-8`` the params would be + ``{'charset': 'utf-8'}``. + + .. 
versionadded:: 0.14 + """ + + def on_update(d: CallbackDict) -> None: + self.headers["Content-Type"] = dump_options_header(self.mimetype, d) + + d = parse_options_header(self.headers.get("content-type", ""))[1] + return CallbackDict(d, on_update) + + @property + def content_length(self) -> int | None: + """The content length as integer. Reflected from and to the + :attr:`headers`. Do not set if you set :attr:`files` or + :attr:`form` for auto detection. + """ + return self.headers.get("Content-Length", type=int) + + @content_length.setter + def content_length(self, value: int | None) -> None: + if value is None: + self.headers.pop("Content-Length", None) + else: + self.headers["Content-Length"] = str(value) + + def _get_form(self, name: str, storage: type[_TAnyMultiDict]) -> _TAnyMultiDict: + """Common behavior for getting the :attr:`form` and + :attr:`files` properties. + + :param name: Name of the internal cached attribute. + :param storage: Storage class used for the data. + """ + if self.input_stream is not None: + raise AttributeError("an input stream is defined") + + rv = getattr(self, name) + + if rv is None: + rv = storage() + setattr(self, name, rv) + + return rv # type: ignore + + def _set_form(self, name: str, value: MultiDict) -> None: + """Common behavior for setting the :attr:`form` and + :attr:`files` properties. + + :param name: Name of the internal cached attribute. + :param value: Value to assign to the attribute. + """ + self._input_stream = None + setattr(self, name, value) + + @property + def form(self) -> MultiDict: + """A :class:`MultiDict` of form values.""" + return self._get_form("_form", MultiDict) + + @form.setter + def form(self, value: MultiDict) -> None: + self._set_form("_form", value) + + @property + def files(self) -> FileMultiDict: + """A :class:`FileMultiDict` of uploaded files. Use + :meth:`~FileMultiDict.add_file` to add new files. + """ + return self._get_form("_files", FileMultiDict) + + @files.setter + def files(self, value: FileMultiDict) -> None: + self._set_form("_files", value) + + @property + def input_stream(self) -> t.IO[bytes] | None: + """An optional input stream. This is mutually exclusive with + setting :attr:`form` and :attr:`files`, setting it will clear + those. Do not provide this if the method is not ``POST`` or + another method that has a body. + """ + return self._input_stream + + @input_stream.setter + def input_stream(self, value: t.IO[bytes] | None) -> None: + self._input_stream = value + self._form = None + self._files = None + + @property + def query_string(self) -> str: + """The query string. If you set this to a string + :attr:`args` will no longer be available. 
+ """ + if self._query_string is None: + if self._args is not None: + return _urlencode(self._args, encoding=self.charset) + return "" + return self._query_string + + @query_string.setter + def query_string(self, value: str | None) -> None: + self._query_string = value + self._args = None + + @property + def args(self) -> MultiDict: + """The URL arguments as :class:`MultiDict`.""" + if self._query_string is not None: + raise AttributeError("a query string is defined") + if self._args is None: + self._args = MultiDict() + return self._args + + @args.setter + def args(self, value: MultiDict | None) -> None: + self._query_string = None + self._args = value + + @property + def server_name(self) -> str: + """The server name (read-only, use :attr:`host` to set)""" + return self.host.split(":", 1)[0] + + @property + def server_port(self) -> int: + """The server port as integer (read-only, use :attr:`host` to set)""" + pieces = self.host.split(":", 1) + + if len(pieces) == 2: + try: + return int(pieces[1]) + except ValueError: + pass + + if self.url_scheme == "https": + return 443 + return 80 + + def __del__(self) -> None: + try: + self.close() + except Exception: + pass + + def close(self) -> None: + """Closes all files. If you put real :class:`file` objects into the + :attr:`files` dict you can call this method to automatically close + them all in one go. + """ + if self.closed: + return + try: + files = self.files.values() + except AttributeError: + files = () # type: ignore + for f in files: + try: + f.close() + except Exception: + pass + self.closed = True + + def get_environ(self) -> WSGIEnvironment: + """Return the built environ. + + .. versionchanged:: 0.15 + The content type and length headers are set based on + input stream detection. Previously this only set the WSGI + keys. 
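+
+        A sketch of typical use (illustrative, not from the original
+        docs)::
+
+            builder = EnvironBuilder(method="POST", data={"field": "value"})
+            environ = builder.get_environ()
+            environ["REQUEST_METHOD"]  # 'POST'
+            environ["CONTENT_TYPE"]    # 'application/x-www-form-urlencoded'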
+ """ + input_stream = self.input_stream + content_length = self.content_length + + mimetype = self.mimetype + content_type = self.content_type + + if input_stream is not None: + start_pos = input_stream.tell() + input_stream.seek(0, 2) + end_pos = input_stream.tell() + input_stream.seek(start_pos) + content_length = end_pos - start_pos + elif mimetype == "multipart/form-data": + charset = self.charset if self.charset != "utf-8" else None + input_stream, content_length, boundary = stream_encode_multipart( + CombinedMultiDict([self.form, self.files]), charset=charset + ) + content_type = f'{mimetype}; boundary="{boundary}"' + elif mimetype == "application/x-www-form-urlencoded": + form_encoded = _urlencode(self.form, encoding=self.charset).encode("ascii") + content_length = len(form_encoded) + input_stream = BytesIO(form_encoded) + else: + input_stream = BytesIO() + + result: WSGIEnvironment = {} + if self.environ_base: + result.update(self.environ_base) + + def _path_encode(x: str) -> str: + return _wsgi_encoding_dance(unquote(x, encoding=self.charset), self.charset) + + raw_uri = _wsgi_encoding_dance(self.request_uri, self.charset) + result.update( + { + "REQUEST_METHOD": self.method, + "SCRIPT_NAME": _path_encode(self.script_root), + "PATH_INFO": _path_encode(self.path), + "QUERY_STRING": _wsgi_encoding_dance(self.query_string, self.charset), + # Non-standard, added by mod_wsgi, uWSGI + "REQUEST_URI": raw_uri, + # Non-standard, added by gunicorn + "RAW_URI": raw_uri, + "SERVER_NAME": self.server_name, + "SERVER_PORT": str(self.server_port), + "HTTP_HOST": self.host, + "SERVER_PROTOCOL": self.server_protocol, + "wsgi.version": self.wsgi_version, + "wsgi.url_scheme": self.url_scheme, + "wsgi.input": input_stream, + "wsgi.errors": self.errors_stream, + "wsgi.multithread": self.multithread, + "wsgi.multiprocess": self.multiprocess, + "wsgi.run_once": self.run_once, + } + ) + + headers = self.headers.copy() + # Don't send these as headers, they're part of the environ. + headers.remove("Content-Type") + headers.remove("Content-Length") + + if content_type is not None: + result["CONTENT_TYPE"] = content_type + + if content_length is not None: + result["CONTENT_LENGTH"] = str(content_length) + + combined_headers = defaultdict(list) + + for key, value in headers.to_wsgi_list(): + combined_headers[f"HTTP_{key.upper().replace('-', '_')}"].append(value) + + for key, values in combined_headers.items(): + result[key] = ", ".join(values) + + if self.environ_overrides: + result.update(self.environ_overrides) + + return result + + def get_request(self, cls: type[Request] | None = None) -> Request: + """Returns a request with the data. If the request class is not + specified :attr:`request_class` is used. + + :param cls: The request wrapper to use. + """ + if cls is None: + cls = self.request_class + + return cls(self.get_environ()) + + +class ClientRedirectError(Exception): + """If a redirect loop is detected when using follow_redirects=True with + the :cls:`Client`, then this exception is raised. + """ + + +class Client: + """Simulate sending requests to a WSGI application without running a WSGI or HTTP + server. + + :param application: The WSGI application to make requests to. + :param response_wrapper: A :class:`.Response` class to wrap response data with. + Defaults to :class:`.TestResponse`. If it's not a subclass of ``TestResponse``, + one will be created. + :param use_cookies: Persist cookies from ``Set-Cookie`` response headers to the + ``Cookie`` header in subsequent requests. 
Domain and path matching is supported, + but other cookie parameters are ignored. + :param allow_subdomain_redirects: Allow requests to follow redirects to subdomains. + Enable this if the application handles subdomains and redirects between them. + + .. versionchanged:: 2.3 + Simplify cookie implementation, support domain and path matching. + + .. versionchanged:: 2.1 + All data is available as properties on the returned response object. The + response cannot be returned as a tuple. + + .. versionchanged:: 2.0 + ``response_wrapper`` is always a subclass of :class:``TestResponse``. + + .. versionchanged:: 0.5 + Added the ``use_cookies`` parameter. + """ + + def __init__( + self, + application: WSGIApplication, + response_wrapper: type[Response] | None = None, + use_cookies: bool = True, + allow_subdomain_redirects: bool = False, + ) -> None: + self.application = application + + if response_wrapper in {None, Response}: + response_wrapper = TestResponse + elif not isinstance(response_wrapper, TestResponse): + response_wrapper = type( + "WrapperTestResponse", + (TestResponse, response_wrapper), # type: ignore + {}, + ) + + self.response_wrapper = t.cast(t.Type["TestResponse"], response_wrapper) + + if use_cookies: + self._cookies: dict[tuple[str, str, str], Cookie] | None = {} + else: + self._cookies = None + + self.allow_subdomain_redirects = allow_subdomain_redirects + + @property + def cookie_jar(self) -> t.Iterable[Cookie] | None: + warnings.warn( + "The 'cookie_jar' attribute is a private API and will be removed in" + " Werkzeug 3.0. Use the 'get_cookie' method instead.", + DeprecationWarning, + stacklevel=2, + ) + + if self._cookies is None: + return None + + return self._cookies.values() + + def get_cookie( + self, key: str, domain: str = "localhost", path: str = "/" + ) -> Cookie | None: + """Return a :class:`.Cookie` if it exists. Cookies are uniquely identified by + ``(domain, path, key)``. + + :param key: The decoded form of the key for the cookie. + :param domain: The domain the cookie was set for. + :param path: The path the cookie was set for. + + .. versionadded:: 2.3 + """ + if self._cookies is None: + raise TypeError( + "Cookies are disabled. Create a client with 'use_cookies=True'." + ) + + return self._cookies.get((domain, path, key)) + + def set_cookie( + self, + key: str, + value: str = "", + *args: t.Any, + domain: str = "localhost", + origin_only: bool = True, + path: str = "/", + **kwargs: t.Any, + ) -> None: + """Set a cookie to be sent in subsequent requests. + + This is a convenience to skip making a test request to a route that would set + the cookie. To test the cookie, make a test request to a route that uses the + cookie value. + + The client uses ``domain``, ``origin_only``, and ``path`` to determine which + cookies to send with a request. It does not use other cookie parameters that + browsers use, since they're not applicable in tests. + + :param key: The key part of the cookie. + :param value: The value part of the cookie. + :param domain: Send this cookie with requests that match this domain. If + ``origin_only`` is true, it must be an exact match, otherwise it may be a + suffix match. + :param origin_only: Whether the domain must be an exact match to the request. + :param path: Send this cookie with requests that match this path either exactly + or as a prefix. + :param kwargs: Passed to :func:`.dump_cookie`. + + .. versionchanged:: 2.3 + The ``origin_only`` parameter was added. + + .. 
versionchanged:: 2.3 + The ``domain`` parameter defaults to ``localhost``. + + .. versionchanged:: 2.3 + The first parameter ``server_name`` is deprecated and will be removed in + Werkzeug 3.0. The first parameter is ``key``. Use the ``domain`` and + ``origin_only`` parameters instead. + """ + if self._cookies is None: + raise TypeError( + "Cookies are disabled. Create a client with 'use_cookies=True'." + ) + + if args: + warnings.warn( + "The first parameter 'server_name' is no longer used, and will be" + " removed in Werkzeug 3.0. The positional parameters are 'key' and" + " 'value'. Use the 'domain' and 'origin_only' parameters instead.", + DeprecationWarning, + stacklevel=2, + ) + domain = key + key = value + value = args[0] + + cookie = Cookie._from_response_header( + domain, "/", dump_cookie(key, value, domain=domain, path=path, **kwargs) + ) + cookie.origin_only = origin_only + + if cookie._should_delete: + self._cookies.pop(cookie._storage_key, None) + else: + self._cookies[cookie._storage_key] = cookie + + def delete_cookie( + self, + key: str, + *args: t.Any, + domain: str = "localhost", + path: str = "/", + **kwargs: t.Any, + ) -> None: + """Delete a cookie if it exists. Cookies are uniquely identified by + ``(domain, path, key)``. + + :param key: The decoded form of the key for the cookie. + :param domain: The domain the cookie was set for. + :param path: The path the cookie was set for. + + .. versionchanged:: 2.3 + The ``domain`` parameter defaults to ``localhost``. + + .. versionchanged:: 2.3 + The first parameter ``server_name`` is deprecated and will be removed in + Werkzeug 3.0. The first parameter is ``key``. Use the ``domain`` parameter + instead. + + .. versionchanged:: 2.3 + The ``secure``, ``httponly`` and ``samesite`` parameters are deprecated and + will be removed in Werkzeug 2.4. + """ + if self._cookies is None: + raise TypeError( + "Cookies are disabled. Create a client with 'use_cookies=True'." + ) + + if args: + warnings.warn( + "The first parameter 'server_name' is no longer used, and will be" + " removed in Werkzeug 2.4. The first parameter is 'key'. Use the" + " 'domain' parameter instead.", + DeprecationWarning, + stacklevel=2, + ) + domain = key + key = args[0] + + if kwargs: + kwargs_keys = ", ".join(f"'{k}'" for k in kwargs) + plural = "parameters are" if len(kwargs) > 1 else "parameter is" + warnings.warn( + f"The {kwargs_keys} {plural} deprecated and will be" + f" removed in Werkzeug 2.4.", + DeprecationWarning, + stacklevel=2, + ) + + self._cookies.pop((domain, path, key), None) + + def _add_cookies_to_wsgi(self, environ: WSGIEnvironment) -> None: + """If cookies are enabled, set the ``Cookie`` header in the environ to the + cookies that are applicable to the request host and path. + + :meta private: + + .. versionadded:: 2.3 + """ + if self._cookies is None: + return + + url = urlsplit(get_current_url(environ)) + server_name = url.hostname or "localhost" + value = "; ".join( + c._to_request_header() + for c in self._cookies.values() + if c._matches_request(server_name, url.path) + ) + + if value: + environ["HTTP_COOKIE"] = value + else: + environ.pop("HTTP_COOKIE", None) + + def _update_cookies_from_response( + self, server_name: str, path: str, headers: list[str] + ) -> None: + """If cookies are enabled, update the stored cookies from any ``Set-Cookie`` + headers in the response. + + :meta private: + + .. 
versionadded:: 2.3 + """ + if self._cookies is None: + return + + for header in headers: + cookie = Cookie._from_response_header(server_name, path, header) + + if cookie._should_delete: + self._cookies.pop(cookie._storage_key, None) + else: + self._cookies[cookie._storage_key] = cookie + + def run_wsgi_app( + self, environ: WSGIEnvironment, buffered: bool = False + ) -> tuple[t.Iterable[bytes], str, Headers]: + """Runs the wrapped WSGI app with the given environment. + + :meta private: + """ + self._add_cookies_to_wsgi(environ) + rv = run_wsgi_app(self.application, environ, buffered=buffered) + url = urlsplit(get_current_url(environ)) + self._update_cookies_from_response( + url.hostname or "localhost", url.path, rv[2].getlist("Set-Cookie") + ) + return rv + + def resolve_redirect( + self, response: TestResponse, buffered: bool = False + ) -> TestResponse: + """Perform a new request to the location given by the redirect + response to the previous request. + + :meta private: + """ + scheme, netloc, path, qs, anchor = urlsplit(response.location) + builder = EnvironBuilder.from_environ( + response.request.environ, path=path, query_string=qs + ) + + to_name_parts = netloc.split(":", 1)[0].split(".") + from_name_parts = builder.server_name.split(".") + + if to_name_parts != [""]: + # The new location has a host, use it for the base URL. + builder.url_scheme = scheme + builder.host = netloc + else: + # A local redirect with autocorrect_location_header=False + # doesn't have a host, so use the request's host. + to_name_parts = from_name_parts + + # Explain why a redirect to a different server name won't be followed. + if to_name_parts != from_name_parts: + if to_name_parts[-len(from_name_parts) :] == from_name_parts: + if not self.allow_subdomain_redirects: + raise RuntimeError("Following subdomain redirects is not enabled.") + else: + raise RuntimeError("Following external redirects is not supported.") + + path_parts = path.split("/") + root_parts = builder.script_root.split("/") + + if path_parts[: len(root_parts)] == root_parts: + # Strip the script root from the path. + builder.path = path[len(builder.script_root) :] + else: + # The new location is not under the script root, so use the + # whole path and clear the previous root. + builder.path = path + builder.script_root = "" + + # Only 307 and 308 preserve all of the original request. + if response.status_code not in {307, 308}: + # HEAD is preserved, everything else becomes GET. + if builder.method != "HEAD": + builder.method = "GET" + + # Clear the body and the headers that describe it. + + if builder.input_stream is not None: + builder.input_stream.close() + builder.input_stream = None + + builder.content_type = None + builder.content_length = None + builder.headers.pop("Transfer-Encoding", None) + + return self.open(builder, buffered=buffered) + + def open( + self, + *args: t.Any, + buffered: bool = False, + follow_redirects: bool = False, + **kwargs: t.Any, + ) -> TestResponse: + """Generate an environ dict from the given arguments, make a + request to the application using it, and return the response. + + :param args: Passed to :class:`EnvironBuilder` to create the + environ for the request. If a single arg is passed, it can + be an existing :class:`EnvironBuilder` or an environ dict. + :param buffered: Convert the iterator returned by the app into + a list. If the iterator has a ``close()`` method, it is + called automatically. 
+ :param follow_redirects: Make additional requests to follow HTTP + redirects until a non-redirect status is returned. + :attr:`TestResponse.history` lists the intermediate + responses. + + .. versionchanged:: 2.1 + Removed the ``as_tuple`` parameter. + + .. versionchanged:: 2.0 + The request input stream is closed when calling + ``response.close()``. Input streams for redirects are + automatically closed. + + .. versionchanged:: 0.5 + If a dict is provided as file in the dict for the ``data`` + parameter the content type has to be called ``content_type`` + instead of ``mimetype``. This change was made for + consistency with :class:`werkzeug.FileWrapper`. + + .. versionchanged:: 0.5 + Added the ``follow_redirects`` parameter. + """ + request: Request | None = None + + if not kwargs and len(args) == 1: + arg = args[0] + + if isinstance(arg, EnvironBuilder): + request = arg.get_request() + elif isinstance(arg, dict): + request = EnvironBuilder.from_environ(arg).get_request() + elif isinstance(arg, Request): + request = arg + + if request is None: + builder = EnvironBuilder(*args, **kwargs) + + try: + request = builder.get_request() + finally: + builder.close() + + response = self.run_wsgi_app(request.environ, buffered=buffered) + response = self.response_wrapper(*response, request=request) + + redirects = set() + history: list[TestResponse] = [] + + if not follow_redirects: + return response + + while response.status_code in { + 301, + 302, + 303, + 305, + 307, + 308, + }: + # Exhaust intermediate response bodies to ensure middleware + # that returns an iterator runs any cleanup code. + if not buffered: + response.make_sequence() + response.close() + + new_redirect_entry = (response.location, response.status_code) + + if new_redirect_entry in redirects: + raise ClientRedirectError( + f"Loop detected: A {response.status_code} redirect" + f" to {response.location} was already made." + ) + + redirects.add(new_redirect_entry) + response.history = tuple(history) + history.append(response) + response = self.resolve_redirect(response, buffered=buffered) + else: + # This is the final request after redirects. + response.history = tuple(history) + # Close the input stream when closing the response, in case + # the input is an open temporary file. 
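+            # (call_on_close registers a callback that runs when
+            # response.close() is called.)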
+ response.call_on_close(request.input_stream.close) + return response + + def get(self, *args: t.Any, **kw: t.Any) -> TestResponse: + """Call :meth:`open` with ``method`` set to ``GET``.""" + kw["method"] = "GET" + return self.open(*args, **kw) + + def post(self, *args: t.Any, **kw: t.Any) -> TestResponse: + """Call :meth:`open` with ``method`` set to ``POST``.""" + kw["method"] = "POST" + return self.open(*args, **kw) + + def put(self, *args: t.Any, **kw: t.Any) -> TestResponse: + """Call :meth:`open` with ``method`` set to ``PUT``.""" + kw["method"] = "PUT" + return self.open(*args, **kw) + + def delete(self, *args: t.Any, **kw: t.Any) -> TestResponse: + """Call :meth:`open` with ``method`` set to ``DELETE``.""" + kw["method"] = "DELETE" + return self.open(*args, **kw) + + def patch(self, *args: t.Any, **kw: t.Any) -> TestResponse: + """Call :meth:`open` with ``method`` set to ``PATCH``.""" + kw["method"] = "PATCH" + return self.open(*args, **kw) + + def options(self, *args: t.Any, **kw: t.Any) -> TestResponse: + """Call :meth:`open` with ``method`` set to ``OPTIONS``.""" + kw["method"] = "OPTIONS" + return self.open(*args, **kw) + + def head(self, *args: t.Any, **kw: t.Any) -> TestResponse: + """Call :meth:`open` with ``method`` set to ``HEAD``.""" + kw["method"] = "HEAD" + return self.open(*args, **kw) + + def trace(self, *args: t.Any, **kw: t.Any) -> TestResponse: + """Call :meth:`open` with ``method`` set to ``TRACE``.""" + kw["method"] = "TRACE" + return self.open(*args, **kw) + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.application!r}>" + + +def create_environ(*args: t.Any, **kwargs: t.Any) -> WSGIEnvironment: + """Create a new WSGI environ dict based on the values passed. The first + parameter should be the path of the request which defaults to '/'. The + second one can either be an absolute path (in that case the host is + localhost:80) or a full path to the request with scheme, netloc port and + the path to the script. + + This accepts the same arguments as the :class:`EnvironBuilder` + constructor. + + .. versionchanged:: 0.5 + This function is now a thin wrapper over :class:`EnvironBuilder` which + was added in 0.5. The `headers`, `environ_base`, `environ_overrides` + and `charset` parameters were added. + """ + builder = EnvironBuilder(*args, **kwargs) + + try: + return builder.get_environ() + finally: + builder.close() + + +def run_wsgi_app( + app: WSGIApplication, environ: WSGIEnvironment, buffered: bool = False +) -> tuple[t.Iterable[bytes], str, Headers]: + """Return a tuple in the form (app_iter, status, headers) of the + application output. This works best if you pass it an application that + returns an iterator all the time. + + Sometimes applications may use the `write()` callable returned + by the `start_response` function. This tries to resolve such edge + cases automatically. But if you don't get the expected output you + should set `buffered` to `True` which enforces buffering. + + If passed an invalid WSGI application the behavior of this function is + undefined. Never pass non-conforming WSGI applications to this function. + + :param app: the application to execute. + :param buffered: set to `True` to enforce buffering. + :return: tuple in the form ``(app_iter, status, headers)`` + """ + # Copy environ to ensure any mutations by the app (ProxyFix, for + # example) don't affect subsequent requests (such as redirects). 
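+    # _get_environ also unwraps request-like objects that expose an
+    # ``environ`` attribute, so either form may be passed in.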
+ environ = _get_environ(environ).copy() + status: str + response: tuple[str, list[tuple[str, str]]] | None = None + buffer: list[bytes] = [] + + def start_response(status, headers, exc_info=None): # type: ignore + nonlocal response + + if exc_info: + try: + raise exc_info[1].with_traceback(exc_info[2]) + finally: + exc_info = None + + response = (status, headers) + return buffer.append + + app_rv = app(environ, start_response) + close_func = getattr(app_rv, "close", None) + app_iter: t.Iterable[bytes] = iter(app_rv) + + # when buffering we emit the close call early and convert the + # application iterator into a regular list + if buffered: + try: + app_iter = list(app_iter) + finally: + if close_func is not None: + close_func() + + # otherwise we iterate the application iter until we have a response, chain + # the already received data with the already collected data and wrap it in + # a new `ClosingIterator` if we need to restore a `close` callable from the + # original return value. + else: + for item in app_iter: + buffer.append(item) + + if response is not None: + break + + if buffer: + app_iter = chain(buffer, app_iter) + + if close_func is not None and app_iter is not app_rv: + app_iter = ClosingIterator(app_iter, close_func) + + status, headers = response # type: ignore + return app_iter, status, Headers(headers) + + +class TestResponse(Response): + """:class:`~werkzeug.wrappers.Response` subclass that provides extra + information about requests made with the test :class:`Client`. + + Test client requests will always return an instance of this class. + If a custom response class is passed to the client, it is + subclassed along with this to support test information. + + If the test request included large files, or if the application is + serving a file, call :meth:`close` to close any open files and + prevent Python showing a ``ResourceWarning``. + + .. versionchanged:: 2.2 + Set the ``default_mimetype`` to None to prevent a mimetype being + assumed if missing. + + .. versionchanged:: 2.1 + Response instances cannot be treated as tuples. + + .. versionadded:: 2.0 + Test client methods always return instances of this class. + """ + + default_mimetype = None + # Don't assume a mimetype, instead use whatever the response provides + + request: Request + """A request object with the environ used to make the request that + resulted in this response. + """ + + history: tuple[TestResponse, ...] + """A list of intermediate responses. Populated when the test request + is made with ``follow_redirects`` enabled. + """ + + # Tell Pytest to ignore this, it's not a test class. + __test__ = False + + def __init__( + self, + response: t.Iterable[bytes], + status: str, + headers: Headers, + request: Request, + history: tuple[TestResponse] = (), # type: ignore + **kwargs: t.Any, + ) -> None: + super().__init__(response, status, headers, **kwargs) + self.request = request + self.history = history + self._compat_tuple = response, status, headers + + @cached_property + def text(self) -> str: + """The response data as text. A shortcut for + ``response.get_data(as_text=True)``. + + .. versionadded:: 2.1 + """ + return self.get_data(as_text=True) + + +@dataclasses.dataclass +class Cookie: + """A cookie key, value, and parameters. + + The class itself is not a public API. Its attributes are documented for inspection + with :meth:`.Client.get_cookie` only. + + .. 
versionadded:: 2.3
+    """
+
+    key: str
+    """The cookie key, encoded as a client would see it."""
+
+    value: str
+    """The cookie value, encoded as a client would see it."""
+
+    decoded_key: str
+    """The cookie key, decoded as the application would set and see it."""
+
+    decoded_value: str
+    """The cookie value, decoded as the application would set and see it."""
+
+    expires: datetime | None
+    """The time at which the cookie is no longer valid."""
+
+    max_age: int | None
+    """The number of seconds from when the cookie was set at which it is
+    no longer valid.
+    """
+
+    domain: str
+    """The domain that the cookie was set for, or the request domain if not set."""
+
+    origin_only: bool
+    """Whether the cookie will be sent for exact domain matches only. This is ``True``
+    if the ``Domain`` parameter was not present.
+    """
+
+    path: str
+    """The path that the cookie was set for."""
+
+    secure: bool | None
+    """The ``Secure`` parameter."""
+
+    http_only: bool | None
+    """The ``HttpOnly`` parameter."""
+
+    same_site: str | None
+    """The ``SameSite`` parameter."""
+
+    def _matches_request(self, server_name: str, path: str) -> bool:
+        return (
+            server_name == self.domain
+            or (
+                not self.origin_only
+                and server_name.endswith(self.domain)
+                and server_name[: -len(self.domain)].endswith(".")
+            )
+        ) and (
+            path == self.path
+            or (
+                path.startswith(self.path)
+                and path[len(self.path) - self.path.endswith("/") :].startswith("/")
+            )
+        )
+
+    def _to_request_header(self) -> str:
+        return f"{self.key}={self.value}"
+
+    @classmethod
+    def _from_response_header(cls, server_name: str, path: str, header: str) -> te.Self:
+        header, _, parameters_str = header.partition(";")
+        key, _, value = header.partition("=")
+        decoded_key, decoded_value = next(parse_cookie(header).items())
+        params = {}
+
+        for item in parameters_str.split(";"):
+            k, sep, v = item.partition("=")
+            params[k.strip().lower()] = v.strip() if sep else None
+
+        return cls(
+            key=key.strip(),
+            value=value.strip(),
+            decoded_key=decoded_key,
+            decoded_value=decoded_value,
+            expires=parse_date(params.get("expires")),
+            max_age=int(params["max-age"] or 0) if "max-age" in params else None,
+            domain=params.get("domain") or server_name,
+            origin_only="domain" not in params,
+            path=params.get("path") or path.rpartition("/")[0] or "/",
+            secure="secure" in params,
+            http_only="httponly" in params,
+            same_site=params.get("samesite"),
+        )
+
+    @property
+    def _storage_key(self) -> tuple[str, str, str]:
+        return self.domain, self.path, self.decoded_key
+
+    @property
+    def _should_delete(self) -> bool:
+        return self.max_age == 0 or (
+            self.expires is not None and self.expires.timestamp() == 0
+        )
diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/testapp.py b/backend/test/lib/python3.8/site-packages/werkzeug/testapp.py
new file mode 100644
index 0000000000000000000000000000000000000000..57f1f6fdf5e1c2e920c835c5aa1c262391923d84
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/werkzeug/testapp.py
@@ -0,0 +1,181 @@
+"""A small application that can be used to test a WSGI server and check
+it for WSGI compliance.
+"""
+from __future__ import annotations
+
+import os
+import sys
+import typing as t
+from textwrap import wrap
+
+from markupsafe import escape
+
+from . 
import __version__ as _werkzeug_version +from .wrappers.request import Request +from .wrappers.response import Response + +TEMPLATE = """\ +<!doctype html> +<html lang=en> +<title>WSGI Information</title> +<style type="text/css"> + @import url(https://fonts.googleapis.com/css?family=Ubuntu); + + body { font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', + 'Verdana', sans-serif; background-color: white; color: #000; + font-size: 15px; text-align: center; } + div.box { text-align: left; width: 45em; margin: auto; padding: 50px 0; + background-color: white; } + h1, h2 { font-family: 'Ubuntu', 'Lucida Grande', 'Lucida Sans Unicode', + 'Geneva', 'Verdana', sans-serif; font-weight: normal; } + h1 { margin: 0 0 30px 0; } + h2 { font-size: 1.4em; margin: 1em 0 0.5em 0; } + table { width: 100%%; border-collapse: collapse; border: 1px solid #AFC5C9 } + table th { background-color: #AFC1C4; color: white; font-size: 0.72em; + font-weight: normal; width: 18em; vertical-align: top; + padding: 0.5em 0 0.1em 0.5em; } + table td { border: 1px solid #AFC5C9; padding: 0.1em 0 0.1em 0.5em; } + code { font-family: 'Consolas', 'Monaco', 'Bitstream Vera Sans Mono', + monospace; font-size: 0.7em; } + ul li { line-height: 1.5em; } + ul.path { font-size: 0.7em; margin: 0 -30px; padding: 8px 30px; + list-style: none; background: #E8EFF0; } + ul.path li { line-height: 1.6em; } + li.virtual { color: #999; text-decoration: underline; } + li.exp { background: white; } +</style> +<div class="box"> + <h1>WSGI Information</h1> + <p> + This page displays all available information about the WSGI server and + the underlying Python interpreter. + <h2 id="python-interpreter">Python Interpreter</h2> + <table> + <tr> + <th>Python Version + <td>%(python_version)s + <tr> + <th>Platform + <td>%(platform)s [%(os)s] + <tr> + <th>API Version + <td>%(api_version)s + <tr> + <th>Byteorder + <td>%(byteorder)s + <tr> + <th>Werkzeug Version + <td>%(werkzeug_version)s + </table> + <h2 id="wsgi-environment">WSGI Environment</h2> + <table>%(wsgi_env)s</table> + <h2 id="installed-eggs">Installed Eggs</h2> + <p> + The following python packages were installed on the system as + Python eggs: + <ul>%(python_eggs)s</ul> + <h2 id="sys-path">System Path</h2> + <p> + The following paths are the current contents of the load path. The + following entries are looked up for Python packages. Note that not + all items in this path are folders. Gray and underlined items are + entries pointing to invalid resources or used by custom import hooks + such as the zip importer. + <p> + Items with a bright background were expanded for display from a relative + path. If you encounter such paths in the output you might want to check + your setup as relative paths are usually problematic in multithreaded + environments. + <ul class="path">%(sys_path)s</ul> +</div> +""" + + +def iter_sys_path() -> t.Iterator[tuple[str, bool, bool]]: + if os.name == "posix": + + def strip(x: str) -> str: + prefix = os.path.expanduser("~") + if x.startswith(prefix): + x = f"~{x[len(prefix) :]}" + return x + + else: + + def strip(x: str) -> str: + return x + + cwd = os.path.abspath(os.getcwd()) + for item in sys.path: + path = os.path.join(cwd, item or os.path.curdir) + yield strip(os.path.normpath(path)), not os.path.isdir(path), path != item + + +@Request.application +def test_app(req: Request) -> Response: + """Simple test application that dumps the environment. You can use + it to check if Werkzeug is working properly: + + .. 
sourcecode:: pycon + + >>> from werkzeug.serving import run_simple + >>> from werkzeug.testapp import test_app + >>> run_simple('localhost', 3000, test_app) + * Running on http://localhost:3000/ + + The application displays important information from the WSGI environment, + the Python interpreter and the installed libraries. + """ + try: + import pkg_resources + except ImportError: + eggs: t.Iterable[t.Any] = () + else: + eggs = sorted( + pkg_resources.working_set, + key=lambda x: x.project_name.lower(), + ) + python_eggs = [] + for egg in eggs: + try: + version = egg.version + except (ValueError, AttributeError): + version = "unknown" + python_eggs.append( + f"<li>{escape(egg.project_name)} <small>[{escape(version)}]</small>" + ) + + wsgi_env = [] + sorted_environ = sorted(req.environ.items(), key=lambda x: repr(x[0]).lower()) + for key, value in sorted_environ: + value = "".join(wrap(str(escape(repr(value))))) + wsgi_env.append(f"<tr><th>{escape(key)}<td><code>{value}</code>") + + sys_path = [] + for item, virtual, expanded in iter_sys_path(): + class_ = [] + if virtual: + class_.append("virtual") + if expanded: + class_.append("exp") + class_ = f' class="{" ".join(class_)}"' if class_ else "" + sys_path.append(f"<li{class_}>{escape(item)}") + + context = { + "python_version": "<br>".join(escape(sys.version).splitlines()), + "platform": escape(sys.platform), + "os": escape(os.name), + "api_version": sys.api_version, + "byteorder": sys.byteorder, + "werkzeug_version": _werkzeug_version, + "python_eggs": "\n".join(python_eggs), + "wsgi_env": "\n".join(wsgi_env), + "sys_path": "\n".join(sys_path), + } + return Response(TEMPLATE % context, mimetype="text/html") + + +if __name__ == "__main__": + from .serving import run_simple + + run_simple("localhost", 5000, test_app, use_reloader=True) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/urls.py b/backend/test/lib/python3.8/site-packages/werkzeug/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..f5760eb4c1da2efb5b8071d987ce33bb32127050 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/urls.py @@ -0,0 +1,1377 @@ +"""Functions for working with URLs. + +Contains implementations of functions from :mod:`urllib.parse` that +handle bytes and strings. +""" +from __future__ import annotations + +import codecs +import os +import re +import typing as t +import warnings +from urllib.parse import quote +from urllib.parse import unquote +from urllib.parse import urlencode +from urllib.parse import urlsplit +from urllib.parse import urlunsplit + +from ._internal import _check_str_tuple +from ._internal import _decode_idna +from ._internal import _make_encode_wrapper +from ._internal import _to_str +from .datastructures import iter_multi_items + +if t.TYPE_CHECKING: + from . import datastructures as ds + +# A regular expression for what a valid schema looks like +_scheme_re = re.compile(r"^[a-zA-Z0-9+-.]+$") + +# Characters that are safe in any part of an URL. 
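+# Anything outside this set is percent-encoded by the quoting helpers
+# below unless the caller explicitly marks it as safe.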
+_always_safe_chars = ( + "abcdefghijklmnopqrstuvwxyz" + "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + "0123456789" + "-._~" + "$!'()*+,;" # RFC3986 sub-delims set, not including query string delimiters &= +) +_always_safe = frozenset(_always_safe_chars.encode("ascii")) + +_hexdigits = "0123456789ABCDEFabcdef" +_hextobyte = { + f"{a}{b}".encode("ascii"): int(f"{a}{b}", 16) + for a in _hexdigits + for b in _hexdigits +} +_bytetohex = [f"%{char:02X}".encode("ascii") for char in range(256)] + + +class _URLTuple(t.NamedTuple): + scheme: str + netloc: str + path: str + query: str + fragment: str + + +class BaseURL(_URLTuple): + """Superclass of :py:class:`URL` and :py:class:`BytesURL`. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. Use the ``urllib.parse`` library instead. + """ + + __slots__ = () + _at: str + _colon: str + _lbracket: str + _rbracket: str + + def __new__(cls, *args: t.Any, **kwargs: t.Any) -> BaseURL: + warnings.warn( + f"'werkzeug.urls.{cls.__name__}' is deprecated and will be removed in" + " Werkzeug 3.0. Use the 'urllib.parse' library instead.", + DeprecationWarning, + stacklevel=2, + ) + return super().__new__(cls, *args, **kwargs) + + def __str__(self) -> str: + return self.to_url() + + def replace(self, **kwargs: t.Any) -> BaseURL: + """Return an URL with the same values, except for those parameters + given new values by whichever keyword arguments are specified.""" + return self._replace(**kwargs) + + @property + def host(self) -> str | None: + """The host part of the URL if available, otherwise `None`. The + host is either the hostname or the IP address mentioned in the + URL. It will not contain the port. + """ + return self._split_host()[0] + + @property + def ascii_host(self) -> str | None: + """Works exactly like :attr:`host` but will return a result that + is restricted to ASCII. If it finds a netloc that is not ASCII + it will attempt to idna decode it. This is useful for socket + operations when the URL might include internationalized characters. + """ + rv = self.host + if rv is not None and isinstance(rv, str): + try: + rv = rv.encode("idna").decode("ascii") + except UnicodeError: + pass + return rv + + @property + def port(self) -> int | None: + """The port in the URL as an integer if it was present, `None` + otherwise. This does not fill in default ports. + """ + try: + rv = int(_to_str(self._split_host()[1])) + if 0 <= rv <= 65535: + return rv + except (ValueError, TypeError): + pass + return None + + @property + def auth(self) -> str | None: + """The authentication part in the URL if available, `None` + otherwise. + """ + return self._split_netloc()[0] + + @property + def username(self) -> str | None: + """The username if it was part of the URL, `None` otherwise. + This undergoes URL decoding and will always be a string. + """ + rv = self._split_auth()[0] + if rv is not None: + return _url_unquote_legacy(rv) + return None + + @property + def raw_username(self) -> str | None: + """The username if it was part of the URL, `None` otherwise. + Unlike :attr:`username` this one is not being decoded. + """ + return self._split_auth()[0] + + @property + def password(self) -> str | None: + """The password if it was part of the URL, `None` otherwise. + This undergoes URL decoding and will always be a string. + """ + rv = self._split_auth()[1] + if rv is not None: + return _url_unquote_legacy(rv) + return None + + @property + def raw_password(self) -> str | None: + """The password if it was part of the URL, `None` otherwise. 
+    Unlike :attr:`password` this one is not being decoded.
+    """
+        return self._split_auth()[1]
+
+    def decode_query(self, *args: t.Any, **kwargs: t.Any) -> ds.MultiDict[str, str]:
+        """Decodes the query part of the URL. This is a shortcut for
+        calling :func:`url_decode` on the query argument. The arguments and
+        keyword arguments are forwarded to :func:`url_decode` unchanged.
+        """
+        return url_decode(self.query, *args, **kwargs)
+
+    def join(self, *args: t.Any, **kwargs: t.Any) -> BaseURL:
+        """Joins this URL with another one. This is just a convenience
+        function for calling into :meth:`url_join` and then parsing the
+        return value again.
+        """
+        return url_parse(url_join(self, *args, **kwargs))
+
+    def to_url(self) -> str:
+        """Returns a URL string or bytes depending on the type of the
+        information stored. This is just a convenience function
+        for calling :meth:`url_unparse` for this URL.
+        """
+        return url_unparse(self)
+
+    def encode_netloc(self) -> str:
+        """Encodes the netloc part to an ASCII-safe URL string."""
+        rv = self.ascii_host or ""
+        if ":" in rv:
+            rv = f"[{rv}]"
+        port = self.port
+        if port is not None:
+            rv = f"{rv}:{port}"
+        auth = ":".join(
+            filter(
+                None,
+                [
+                    url_quote(self.raw_username or "", "utf-8", "strict", "/:%"),
+                    url_quote(self.raw_password or "", "utf-8", "strict", "/:%"),
+                ],
+            )
+        )
+        if auth:
+            rv = f"{auth}@{rv}"
+        return rv
+
+    def decode_netloc(self) -> str:
+        """Decodes the netloc part into a string."""
+        host = self.host or ""
+
+        if isinstance(host, bytes):
+            host = host.decode()
+
+        rv = _decode_idna(host)
+
+        if ":" in rv:
+            rv = f"[{rv}]"
+        port = self.port
+        if port is not None:
+            rv = f"{rv}:{port}"
+        auth = ":".join(
+            filter(
+                None,
+                [
+                    _url_unquote_legacy(self.raw_username or "", "/:%@"),
+                    _url_unquote_legacy(self.raw_password or "", "/:%@"),
+                ],
+            )
+        )
+        if auth:
+            rv = f"{auth}@{rv}"
+        return rv
+
+    def to_uri_tuple(self) -> BaseURL:
+        """Returns a :class:`BytesURL` tuple that holds a URI. This will
+        encode all the information in the URL properly to ASCII using the
+        rules a web browser would follow.
+
+        It's usually more interesting to directly call :meth:`iri_to_uri` which
+        will return a string.
+        """
+        return url_parse(iri_to_uri(self))
+
+    def to_iri_tuple(self) -> BaseURL:
+        """Returns a :class:`URL` tuple that holds an IRI. This will try
+        to decode as much information as possible in the URL without
+        losing information, similar to how a web browser handles the
+        URL bar.
+
+        It's usually more interesting to directly call :meth:`uri_to_iri` which
+        will return a string.
+        """
+        return url_parse(uri_to_iri(self))
+
+    def get_file_location(
+        self, pathformat: str | None = None
+    ) -> tuple[str | None, str | None]:
+        """Returns a tuple with the location of the file in the form
+        ``(server, location)``. If the netloc is empty in the URL or
+        points to localhost, it's represented as ``None``.
+
+        By default the `pathformat` is autodetected, but it needs to be
+        set when working with URLs of a specific system. The supported
+        values are ``'windows'`` when working with Windows or DOS paths
+        and ``'posix'`` when working with posix paths.
+
+        If the URL does not point to a local file, the server and location
+        are both represented as ``None``.
+
+        :param pathformat: The expected format of the path component.
+                           Currently ``'windows'`` and ``'posix'`` are
+                           supported. Defaults to ``None`` which is
+                           autodetect. 
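+
+        For example, on a posix system:
+
+        >>> url_parse("file:///var/log/app.log").get_file_location()
+        (None, '/var/log/app.log')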
+ """ + if self.scheme != "file": + return None, None + + path = url_unquote(self.path) + host = self.netloc or None + + if pathformat is None: + if os.name == "nt": + pathformat = "windows" + else: + pathformat = "posix" + + if pathformat == "windows": + if path[:1] == "/" and path[1:2].isalpha() and path[2:3] in "|:": + path = f"{path[1:2]}:{path[3:]}" + windows_share = path[:3] in ("\\" * 3, "/" * 3) + import ntpath + + path = ntpath.normpath(path) + # Windows shared drives are represented as ``\\host\\directory``. + # That results in a URL like ``file://///host/directory``, and a + # path like ``///host/directory``. We need to special-case this + # because the path contains the hostname. + if windows_share and host is None: + parts = path.lstrip("\\").split("\\", 1) + if len(parts) == 2: + host, path = parts + else: + host = parts[0] + path = "" + elif pathformat == "posix": + import posixpath + + path = posixpath.normpath(path) + else: + raise TypeError(f"Invalid path format {pathformat!r}") + + if host in ("127.0.0.1", "::1", "localhost"): + host = None + + return host, path + + def _split_netloc(self) -> tuple[str | None, str]: + if self._at in self.netloc: + auth, _, netloc = self.netloc.partition(self._at) + return auth, netloc + return None, self.netloc + + def _split_auth(self) -> tuple[str | None, str | None]: + auth = self._split_netloc()[0] + if not auth: + return None, None + if self._colon not in auth: + return auth, None + + username, _, password = auth.partition(self._colon) + return username, password + + def _split_host(self) -> tuple[str | None, str | None]: + rv = self._split_netloc()[1] + if not rv: + return None, None + + if not rv.startswith(self._lbracket): + if self._colon in rv: + host, _, port = rv.partition(self._colon) + return host, port + return rv, None + + idx = rv.find(self._rbracket) + if idx < 0: + return rv, None + + host = rv[1:idx] + rest = rv[idx + 1 :] + if rest.startswith(self._colon): + return host, rest[1:] + return host, None + + +class URL(BaseURL): + """Represents a parsed URL. This behaves like a regular tuple but + also has some extra attributes that give further insight into the + URL. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. Use the ``urllib.parse`` library instead. + """ + + __slots__ = () + _at = "@" + _colon = ":" + _lbracket = "[" + _rbracket = "]" + + def encode(self, charset: str = "utf-8", errors: str = "replace") -> BytesURL: + """Encodes the URL to a tuple made out of bytes. The charset is + only being used for the path, query and fragment. + """ + return BytesURL( + self.scheme.encode("ascii"), + self.encode_netloc(), + self.path.encode(charset, errors), + self.query.encode(charset, errors), + self.fragment.encode(charset, errors), + ) + + +class BytesURL(BaseURL): + """Represents a parsed URL in bytes. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. Use the ``urllib.parse`` library instead. + """ + + __slots__ = () + _at = b"@" # type: ignore + _colon = b":" # type: ignore + _lbracket = b"[" # type: ignore + _rbracket = b"]" # type: ignore + + def __str__(self) -> str: + return self.to_url().decode("utf-8", "replace") # type: ignore + + def encode_netloc(self) -> bytes: # type: ignore + """Returns the netloc unchanged as bytes.""" + return self.netloc # type: ignore + + def decode(self, charset: str = "utf-8", errors: str = "replace") -> URL: + """Decodes the URL to a tuple made out of strings. The charset is + only being used for the path, query and fragment. 
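+
+        For instance, an IDNA-encoded host is decoded along the way:
+
+        >>> BytesURL(b"http", b"xn--n3h.net", b"/", b"", b"").decode().host
+        '☃.net'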
+ """ + return URL( + self.scheme.decode("ascii"), # type: ignore + self.decode_netloc(), + self.path.decode(charset, errors), # type: ignore + self.query.decode(charset, errors), # type: ignore + self.fragment.decode(charset, errors), # type: ignore + ) + + +_unquote_maps: dict[frozenset[int], dict[bytes, int]] = {frozenset(): _hextobyte} + + +def _unquote_to_bytes(string: str | bytes, unsafe: str | bytes = "") -> bytes: + if isinstance(string, str): + string = string.encode("utf-8") + + if isinstance(unsafe, str): + unsafe = unsafe.encode("utf-8") + + unsafe = frozenset(bytearray(unsafe)) + groups = iter(string.split(b"%")) + result = bytearray(next(groups, b"")) + + try: + hex_to_byte = _unquote_maps[unsafe] + except KeyError: + hex_to_byte = _unquote_maps[unsafe] = { + h: b for h, b in _hextobyte.items() if b not in unsafe + } + + for group in groups: + code = group[:2] + + if code in hex_to_byte: + result.append(hex_to_byte[code]) + result.extend(group[2:]) + else: + result.append(37) # % + result.extend(group) + + return bytes(result) + + +def _url_encode_impl( + obj: t.Mapping[str, str] | t.Iterable[tuple[str, str]], + charset: str, + sort: bool, + key: t.Callable[[tuple[str, str]], t.Any] | None, +) -> t.Iterator[str]: + from .datastructures import iter_multi_items + + iterable: t.Iterable[tuple[str, str]] = iter_multi_items(obj) + + if sort: + iterable = sorted(iterable, key=key) + + for key_str, value_str in iterable: + if value_str is None: + continue + + if not isinstance(key_str, bytes): + key_bytes = str(key_str).encode(charset) + else: + key_bytes = key_str + + if not isinstance(value_str, bytes): + value_bytes = str(value_str).encode(charset) + else: + value_bytes = value_str + + yield f"{_fast_url_quote_plus(key_bytes)}={_fast_url_quote_plus(value_bytes)}" + + +def _url_unquote_legacy(value: str, unsafe: str = "") -> str: + try: + return url_unquote(value, charset="utf-8", errors="strict", unsafe=unsafe) + except UnicodeError: + return url_unquote(value, charset="latin1", unsafe=unsafe) + + +def url_parse( + url: str, scheme: str | None = None, allow_fragments: bool = True +) -> BaseURL: + """Parses a URL from a string into a :class:`URL` tuple. If the URL + is lacking a scheme it can be provided as second argument. Otherwise, + it is ignored. Optionally fragments can be stripped from the URL + by setting `allow_fragments` to `False`. + + The inverse of this function is :func:`url_unparse`. + + :param url: the URL to parse. + :param scheme: the default schema to use if the URL is schemaless. + :param allow_fragments: if set to `False` a fragment will be removed + from the URL. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. Use ``urllib.parse.urlsplit`` instead. + """ + warnings.warn( + "'werkzeug.urls.url_parse' is deprecated and will be removed in Werkzeug 3.0." 
+ " Use 'urllib.parse.urlsplit' instead.", + DeprecationWarning, + stacklevel=2, + ) + s = _make_encode_wrapper(url) + is_text_based = isinstance(url, str) + + if scheme is None: + scheme = s("") + netloc = query = fragment = s("") + i = url.find(s(":")) + if i > 0 and _scheme_re.match(_to_str(url[:i], errors="replace")): + # make sure "iri" is not actually a port number (in which case + # "scheme" is really part of the path) + rest = url[i + 1 :] + if not rest or any(c not in s("0123456789") for c in rest): + # not a port number + scheme, url = url[:i].lower(), rest + + if url[:2] == s("//"): + delim = len(url) + for c in s("/?#"): + wdelim = url.find(c, 2) + if wdelim >= 0: + delim = min(delim, wdelim) + netloc, url = url[2:delim], url[delim:] + if (s("[") in netloc and s("]") not in netloc) or ( + s("]") in netloc and s("[") not in netloc + ): + raise ValueError("Invalid IPv6 URL") + + if allow_fragments and s("#") in url: + url, fragment = url.split(s("#"), 1) + if s("?") in url: + url, query = url.split(s("?"), 1) + + result_type = URL if is_text_based else BytesURL + + return result_type(scheme, netloc, url, query, fragment) + + +def _make_fast_url_quote( + charset: str = "utf-8", + errors: str = "strict", + safe: str | bytes = "/:", + unsafe: str | bytes = "", +) -> t.Callable[[bytes], str]: + """Precompile the translation table for a URL encoding function. + + Unlike :func:`url_quote`, the generated function only takes the + string to quote. + + :param charset: The charset to encode the result with. + :param errors: How to handle encoding errors. + :param safe: An optional sequence of safe characters to never encode. + :param unsafe: An optional sequence of unsafe characters to always encode. + """ + if isinstance(safe, str): + safe = safe.encode(charset, errors) + + if isinstance(unsafe, str): + unsafe = unsafe.encode(charset, errors) + + safe = (frozenset(bytearray(safe)) | _always_safe) - frozenset(bytearray(unsafe)) + table = [chr(c) if c in safe else f"%{c:02X}" for c in range(256)] + + def quote(string: bytes) -> str: + return "".join([table[c] for c in string]) + + return quote + + +_fast_url_quote = _make_fast_url_quote() +_fast_quote_plus = _make_fast_url_quote(safe=" ", unsafe="+") + + +def _fast_url_quote_plus(string: bytes) -> str: + return _fast_quote_plus(string).replace(" ", "+") + + +def url_quote( + string: str | bytes, + charset: str = "utf-8", + errors: str = "strict", + safe: str | bytes = "/:", + unsafe: str | bytes = "", +) -> str: + """URL encode a single string with a given encoding. + + :param s: the string to quote. + :param charset: the charset to be used. + :param safe: an optional sequence of safe characters. + :param unsafe: an optional sequence of unsafe characters. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. Use ``urllib.parse.quote`` instead. + + .. versionadded:: 0.9.2 + The `unsafe` parameter was added. + """ + warnings.warn( + "'werkzeug.urls.url_quote' is deprecated and will be removed in Werkzeug 3.0." 
+ " Use 'urllib.parse.quote' instead.", + DeprecationWarning, + stacklevel=2, + ) + + if not isinstance(string, (str, bytes, bytearray)): + string = str(string) + if isinstance(string, str): + string = string.encode(charset, errors) + if isinstance(safe, str): + safe = safe.encode(charset, errors) + if isinstance(unsafe, str): + unsafe = unsafe.encode(charset, errors) + safe = (frozenset(bytearray(safe)) | _always_safe) - frozenset(bytearray(unsafe)) + rv = bytearray() + for char in bytearray(string): + if char in safe: + rv.append(char) + else: + rv.extend(_bytetohex[char]) + return bytes(rv).decode(charset) + + +def url_quote_plus( + string: str, charset: str = "utf-8", errors: str = "strict", safe: str = "" +) -> str: + """URL encode a single string with the given encoding and convert + whitespace to "+". + + :param s: The string to quote. + :param charset: The charset to be used. + :param safe: An optional sequence of safe characters. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. Use ``urllib.parse.quote_plus`` instead. + """ + warnings.warn( + "'werkzeug.urls.url_quote_plus' is deprecated and will be removed in Werkzeug" + " 2.4. Use 'urllib.parse.quote_plus' instead.", + DeprecationWarning, + stacklevel=2, + ) + + return url_quote(string, charset, errors, safe + " ", "+").replace(" ", "+") + + +def url_unparse(components: tuple[str, str, str, str, str]) -> str: + """The reverse operation to :meth:`url_parse`. This accepts arbitrary + as well as :class:`URL` tuples and returns a URL as a string. + + :param components: the parsed URL as tuple which should be converted + into a URL string. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. Use ``urllib.parse.urlunsplit`` instead. + """ + warnings.warn( + "'werkzeug.urls.url_unparse' is deprecated and will be removed in Werkzeug 3.0." + " Use 'urllib.parse.urlunsplit' instead.", + DeprecationWarning, + stacklevel=2, + ) + _check_str_tuple(components) + scheme, netloc, path, query, fragment = components + s = _make_encode_wrapper(scheme) + url = s("") + + # We generally treat file:///x and file:/x the same which is also + # what browsers seem to do. This also allows us to ignore a schema + # register for netloc utilization or having to differentiate between + # empty and missing netloc. + if netloc or (scheme and path.startswith(s("/"))): + if path and path[:1] != s("/"): + path = s("/") + path + url = s("//") + (netloc or s("")) + path + elif path: + url += path + if scheme: + url = scheme + s(":") + url + if query: + url = url + s("?") + query + if fragment: + url = url + s("#") + fragment + return url + + +def url_unquote( + s: str | bytes, + charset: str = "utf-8", + errors: str = "replace", + unsafe: str = "", +) -> str: + """URL decode a single string with a given encoding. If the charset + is set to `None` no decoding is performed and raw bytes are + returned. + + :param s: the string to unquote. + :param charset: the charset of the query string. If set to `None` + no decoding will take place. + :param errors: the error handling for the charset decoding. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. Use ``urllib.parse.unquote`` instead. + """ + warnings.warn( + "'werkzeug.urls.url_unquote' is deprecated and will be removed in Werkzeug 3.0." 
+        " Use 'urllib.parse.unquote' instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    rv = _unquote_to_bytes(s, unsafe)
+    if charset is None:
+        return rv
+    return rv.decode(charset, errors)
+
+
+def url_unquote_plus(
+    s: str | bytes, charset: str = "utf-8", errors: str = "replace"
+) -> str:
+    """URL decode a single string with the given `charset` and decode "+" to
+    whitespace.
+
+    By default encoding errors are replaced. If you want a different
+    behavior you can set `errors` to ``'ignore'`` or ``'strict'``.
+
+    :param s: The string to unquote.
+    :param charset: the charset of the query string. If set to `None`
+        no decoding will take place.
+    :param errors: The error handling for the `charset` decoding.
+
+    .. deprecated:: 2.3
+        Will be removed in Werkzeug 3.0. Use ``urllib.parse.unquote_plus`` instead.
+    """
+    warnings.warn(
+        "'werkzeug.urls.url_unquote_plus' is deprecated and will be removed in Werkzeug"
+        " 3.0. Use 'urllib.parse.unquote_plus' instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+
+    if isinstance(s, str):
+        s = s.replace("+", " ")
+    else:
+        s = s.replace(b"+", b" ")
+
+    return url_unquote(s, charset, errors)
+
+
+def url_fix(s: str, charset: str = "utf-8") -> str:
+    r"""Sometimes you get a URL from a user that just isn't a real URL because
+    it contains unsafe characters like ' ' and so on. This function can fix
+    some of the problems in a similar way browsers handle data entered by the
+    user:
+
+    >>> url_fix('http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)')
+    'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)'
+
+    :param s: the string with the URL to fix.
+    :param charset: The target charset for the URL if the url was given
+        as a string.
+
+    .. deprecated:: 2.3
+        Will be removed in Werkzeug 3.0.
+    """
+    warnings.warn(
+        "'werkzeug.urls.url_fix' is deprecated and will be removed in Werkzeug 3.0.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    # First step is to switch to text processing and to convert
+    # backslashes (which are invalid in URLs anyway) to slashes. This is
+    # consistent with what Chrome does.
+    s = _to_str(s, charset, "replace").replace("\\", "/")
+
+    # For the specific case that we look like a malformed windows URL
+    # we want to fix this up manually:
+    if s.startswith("file://") and s[7:8].isalpha() and s[8:10] in (":/", "|/"):
+        s = f"file:///{s[7:]}"
+
+    url = url_parse(s)
+    path = url_quote(url.path, charset, safe="/%+$!*'(),")
+    qs = url_quote_plus(url.query, charset, safe=":&%=+$!*'(),")
+    anchor = url_quote_plus(url.fragment, charset, safe=":&%=+$!*'(),")
+    return url_unparse((url.scheme, url.encode_netloc(), path, qs, anchor))
+
+
+def _codec_error_url_quote(e: UnicodeError) -> tuple[str, int]:
+    """Used in :func:`uri_to_iri` after unquoting to re-quote any
+    invalid bytes.
+    """
+    # the docs state that UnicodeError does have these attributes,
+    # but mypy isn't picking them up
+    out = quote(e.object[e.start : e.end], safe="")  # type: ignore
+    return out, e.end  # type: ignore
+
+
+codecs.register_error("werkzeug.url_quote", _codec_error_url_quote)
+
+
+def _make_unquote_part(name: str, chars: str) -> t.Callable[[str, str, str], str]:
+    """Create a function that unquotes all percent encoded characters except those
+    given. This allows working with unquoted characters if possible while not changing
+    the meaning of a given part of a URL. 
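+
+    For instance, the query unquoter decodes ``%2F`` but leaves an encoded
+    ``&`` quoted, so it cannot be confused with a pair separator:
+
+    >>> _unquote_query("a=b%26c&x=%2Fpath", "utf-8", "strict")
+    'a=b%26c&x=/path'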
+ """ + choices = "|".join(f"{ord(c):02X}" for c in sorted(chars)) + pattern = re.compile(f"((?:%(?:{choices}))+)", re.I) + + def _unquote_partial(value: str, encoding: str, errors: str) -> str: + parts = iter(pattern.split(value)) + out = [] + + for part in parts: + out.append(unquote(part, encoding, errors)) + out.append(next(parts, "")) + + return "".join(out) + + _unquote_partial.__name__ = f"_unquote_{name}" + return _unquote_partial + + +# characters that should remain quoted in URL parts +# based on https://url.spec.whatwg.org/#percent-encoded-bytes +# always keep all controls, space, and % quoted +_always_unsafe = bytes((*range(0x21), 0x25, 0x7F)).decode() +_unquote_fragment = _make_unquote_part("fragment", _always_unsafe) +_unquote_query = _make_unquote_part("query", _always_unsafe + "&=+#") +_unquote_path = _make_unquote_part("path", _always_unsafe + "/?#") +_unquote_user = _make_unquote_part("user", _always_unsafe + ":@/?#") + + +def uri_to_iri( + uri: str | tuple[str, str, str, str, str], + charset: str | None = None, + errors: str | None = None, +) -> str: + """Convert a URI to an IRI. All valid UTF-8 characters are unquoted, + leaving all reserved and invalid characters quoted. If the URL has + a domain, it is decoded from Punycode. + + >>> uri_to_iri("http://xn--n3h.net/p%C3%A5th?q=%C3%A8ry%DF") + 'http://\\u2603.net/p\\xe5th?q=\\xe8ry%DF' + + :param uri: The URI to convert. + :param charset: The encoding to encode unquoted bytes with. + :param errors: Error handler to use during ``bytes.encode``. By + default, invalid bytes are left quoted. + + .. versionchanged:: 2.3 + Passing a tuple or bytes, and the ``charset`` and ``errors`` parameters, are + deprecated and will be removed in Werkzeug 3.0. + + .. versionchanged:: 2.3 + Which characters remain quoted is specific to each part of the URL. + + .. versionchanged:: 0.15 + All reserved and invalid characters remain quoted. Previously, + only some reserved characters were preserved, and invalid bytes + were replaced instead of left quoted. + + .. 
versionadded:: 0.6 + """ + if isinstance(uri, tuple): + warnings.warn( + "Passing a tuple is deprecated and will not be supported in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + uri = urlunsplit(uri) + + if isinstance(uri, bytes): + warnings.warn( + "Passing bytes is deprecated and will not be supported in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + uri = uri.decode() + + if charset is not None: + warnings.warn( + "The 'charset' parameter is deprecated and will be removed" + " in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + else: + charset = "utf-8" + + if errors is not None: + warnings.warn( + "The 'errors' parameter is deprecated and will be removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + else: + errors = "werkzeug.url_quote" + + parts = urlsplit(uri) + path = _unquote_path(parts.path, charset, errors) + query = _unquote_query(parts.query, charset, errors) + fragment = _unquote_fragment(parts.fragment, charset, errors) + + if parts.hostname: + netloc = _decode_idna(parts.hostname) + else: + netloc = "" + + if ":" in netloc: + netloc = f"[{netloc}]" + + if parts.port: + netloc = f"{netloc}:{parts.port}" + + if parts.username: + auth = _unquote_user(parts.username, charset, errors) + + if parts.password: + auth = f"{auth}:{_unquote_user(parts.password, charset, errors)}" + + netloc = f"{auth}@{netloc}" + + return urlunsplit((parts.scheme, netloc, path, query, fragment)) + + +def iri_to_uri( + iri: str | tuple[str, str, str, str, str], + charset: str | None = None, + errors: str | None = None, + safe_conversion: bool | None = None, +) -> str: + """Convert an IRI to a URI. All non-ASCII and unsafe characters are + quoted. If the URL has a domain, it is encoded to Punycode. + + >>> iri_to_uri('http://\\u2603.net/p\\xe5th?q=\\xe8ry%DF') + 'http://xn--n3h.net/p%C3%A5th?q=%C3%A8ry%DF' + + :param iri: The IRI to convert. + :param charset: The encoding of the IRI. + :param errors: Error handler to use during ``bytes.encode``. + + .. versionchanged:: 2.3 + Passing a tuple or bytes, and the ``charset`` and ``errors`` parameters, are + deprecated and will be removed in Werkzeug 3.0. + + .. versionchanged:: 2.3 + Which characters remain unquoted is specific to each part of the URL. + + .. versionchanged:: 2.3 + The ``safe_conversion`` parameter is deprecated and will be removed in Werkzeug + 2.4. + + .. versionchanged:: 0.15 + All reserved characters remain unquoted. Previously, only some reserved + characters were left unquoted. + + .. versionchanged:: 0.9.6 + The ``safe_conversion`` parameter was added. + + .. 
versionadded:: 0.6 + """ + if charset is not None: + warnings.warn( + "The 'charset' parameter is deprecated and will be removed" + " in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + else: + charset = "utf-8" + + if isinstance(iri, tuple): + warnings.warn( + "Passing a tuple is deprecated and will not be supported in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + iri = urlunsplit(iri) + + if isinstance(iri, bytes): + warnings.warn( + "Passing bytes is deprecated and will not be supported in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + iri = iri.decode(charset) + + if errors is not None: + warnings.warn( + "The 'errors' parameter is deprecated and will be removed in Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + else: + errors = "strict" + + if safe_conversion is not None: + warnings.warn( + "The 'safe_conversion' parameter is deprecated and will be removed in" + " Werkzeug 3.0.", + DeprecationWarning, + stacklevel=2, + ) + + if safe_conversion: + # If we're not sure if it's safe to normalize the URL, and it only contains + # ASCII characters, return it as-is. + try: + ascii_iri = iri.encode("ascii") + + # Only return if it doesn't have whitespace. (Why?) + if len(ascii_iri.split()) == 1: + return iri + except UnicodeError: + pass + + parts = urlsplit(iri) + # safe = https://url.spec.whatwg.org/#url-path-segment-string + # as well as percent for things that are already quoted + path = quote(parts.path, safe="%!$&'()*+,/:;=@", encoding=charset, errors=errors) + query = quote(parts.query, safe="%!$&'()*+,/:;=?@", encoding=charset, errors=errors) + fragment = quote( + parts.fragment, safe="%!#$&'()*+,/:;=?@", encoding=charset, errors=errors + ) + + if parts.hostname: + netloc = parts.hostname.encode("idna").decode("ascii") + else: + netloc = "" + + if ":" in netloc: + netloc = f"[{netloc}]" + + if parts.port: + netloc = f"{netloc}:{parts.port}" + + if parts.username: + auth = quote(parts.username, safe="%!$&'()*+,;=") + + if parts.password: + pass_quoted = quote(parts.password, safe="%!$&'()*+,;=") + auth = f"{auth}:{pass_quoted}" + + netloc = f"{auth}@{netloc}" + + return urlunsplit((parts.scheme, netloc, path, query, fragment)) + + +def _invalid_iri_to_uri(iri: str) -> str: + """The URL scheme ``itms-services://`` must contain the ``//`` even though it does + not have a host component. There may be other invalid schemes as well. Currently, + responses will always call ``iri_to_uri`` on the redirect ``Location`` header, which + removes the ``//``. For now, if the IRI only contains ASCII and does not contain + spaces, pass it on as-is. In Werkzeug 3.0, this should become a + ``response.process_location`` flag. + + :meta private: + """ + try: + iri.encode("ascii") + except UnicodeError: + pass + else: + if len(iri.split(None, 1)) == 1: + return iri + + return iri_to_uri(iri) + + +def url_decode( + s: t.AnyStr, + charset: str = "utf-8", + include_empty: bool = True, + errors: str = "replace", + separator: str = "&", + cls: type[ds.MultiDict] | None = None, +) -> ds.MultiDict[str, str]: + """Parse a query string and return it as a :class:`MultiDict`. + + :param s: The query string to parse. + :param charset: Decode bytes to string with this charset. If not + given, bytes are returned as-is. + :param include_empty: Include keys with empty values in the dict. + :param errors: Error handling behavior when decoding bytes. + :param separator: Separator character between pairs. + :param cls: Container to hold result instead of :class:`MultiDict`. 
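+
+    For example:
+
+    >>> url_decode("a=1&a=2&b=3").getlist("a")
+    ['1', '2']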
+ + .. deprecated:: 2.3 + Will be removed in Werkzeug 3.0. Use ``urllib.parse.parse_qs`` instead. + + .. versionchanged:: 2.1 + The ``decode_keys`` parameter was removed. + + .. versionchanged:: 0.5 + In previous versions ";" and "&" could be used for url decoding. + Now only "&" is supported. If you want to use ";", a different + ``separator`` can be provided. + + .. versionchanged:: 0.5 + The ``cls`` parameter was added. + """ + warnings.warn( + "'werkzeug.urls.url_decode' is deprecated and will be removed in Werkzeug 2.4." + " Use 'urllib.parse.parse_qs' instead.", + DeprecationWarning, + stacklevel=2, + ) + + if cls is None: + from .datastructures import MultiDict # noqa: F811 + + cls = MultiDict + if isinstance(s, str) and not isinstance(separator, str): + separator = separator.decode(charset or "ascii") + elif isinstance(s, bytes) and not isinstance(separator, bytes): + separator = separator.encode(charset or "ascii") # type: ignore + return cls( + _url_decode_impl( + s.split(separator), charset, include_empty, errors # type: ignore + ) + ) + + +def url_decode_stream( + stream: t.IO[bytes], + charset: str = "utf-8", + include_empty: bool = True, + errors: str = "replace", + separator: bytes = b"&", + cls: type[ds.MultiDict] | None = None, + limit: int | None = None, +) -> ds.MultiDict[str, str]: + """Works like :func:`url_decode` but decodes a stream. The behavior + of stream and limit follows functions like + :func:`~werkzeug.wsgi.make_line_iter`. The generator of pairs is + directly fed to the `cls` so you can consume the data while it's + parsed. + + :param stream: a stream with the encoded querystring + :param charset: the charset of the query string. If set to `None` + no decoding will take place. + :param include_empty: Set to `False` if you don't want empty values to + appear in the dict. + :param errors: the decoding error behavior. + :param separator: the pair separator to be used, defaults to ``&`` + :param cls: an optional dict class to use. If this is not specified + or `None` the default :class:`MultiDict` is used. + :param limit: the content length of the URL data. Not necessary if + a limited stream is provided. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 2.4. Use ``urllib.parse.parse_qs`` instead. + + .. versionchanged:: 2.1 + The ``decode_keys`` and ``return_iterator`` parameters were removed. + + .. versionadded:: 0.8 + """ + warnings.warn( + "'werkzeug.urls.url_decode_stream' is deprecated and will be removed in" + " Werkzeug 2.4. 
Use 'urllib.parse.parse_qs' instead.", + DeprecationWarning, + stacklevel=2, + ) + + from .wsgi import make_chunk_iter + + pair_iter = make_chunk_iter(stream, separator, limit) + decoder = _url_decode_impl(pair_iter, charset, include_empty, errors) + + if cls is None: + from .datastructures import MultiDict # noqa: F811 + + cls = MultiDict + + return cls(decoder) + + +def _url_decode_impl( + pair_iter: t.Iterable[t.AnyStr], charset: str, include_empty: bool, errors: str +) -> t.Iterator[tuple[str, str]]: + for pair in pair_iter: + if not pair: + continue + s = _make_encode_wrapper(pair) + equal = s("=") + if equal in pair: + key, value = pair.split(equal, 1) + else: + if not include_empty: + continue + key = pair + value = s("") + yield ( + url_unquote_plus(key, charset, errors), + url_unquote_plus(value, charset, errors), + ) + + +def url_encode( + obj: t.Mapping[str, str] | t.Iterable[tuple[str, str]], + charset: str = "utf-8", + sort: bool = False, + key: t.Callable[[tuple[str, str]], t.Any] | None = None, + separator: str = "&", +) -> str: + """URL encode a dict/`MultiDict`. If a value is `None` it will not appear + in the result string. Per default only values are encoded into the target + charset strings. + + :param obj: the object to encode into a query string. + :param charset: the charset of the query string. + :param sort: set to `True` if you want parameters to be sorted by `key`. + :param separator: the separator to be used for the pairs. + :param key: an optional function to be used for sorting. For more details + check out the :func:`sorted` documentation. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 2.4. Use ``urllib.parse.urlencode`` instead. + + .. versionchanged:: 2.1 + The ``encode_keys`` parameter was removed. + + .. versionchanged:: 0.5 + Added the ``sort``, ``key``, and ``separator`` parameters. + """ + warnings.warn( + "'werkzeug.urls.url_encode' is deprecated and will be removed in Werkzeug 2.4." + " Use 'urllib.parse.urlencode' instead.", + DeprecationWarning, + stacklevel=2, + ) + separator = _to_str(separator, "ascii") + return separator.join(_url_encode_impl(obj, charset, sort, key)) + + +def url_encode_stream( + obj: t.Mapping[str, str] | t.Iterable[tuple[str, str]], + stream: t.IO[str] | None = None, + charset: str = "utf-8", + sort: bool = False, + key: t.Callable[[tuple[str, str]], t.Any] | None = None, + separator: str = "&", +) -> None: + """Like :meth:`url_encode` but writes the results to a stream + object. If the stream is `None` a generator over all encoded + pairs is returned. + + :param obj: the object to encode into a query string. + :param stream: a stream to write the encoded object into or `None` if + an iterator over the encoded pairs should be returned. In + that case the separator argument is ignored. + :param charset: the charset of the query string. + :param sort: set to `True` if you want parameters to be sorted by `key`. + :param separator: the separator to be used for the pairs. + :param key: an optional function to be used for sorting. For more details + check out the :func:`sorted` documentation. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 2.4. Use ``urllib.parse.urlencode`` instead. + + .. versionchanged:: 2.1 + The ``encode_keys`` parameter was removed. + + .. versionadded:: 0.8 + """ + warnings.warn( + "'werkzeug.urls.url_encode_stream' is deprecated and will be removed in" + " Werkzeug 2.4. 
Use 'urllib.parse.urlencode' instead.", + DeprecationWarning, + stacklevel=2, + ) + separator = _to_str(separator, "ascii") + gen = _url_encode_impl(obj, charset, sort, key) + if stream is None: + return gen # type: ignore + for idx, chunk in enumerate(gen): + if idx: + stream.write(separator) + stream.write(chunk) + return None + + +def url_join( + base: str | tuple[str, str, str, str, str], + url: str | tuple[str, str, str, str, str], + allow_fragments: bool = True, +) -> str: + """Join a base URL and a possibly relative URL to form an absolute + interpretation of the latter. + + :param base: the base URL for the join operation. + :param url: the URL to join. + :param allow_fragments: indicates whether fragments should be allowed. + + .. deprecated:: 2.3 + Will be removed in Werkzeug 2.4. Use ``urllib.parse.urljoin`` instead. + """ + warnings.warn( + "'werkzeug.urls.url_join' is deprecated and will be removed in Werkzeug 2.4." + " Use 'urllib.parse.urljoin' instead.", + DeprecationWarning, + stacklevel=2, + ) + + if isinstance(base, tuple): + base = url_unparse(base) + if isinstance(url, tuple): + url = url_unparse(url) + + _check_str_tuple((base, url)) + s = _make_encode_wrapper(base) + + if not base: + return url + if not url: + return base + + bscheme, bnetloc, bpath, bquery, bfragment = url_parse( + base, allow_fragments=allow_fragments + ) + scheme, netloc, path, query, fragment = url_parse(url, bscheme, allow_fragments) + if scheme != bscheme: + return url + if netloc: + return url_unparse((scheme, netloc, path, query, fragment)) + netloc = bnetloc + + if path[:1] == s("/"): + segments = path.split(s("/")) + elif not path: + segments = bpath.split(s("/")) + if not query: + query = bquery + else: + segments = bpath.split(s("/"))[:-1] + path.split(s("/")) + + # If the rightmost part is "./" we want to keep the slash but + # remove the dot. + if segments[-1] == s("."): + segments[-1] = s("") + + # Resolve ".." and "." + segments = [segment for segment in segments if segment != s(".")] + while True: + i = 1 + n = len(segments) - 1 + while i < n: + if segments[i] == s("..") and segments[i - 1] not in (s(""), s("..")): + del segments[i - 1 : i + 1] + break + i += 1 + else: + break + + # Remove trailing ".." if the URL is absolute + unwanted_marker = [s(""), s("..")] + while segments[:2] == unwanted_marker: + del segments[1] + + path = s("/").join(segments) + return url_unparse((scheme, netloc, path, query, fragment)) + + +def _urlencode( + query: t.Mapping[str, str] | t.Iterable[tuple[str, str]], encoding: str = "utf-8" +) -> str: + items = [x for x in iter_multi_items(query) if x[1] is not None] + # safe = https://url.spec.whatwg.org/#percent-encoded-bytes + return urlencode(items, safe="!$'()*,/:;?@", encoding=encoding) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/user_agent.py b/backend/test/lib/python3.8/site-packages/werkzeug/user_agent.py new file mode 100644 index 0000000000000000000000000000000000000000..17e5d3fdbf0560318981210c7c824fee10853d13 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/user_agent.py @@ -0,0 +1,47 @@ +from __future__ import annotations + + +class UserAgent: + """Represents a parsed user agent header value. + + The default implementation does no parsing, only the :attr:`string` + attribute is set. A subclass may parse the string to set the + common attributes or expose other information. Set + :attr:`werkzeug.wrappers.Request.user_agent_class` to use a + subclass. + + :param string: The header value to parse. + + .. 
versionadded:: 2.0 + This replaces the previous ``useragents`` module, but does not + provide a built-in parser. + """ + + platform: str | None = None + """The OS name, if it could be parsed from the string.""" + + browser: str | None = None + """The browser name, if it could be parsed from the string.""" + + version: str | None = None + """The browser version, if it could be parsed from the string.""" + + language: str | None = None + """The browser language, if it could be parsed from the string.""" + + def __init__(self, string: str) -> None: + self.string: str = string + """The original header value.""" + + def __repr__(self) -> str: + return f"<{type(self).__name__} {self.browser}/{self.version}>" + + def __str__(self) -> str: + return self.string + + def __bool__(self) -> bool: + return bool(self.browser) + + def to_header(self) -> str: + """Convert to a header value.""" + return self.string diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/utils.py b/backend/test/lib/python3.8/site-packages/werkzeug/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..785ac28b9808f36fea6088ca186e4341e782d1c1 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/utils.py @@ -0,0 +1,690 @@ +from __future__ import annotations + +import io +import mimetypes +import os +import pkgutil +import re +import sys +import typing as t +import unicodedata +from datetime import datetime +from time import time +from urllib.parse import quote +from zlib import adler32 + +from markupsafe import escape + +from ._internal import _DictAccessorProperty +from ._internal import _missing +from ._internal import _TAccessorValue +from .datastructures import Headers +from .exceptions import NotFound +from .exceptions import RequestedRangeNotSatisfiable +from .security import safe_join +from .wsgi import wrap_file + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIEnvironment + from .wrappers.request import Request + from .wrappers.response import Response + +_T = t.TypeVar("_T") + +_entity_re = re.compile(r"&([^;]+);") +_filename_ascii_strip_re = re.compile(r"[^A-Za-z0-9_.-]") +_windows_device_files = { + "CON", + "PRN", + "AUX", + "NUL", + *(f"COM{i}" for i in range(10)), + *(f"LPT{i}" for i in range(10)), +} + + +class cached_property(property, t.Generic[_T]): + """A :func:`property` that is only evaluated once. Subsequent access + returns the cached value. Setting the property sets the cached + value. Deleting the property clears the cached value, accessing it + again will evaluate it again. + + .. code-block:: python + + class Example: + @cached_property + def value(self): + # calculate something important here + return 42 + + e = Example() + e.value # evaluates + e.value # uses cache + e.value = 16 # sets cache + del e.value # clears cache + + If the class defines ``__slots__``, it must add ``_cache_{name}`` as + a slot. Alternatively, it can add ``__dict__``, but that's usually + not desirable. + + .. versionchanged:: 2.1 + Works with ``__slots__``. + + .. versionchanged:: 2.0 + ``del obj.name`` clears the cached value. 
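+
+    A minimal sketch (editor's illustration, not from the original
+    docstring) of pairing ``cached_property`` with ``__slots__``; the
+    cache slot must be named ``_cache_{name}``:
+
+    .. code-block:: python
+
+        class Slotted:
+            __slots__ = ("_cache_value",)
+
+            @cached_property
+            def value(self):
+                return 42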
+ """ + + def __init__( + self, + fget: t.Callable[[t.Any], _T], + name: str | None = None, + doc: str | None = None, + ) -> None: + super().__init__(fget, doc=doc) + self.__name__ = name or fget.__name__ + self.slot_name = f"_cache_{self.__name__}" + self.__module__ = fget.__module__ + + def __set__(self, obj: object, value: _T) -> None: + if hasattr(obj, "__dict__"): + obj.__dict__[self.__name__] = value + else: + setattr(obj, self.slot_name, value) + + def __get__(self, obj: object, type: type = None) -> _T: # type: ignore + if obj is None: + return self # type: ignore + + obj_dict = getattr(obj, "__dict__", None) + + if obj_dict is not None: + value: _T = obj_dict.get(self.__name__, _missing) + else: + value = getattr(obj, self.slot_name, _missing) # type: ignore[arg-type] + + if value is _missing: + value = self.fget(obj) # type: ignore + + if obj_dict is not None: + obj.__dict__[self.__name__] = value + else: + setattr(obj, self.slot_name, value) + + return value + + def __delete__(self, obj: object) -> None: + if hasattr(obj, "__dict__"): + del obj.__dict__[self.__name__] + else: + setattr(obj, self.slot_name, _missing) + + +class environ_property(_DictAccessorProperty[_TAccessorValue]): + """Maps request attributes to environment variables. This works not only + for the Werkzeug request object, but also any other class with an + environ attribute: + + >>> class Test(object): + ... environ = {'key': 'value'} + ... test = environ_property('key') + >>> var = Test() + >>> var.test + 'value' + + If you pass it a second value it's used as default if the key does not + exist, the third one can be a converter that takes a value and converts + it. If it raises :exc:`ValueError` or :exc:`TypeError` the default value + is used. If no default value is provided `None` is used. + + Per default the property is read only. You have to explicitly enable it + by passing ``read_only=False`` to the constructor. + """ + + read_only = True + + def lookup(self, obj: Request) -> WSGIEnvironment: + return obj.environ + + +class header_property(_DictAccessorProperty[_TAccessorValue]): + """Like `environ_property` but for headers.""" + + def lookup(self, obj: Request | Response) -> Headers: + return obj.headers + + +# https://cgit.freedesktop.org/xdg/shared-mime-info/tree/freedesktop.org.xml.in +# https://www.iana.org/assignments/media-types/media-types.xhtml +# Types listed in the XDG mime info that have a charset in the IANA registration. +_charset_mimetypes = { + "application/ecmascript", + "application/javascript", + "application/sql", + "application/xml", + "application/xml-dtd", + "application/xml-external-parsed-entity", +} + + +def get_content_type(mimetype: str, charset: str) -> str: + """Returns the full content type string with charset for a mimetype. + + If the mimetype represents text, the charset parameter will be + appended, otherwise the mimetype is returned unchanged. + + :param mimetype: The mimetype to be used as content type. + :param charset: The charset to be appended for text mimetypes. + :return: The content type. + + .. versionchanged:: 0.15 + Any type that ends with ``+xml`` gets a charset, not just those + that start with ``application/``. Known text types such as + ``application/javascript`` are also given charsets. 
+ """ + if ( + mimetype.startswith("text/") + or mimetype in _charset_mimetypes + or mimetype.endswith("+xml") + ): + mimetype += f"; charset={charset}" + + return mimetype + + +def secure_filename(filename: str) -> str: + r"""Pass it a filename and it will return a secure version of it. This + filename can then safely be stored on a regular file system and passed + to :func:`os.path.join`. The filename returned is an ASCII only string + for maximum portability. + + On windows systems the function also makes sure that the file is not + named after one of the special device files. + + >>> secure_filename("My cool movie.mov") + 'My_cool_movie.mov' + >>> secure_filename("../../../etc/passwd") + 'etc_passwd' + >>> secure_filename('i contain cool \xfcml\xe4uts.txt') + 'i_contain_cool_umlauts.txt' + + The function might return an empty filename. It's your responsibility + to ensure that the filename is unique and that you abort or + generate a random filename if the function returned an empty one. + + .. versionadded:: 0.5 + + :param filename: the filename to secure + """ + filename = unicodedata.normalize("NFKD", filename) + filename = filename.encode("ascii", "ignore").decode("ascii") + + for sep in os.sep, os.path.altsep: + if sep: + filename = filename.replace(sep, " ") + filename = str(_filename_ascii_strip_re.sub("", "_".join(filename.split()))).strip( + "._" + ) + + # on nt a couple of special files are present in each folder. We + # have to ensure that the target file is not such a filename. In + # this case we prepend an underline + if ( + os.name == "nt" + and filename + and filename.split(".")[0].upper() in _windows_device_files + ): + filename = f"_{filename}" + + return filename + + +def redirect( + location: str, code: int = 302, Response: type[Response] | None = None +) -> Response: + """Returns a response object (a WSGI application) that, if called, + redirects the client to the target location. Supported codes are + 301, 302, 303, 305, 307, and 308. 300 is not supported because + it's not a real redirect and 304 because it's the answer for a + request with a request with defined If-Modified-Since headers. + + .. versionadded:: 0.6 + The location can now be a unicode string that is encoded using + the :func:`iri_to_uri` function. + + .. versionadded:: 0.10 + The class used for the Response object can now be passed in. + + :param location: the location the response should redirect to. + :param code: the redirect status code. defaults to 302. + :param class Response: a Response class to use when instantiating a + response. The default is :class:`werkzeug.wrappers.Response` if + unspecified. + """ + if Response is None: + from .wrappers import Response + + html_location = escape(location) + response = Response( # type: ignore[misc] + "<!doctype html>\n" + "<html lang=en>\n" + "<title>Redirecting...</title>\n" + "<h1>Redirecting...</h1>\n" + "<p>You should be redirected automatically to the target URL: " + f'<a href="{html_location}">{html_location}</a>. If not, click the link.\n', + code, + mimetype="text/html", + ) + response.headers["Location"] = location + return response + + +def append_slash_redirect(environ: WSGIEnvironment, code: int = 308) -> Response: + """Redirect to the current URL with a slash appended. + + If the current URL is ``/user/42``, the redirect URL will be + ``42/``. When joined to the current URL during response + processing or by the browser, this will produce ``/user/42/``. + + The behavior is undefined if the path ends with a slash already. 
If + called unconditionally on a URL, it may produce a redirect loop. + + :param environ: Use the path and query from this WSGI environment + to produce the redirect URL. + :param code: the status code for the redirect. + + .. versionchanged:: 2.1 + Produce a relative URL that only modifies the last segment. + Relevant when the current path has multiple segments. + + .. versionchanged:: 2.1 + The default status code is 308 instead of 301. This preserves + the request method and body. + """ + tail = environ["PATH_INFO"].rpartition("/")[2] + + if not tail: + new_path = "./" + else: + new_path = f"{tail}/" + + query_string = environ.get("QUERY_STRING") + + if query_string: + new_path = f"{new_path}?{query_string}" + + return redirect(new_path, code) + + +def send_file( + path_or_file: os.PathLike | str | t.IO[bytes], + environ: WSGIEnvironment, + mimetype: str | None = None, + as_attachment: bool = False, + download_name: str | None = None, + conditional: bool = True, + etag: bool | str = True, + last_modified: datetime | int | float | None = None, + max_age: None | (int | t.Callable[[str | None], int | None]) = None, + use_x_sendfile: bool = False, + response_class: type[Response] | None = None, + _root_path: os.PathLike | str | None = None, +) -> Response: + """Send the contents of a file to the client. + + The first argument can be a file path or a file-like object. Paths + are preferred in most cases because Werkzeug can manage the file and + get extra information from the path. Passing a file-like object + requires that the file is opened in binary mode, and is mostly + useful when building a file in memory with :class:`io.BytesIO`. + + Never pass file paths provided by a user. The path is assumed to be + trusted, so a user could craft a path to access a file you didn't + intend. Use :func:`send_from_directory` to safely serve user-provided paths. + + If the WSGI server sets a ``file_wrapper`` in ``environ``, it is + used, otherwise Werkzeug's built-in wrapper is used. Alternatively, + if the HTTP server supports ``X-Sendfile``, ``use_x_sendfile=True`` + will tell the server to send the given path, which is much more + efficient than reading it in Python. + + :param path_or_file: The path to the file to send, relative to the + current working directory if a relative path is given. + Alternatively, a file-like object opened in binary mode. Make + sure the file pointer is seeked to the start of the data. + :param environ: The WSGI environ for the current request. + :param mimetype: The MIME type to send for the file. If not + provided, it will try to detect it from the file name. + :param as_attachment: Indicate to a browser that it should offer to + save the file instead of displaying it. + :param download_name: The default name browsers will use when saving + the file. Defaults to the passed file name. + :param conditional: Enable conditional and range responses based on + request headers. Requires passing a file path and ``environ``. + :param etag: Calculate an ETag for the file, which requires passing + a file path. Can also be a string to use instead. + :param last_modified: The last modified time to send for the file, + in seconds. If not provided, it will try to detect it from the + file path. + :param max_age: How long the client should cache the file, in + seconds. If set, ``Cache-Control`` will be ``public``, otherwise + it will be ``no-cache`` to prefer conditional caching. + :param use_x_sendfile: Set the ``X-Sendfile`` header to let the + server to efficiently send the file. 
Requires support from the + HTTP server. Requires passing a file path. + :param response_class: Build the response using this class. Defaults + to :class:`~werkzeug.wrappers.Response`. + :param _root_path: Do not use. For internal use only. Use + :func:`send_from_directory` to safely send files under a path. + + .. versionchanged:: 2.0.2 + ``send_file`` only sets a detected ``Content-Encoding`` if + ``as_attachment`` is disabled. + + .. versionadded:: 2.0 + Adapted from Flask's implementation. + + .. versionchanged:: 2.0 + ``download_name`` replaces Flask's ``attachment_filename`` + parameter. If ``as_attachment=False``, it is passed with + ``Content-Disposition: inline`` instead. + + .. versionchanged:: 2.0 + ``max_age`` replaces Flask's ``cache_timeout`` parameter. + ``conditional`` is enabled and ``max_age`` is not set by + default. + + .. versionchanged:: 2.0 + ``etag`` replaces Flask's ``add_etags`` parameter. It can be a + string to use instead of generating one. + + .. versionchanged:: 2.0 + If an encoding is returned when guessing ``mimetype`` from + ``download_name``, set the ``Content-Encoding`` header. + """ + if response_class is None: + from .wrappers import Response + + response_class = Response + + path: str | None = None + file: t.IO[bytes] | None = None + size: int | None = None + mtime: float | None = None + headers = Headers() + + if isinstance(path_or_file, (os.PathLike, str)) or hasattr( + path_or_file, "__fspath__" + ): + path_or_file = t.cast(t.Union[os.PathLike, str], path_or_file) + + # Flask will pass app.root_path, allowing its send_file wrapper + # to not have to deal with paths. + if _root_path is not None: + path = os.path.join(_root_path, path_or_file) + else: + path = os.path.abspath(path_or_file) + + stat = os.stat(path) + size = stat.st_size + mtime = stat.st_mtime + else: + file = path_or_file + + if download_name is None and path is not None: + download_name = os.path.basename(path) + + if mimetype is None: + if download_name is None: + raise TypeError( + "Unable to detect the MIME type because a file name is" + " not available. Either set 'download_name', pass a" + " path instead of a file, or set 'mimetype'." + ) + + mimetype, encoding = mimetypes.guess_type(download_name) + + if mimetype is None: + mimetype = "application/octet-stream" + + # Don't send encoding for attachments, it causes browsers to + # save decompress tar.gz files. + if encoding is not None and not as_attachment: + headers.set("Content-Encoding", encoding) + + if download_name is not None: + try: + download_name.encode("ascii") + except UnicodeEncodeError: + simple = unicodedata.normalize("NFKD", download_name) + simple = simple.encode("ascii", "ignore").decode("ascii") + # safe = RFC 5987 attr-char + quoted = quote(download_name, safe="!#$&+-.^_`|~") + names = {"filename": simple, "filename*": f"UTF-8''{quoted}"} + else: + names = {"filename": download_name} + + value = "attachment" if as_attachment else "inline" + headers.set("Content-Disposition", value, **names) + elif as_attachment: + raise TypeError( + "No name provided for attachment. Either set" + " 'download_name' or pass a path instead of a file." 
+ ) + + if use_x_sendfile and path is not None: + headers["X-Sendfile"] = path + data = None + else: + if file is None: + file = open(path, "rb") # type: ignore + elif isinstance(file, io.BytesIO): + size = file.getbuffer().nbytes + elif isinstance(file, io.TextIOBase): + raise ValueError("Files must be opened in binary mode or use BytesIO.") + + data = wrap_file(environ, file) + + rv = response_class( + data, mimetype=mimetype, headers=headers, direct_passthrough=True + ) + + if size is not None: + rv.content_length = size + + if last_modified is not None: + rv.last_modified = last_modified # type: ignore + elif mtime is not None: + rv.last_modified = mtime # type: ignore + + rv.cache_control.no_cache = True + + # Flask will pass app.get_send_file_max_age, allowing its send_file + # wrapper to not have to deal with paths. + if callable(max_age): + max_age = max_age(path) + + if max_age is not None: + if max_age > 0: + rv.cache_control.no_cache = None + rv.cache_control.public = True + + rv.cache_control.max_age = max_age + rv.expires = int(time() + max_age) # type: ignore + + if isinstance(etag, str): + rv.set_etag(etag) + elif etag and path is not None: + check = adler32(path.encode("utf-8")) & 0xFFFFFFFF + rv.set_etag(f"{mtime}-{size}-{check}") + + if conditional: + try: + rv = rv.make_conditional(environ, accept_ranges=True, complete_length=size) + except RequestedRangeNotSatisfiable: + if file is not None: + file.close() + + raise + + # Some x-sendfile implementations incorrectly ignore the 304 + # status code and send the file anyway. + if rv.status_code == 304: + rv.headers.pop("x-sendfile", None) + + return rv + + +def send_from_directory( + directory: os.PathLike | str, + path: os.PathLike | str, + environ: WSGIEnvironment, + **kwargs: t.Any, +) -> Response: + """Send a file from within a directory using :func:`send_file`. + + This is a secure way to serve files from a folder, such as static + files or uploads. Uses :func:`~werkzeug.security.safe_join` to + ensure the path coming from the client is not maliciously crafted to + point outside the specified directory. + + If the final path does not point to an existing regular file, + returns a 404 :exc:`~werkzeug.exceptions.NotFound` error. + + :param directory: The directory that ``path`` must be located under. This *must not* + be a value provided by the client, otherwise it becomes insecure. + :param path: The path to the file to send, relative to ``directory``. This is the + part of the path provided by the client, which is checked for security. + :param environ: The WSGI environ for the current request. + :param kwargs: Arguments to pass to :func:`send_file`. + + .. versionadded:: 2.0 + Adapted from Flask's implementation. + """ + path = safe_join(os.fspath(directory), os.fspath(path)) + + if path is None: + raise NotFound() + + # Flask will pass app.root_path, allowing its send_from_directory + # wrapper to not have to deal with paths. + if "_root_path" in kwargs: + path = os.path.join(kwargs["_root_path"], path) + + if not os.path.isfile(path): + raise NotFound() + + return send_file(path, environ, **kwargs) + + +def import_string(import_name: str, silent: bool = False) -> t.Any: + """Imports an object based on a string. This is useful if you want to + use import paths as endpoints or something similar. An import path can + be specified either in dotted notation (``xml.sax.saxutils.escape``) + or with a colon as object delimiter (``xml.sax.saxutils:escape``). 
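+
+    Both spellings resolve to the same object (editor's illustration)::
+
+        import_string("xml.sax.saxutils.escape")
+        import_string("xml.sax.saxutils:escape")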
+ + If `silent` is True the return value will be `None` if the import fails. + + :param import_name: the dotted name for the object to import. + :param silent: if set to `True` import errors are ignored and + `None` is returned instead. + :return: imported object + """ + import_name = import_name.replace(":", ".") + try: + try: + __import__(import_name) + except ImportError: + if "." not in import_name: + raise + else: + return sys.modules[import_name] + + module_name, obj_name = import_name.rsplit(".", 1) + module = __import__(module_name, globals(), locals(), [obj_name]) + try: + return getattr(module, obj_name) + except AttributeError as e: + raise ImportError(e) from None + + except ImportError as e: + if not silent: + raise ImportStringError(import_name, e).with_traceback( + sys.exc_info()[2] + ) from None + + return None + + +def find_modules( + import_path: str, include_packages: bool = False, recursive: bool = False +) -> t.Iterator[str]: + """Finds all the modules below a package. This can be useful to + automatically import all views / controllers so that their metaclasses / + function decorators have a chance to register themselves on the + application. + + Packages are not returned unless `include_packages` is `True`. This can + also recursively list modules but in that case it will import all the + packages to get the correct load path of that module. + + :param import_path: the dotted name for the package to find child modules. + :param include_packages: set to `True` if packages should be returned, too. + :param recursive: set to `True` if recursion should happen. + :return: generator + """ + module = import_string(import_path) + path = getattr(module, "__path__", None) + if path is None: + raise ValueError(f"{import_path!r} is not a package") + basename = f"{module.__name__}." + for _importer, modname, ispkg in pkgutil.iter_modules(path): + modname = basename + modname + if ispkg: + if include_packages: + yield modname + if recursive: + yield from find_modules(modname, include_packages, True) + else: + yield modname + + +class ImportStringError(ImportError): + """Provides information about a failed :func:`import_string` attempt.""" + + #: String in dotted notation that failed to be imported. + import_name: str + #: Wrapped exception. + exception: BaseException + + def __init__(self, import_name: str, exception: BaseException) -> None: + self.import_name = import_name + self.exception = exception + msg = import_name + name = "" + tracked = [] + for part in import_name.replace(":", ".").split("."): + name = f"{name}.{part}" if name else part + imported = import_string(name, silent=True) + if imported: + tracked.append((name, getattr(imported, "__file__", None))) + else: + track = [f"- {n!r} found in {i!r}." for n, i in tracked] + track.append(f"- {name!r} not found.") + track_str = "\n".join(track) + msg = ( + f"import_string() failed for {import_name!r}. 
Possible reasons" + f" are:\n\n" + "- missing __init__.py in a package;\n" + "- package or module path not included in sys.path;\n" + "- duplicated package or module name taking precedence in" + " sys.path;\n" + "- missing module, class, function or variable;\n\n" + f"Debugged import:\n\n{track_str}\n\n" + f"Original exception:\n\n{type(exception).__name__}: {exception}" + ) + break + + super().__init__(msg) + + def __repr__(self) -> str: + return f"<{type(self).__name__}({self.import_name!r}, {self.exception!r})>" diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/__init__.py b/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b8c45d71cf335da7b6fdc6a2117e1c46dbf2d6e3 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/__init__.py @@ -0,0 +1,3 @@ +from .request import Request as Request +from .response import Response as Response +from .response import ResponseStream diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/__init__.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..222296e9e67f276a81deb8e527c90545247a0306 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/__init__.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/request.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/request.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9606d794327704b745cfa669367762165246131e Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/request.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/response.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/response.cpython-38.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eb229334bf93a264e4ba2dc9eac81c17f86f2189 Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/__pycache__/response.cpython-38.pyc differ diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/request.py b/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/request.py new file mode 100644 index 0000000000000000000000000000000000000000..f4f51b1dc60ffc3df0f3b1dc886eb0e0e6e332e4 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/request.py @@ -0,0 +1,653 @@ +from __future__ import annotations + +import functools +import json +import typing as t +from io import BytesIO + +from .._internal import _wsgi_decoding_dance +from ..datastructures import CombinedMultiDict +from ..datastructures import EnvironHeaders +from ..datastructures import FileStorage +from ..datastructures import ImmutableMultiDict +from ..datastructures import iter_multi_items +from ..datastructures import MultiDict +from ..exceptions import BadRequest +from ..exceptions import UnsupportedMediaType +from ..formparser import default_stream_factory +from ..formparser import FormDataParser +from ..sansio.request import Request as _SansIORequest +from ..utils import cached_property +from ..utils import environ_property +from ..wsgi import _get_server +from ..wsgi import get_input_stream + +if t.TYPE_CHECKING: 
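+    # These imports are only needed for static type checking; ``from
+    # __future__ import annotations`` at the top of this module keeps the
+    # annotations from being evaluated at runtime.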
+ from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +class Request(_SansIORequest): + """Represents an incoming WSGI HTTP request, with headers and body + taken from the WSGI environment. Has properties and methods for + using the functionality defined by various HTTP specs. The data in + requests object is read-only. + + Text data is assumed to use UTF-8 encoding, which should be true for + the vast majority of modern clients. Using an encoding set by the + client is unsafe in Python due to extra encodings it provides, such + as ``zip``. To change the assumed encoding, subclass and replace + :attr:`charset`. + + :param environ: The WSGI environ is generated by the WSGI server and + contains information about the server configuration and client + request. + :param populate_request: Add this request object to the WSGI environ + as ``environ['werkzeug.request']``. Can be useful when + debugging. + :param shallow: Makes reading from :attr:`stream` (and any method + that would read from it) raise a :exc:`RuntimeError`. Useful to + prevent consuming the form data in middleware, which would make + it unavailable to the final application. + + .. versionchanged:: 2.1 + Old ``BaseRequest`` and mixin classes were removed. + + .. versionchanged:: 2.1 + Remove the ``disable_data_descriptor`` attribute. + + .. versionchanged:: 2.0 + Combine ``BaseRequest`` and mixins into a single ``Request`` + class. + + .. versionchanged:: 0.5 + Read-only mode is enforced with immutable classes for all data. + """ + + #: the maximum content length. This is forwarded to the form data + #: parsing function (:func:`parse_form_data`). When set and the + #: :attr:`form` or :attr:`files` attribute is accessed and the + #: parsing fails because more than the specified value is transmitted + #: a :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised. + #: + #: .. versionadded:: 0.5 + max_content_length: int | None = None + + #: the maximum form field size. This is forwarded to the form data + #: parsing function (:func:`parse_form_data`). When set and the + #: :attr:`form` or :attr:`files` attribute is accessed and the + #: data in memory for post data is longer than the specified value a + #: :exc:`~werkzeug.exceptions.RequestEntityTooLarge` exception is raised. + #: + #: .. versionadded:: 0.5 + max_form_memory_size: int | None = None + + #: The maximum number of multipart parts to parse, passed to + #: :attr:`form_data_parser_class`. Parsing form data with more than this + #: many parts will raise :exc:`~.RequestEntityTooLarge`. + #: + #: .. versionadded:: 2.2.3 + max_form_parts = 1000 + + #: The form data parser that should be used. Can be replaced to customize + #: the form date parsing. + form_data_parser_class: type[FormDataParser] = FormDataParser + + #: The WSGI environment containing HTTP headers and information from + #: the WSGI server. + environ: WSGIEnvironment + + #: Set when creating the request object. If ``True``, reading from + #: the request body will cause a ``RuntimeException``. Useful to + #: prevent modifying the stream from middleware. 
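+    #:
+    #: Illustrative use (editor's sketch): middleware can inspect the
+    #: headers without consuming the body via
+    #: ``Request(environ, shallow=True)``.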
+ shallow: bool + + def __init__( + self, + environ: WSGIEnvironment, + populate_request: bool = True, + shallow: bool = False, + ) -> None: + super().__init__( + method=environ.get("REQUEST_METHOD", "GET"), + scheme=environ.get("wsgi.url_scheme", "http"), + server=_get_server(environ), + root_path=_wsgi_decoding_dance(environ.get("SCRIPT_NAME") or ""), + path=_wsgi_decoding_dance(environ.get("PATH_INFO") or ""), + query_string=environ.get("QUERY_STRING", "").encode("latin1"), + headers=EnvironHeaders(environ), + remote_addr=environ.get("REMOTE_ADDR"), + ) + self.environ = environ + self.shallow = shallow + + if populate_request and not shallow: + self.environ["werkzeug.request"] = self + + @classmethod + def from_values(cls, *args: t.Any, **kwargs: t.Any) -> Request: + """Create a new request object based on the values provided. If + environ is given missing values are filled from there. This method is + useful for small scripts when you need to simulate a request from an URL. + Do not use this method for unittesting, there is a full featured client + object (:class:`Client`) that allows to create multipart requests, + support for cookies etc. + + This accepts the same options as the + :class:`~werkzeug.test.EnvironBuilder`. + + .. versionchanged:: 0.5 + This method now accepts the same arguments as + :class:`~werkzeug.test.EnvironBuilder`. Because of this the + `environ` parameter is now called `environ_overrides`. + + :return: request object + """ + from ..test import EnvironBuilder + + kwargs.setdefault( + "charset", cls.charset if not isinstance(cls.charset, property) else None + ) + builder = EnvironBuilder(*args, **kwargs) + try: + return builder.get_request(cls) + finally: + builder.close() + + @classmethod + def application(cls, f: t.Callable[[Request], WSGIApplication]) -> WSGIApplication: + """Decorate a function as responder that accepts the request as + the last argument. This works like the :func:`responder` + decorator but the function is passed the request object as the + last argument and the request object will be closed + automatically:: + + @Request.application + def my_wsgi_app(request): + return Response('Hello World!') + + As of Werkzeug 0.14 HTTP exceptions are automatically caught and + converted to responses instead of failing. + + :param f: the WSGI callable to decorate + :return: a new WSGI callable + """ + #: return a callable that wraps the -2nd argument with the request + #: and calls the function with all the arguments up to that one and + #: the request. The return value is then called with the latest + #: two arguments. This makes it possible to use this decorator for + #: both standalone WSGI functions as well as bound methods and + #: partially applied functions. + from ..exceptions import HTTPException + + @functools.wraps(f) + def application(*args): # type: ignore + request = cls(args[-2]) + with request: + try: + resp = f(*args[:-2] + (request,)) + except HTTPException as e: + resp = e.get_response(args[-2]) + return resp(*args[-2:]) + + return t.cast("WSGIApplication", application) + + def _get_file_stream( + self, + total_content_length: int | None, + content_type: str | None, + filename: str | None = None, + content_length: int | None = None, + ) -> t.IO[bytes]: + """Called to get a stream for the file upload. + + This must provide a file-like class with `read()`, `readline()` + and `seek()` methods that is both writeable and readable. + + The default implementation returns a temporary file if the total + content length is higher than 500KB. 
Because many browsers do not + provide a content length for the files only the total content + length matters. + + :param total_content_length: the total content length of all the + data in the request combined. This value + is guaranteed to be there. + :param content_type: the mimetype of the uploaded file. + :param filename: the filename of the uploaded file. May be `None`. + :param content_length: the length of this file. This value is usually + not provided because webbrowsers do not provide + this value. + """ + return default_stream_factory( + total_content_length=total_content_length, + filename=filename, + content_type=content_type, + content_length=content_length, + ) + + @property + def want_form_data_parsed(self) -> bool: + """``True`` if the request method carries content. By default + this is true if a ``Content-Type`` is sent. + + .. versionadded:: 0.8 + """ + return bool(self.environ.get("CONTENT_TYPE")) + + def make_form_data_parser(self) -> FormDataParser: + """Creates the form data parser. Instantiates the + :attr:`form_data_parser_class` with some parameters. + + .. versionadded:: 0.8 + """ + charset = self._charset if self._charset != "utf-8" else None + errors = self._encoding_errors if self._encoding_errors != "replace" else None + return self.form_data_parser_class( + stream_factory=self._get_file_stream, + charset=charset, + errors=errors, + max_form_memory_size=self.max_form_memory_size, + max_content_length=self.max_content_length, + max_form_parts=self.max_form_parts, + cls=self.parameter_storage_class, + ) + + def _load_form_data(self) -> None: + """Method used internally to retrieve submitted data. After calling + this sets `form` and `files` on the request object to multi dicts + filled with the incoming form data. As a matter of fact the input + stream will be empty afterwards. You can also call this method to + force the parsing of the form data. + + .. versionadded:: 0.8 + """ + # abort early if we have already consumed the stream + if "form" in self.__dict__: + return + + if self.want_form_data_parsed: + parser = self.make_form_data_parser() + data = parser.parse( + self._get_stream_for_parsing(), + self.mimetype, + self.content_length, + self.mimetype_params, + ) + else: + data = ( + self.stream, + self.parameter_storage_class(), + self.parameter_storage_class(), + ) + + # inject the values into the instance dict so that we bypass + # our cached_property non-data descriptor. + d = self.__dict__ + d["stream"], d["form"], d["files"] = data + + def _get_stream_for_parsing(self) -> t.IO[bytes]: + """This is the same as accessing :attr:`stream` with the difference + that if it finds cached data from calling :meth:`get_data` first it + will create a new stream out of the cached data. + + .. versionadded:: 0.9.3 + """ + cached_data = getattr(self, "_cached_data", None) + if cached_data is not None: + return BytesIO(cached_data) + return self.stream + + def close(self) -> None: + """Closes associated resources of this request object. This + closes all file handles explicitly. You can also use the request + object in a with statement which will automatically close it. + + .. versionadded:: 0.9 + """ + files = self.__dict__.get("files") + for _key, value in iter_multi_items(files or ()): + value.close() + + def __enter__(self) -> Request: + return self + + def __exit__(self, exc_type, exc_value, tb) -> None: # type: ignore + self.close() + + @cached_property + def stream(self) -> t.IO[bytes]: + """The WSGI input stream, with safety checks. 
This stream can only be consumed + once. + + Use :meth:`get_data` to get the full data as bytes or text. The :attr:`data` + attribute will contain the full bytes only if they do not represent form data. + The :attr:`form` attribute will contain the parsed form data in that case. + + Unlike :attr:`input_stream`, this stream guards against infinite streams or + reading past :attr:`content_length` or :attr:`max_content_length`. + + If ``max_content_length`` is set, it can be enforced on streams if + ``wsgi.input_terminated`` is set. Otherwise, an empty stream is returned. + + If the limit is reached before the underlying stream is exhausted (such as a + file that is too large, or an infinite stream), the remaining contents of the + stream cannot be read safely. Depending on how the server handles this, clients + may show a "connection reset" failure instead of seeing the 413 response. + + .. versionchanged:: 2.3 + Check ``max_content_length`` preemptively and while reading. + + .. versionchanged:: 0.9 + The stream is always set (but may be consumed) even if form parsing was + accessed first. + """ + if self.shallow: + raise RuntimeError( + "This request was created with 'shallow=True', reading" + " from the input stream is disabled." + ) + + return get_input_stream( + self.environ, max_content_length=self.max_content_length + ) + + input_stream = environ_property[t.IO[bytes]]( + "wsgi.input", + doc="""The raw WSGI input stream, without any safety checks. + + This is dangerous to use. It does not guard against infinite streams or reading + past :attr:`content_length` or :attr:`max_content_length`. + + Use :attr:`stream` instead. + """, + ) + + @cached_property + def data(self) -> bytes: + """The raw data read from :attr:`stream`. Will be empty if the request + represents form data. + + To get the raw data even if it represents form data, use :meth:`get_data`. + """ + return self.get_data(parse_form_data=True) + + @t.overload + def get_data( # type: ignore + self, + cache: bool = True, + as_text: t.Literal[False] = False, + parse_form_data: bool = False, + ) -> bytes: + ... + + @t.overload + def get_data( + self, + cache: bool = True, + as_text: t.Literal[True] = ..., + parse_form_data: bool = False, + ) -> str: + ... + + def get_data( + self, cache: bool = True, as_text: bool = False, parse_form_data: bool = False + ) -> bytes | str: + """This reads the buffered incoming data from the client into one + bytes object. By default this is cached but that behavior can be + changed by setting `cache` to `False`. + + Usually it's a bad idea to call this method without checking the + content length first as a client could send dozens of megabytes or more + to cause memory problems on the server. + + Note that if the form data was already parsed this method will not + return anything as form data parsing does not cache the data like + this method does. To implicitly invoke form data parsing function + set `parse_form_data` to `True`. When this is done the return value + of this method will be an empty string if the form parser handles + the data. This generally is not necessary as if the whole data is + cached (which is the default) the form parser will used the cached + data to parse the form data. Please be generally aware of checking + the content length first in any case before calling this method + to avoid exhausting server memory. + + If `as_text` is set to `True` the return value will be a decoded + string. + + .. 
versionadded:: 0.9 + """ + rv = getattr(self, "_cached_data", None) + if rv is None: + if parse_form_data: + self._load_form_data() + rv = self.stream.read() + if cache: + self._cached_data = rv + if as_text: + rv = rv.decode(self._charset, self._encoding_errors) + return rv + + @cached_property + def form(self) -> ImmutableMultiDict[str, str]: + """The form parameters. By default an + :class:`~werkzeug.datastructures.ImmutableMultiDict` + is returned from this function. This can be changed by setting + :attr:`parameter_storage_class` to a different type. This might + be necessary if the order of the form data is important. + + Please keep in mind that file uploads will not end up here, but instead + in the :attr:`files` attribute. + + .. versionchanged:: 0.9 + + Previous to Werkzeug 0.9 this would only contain form data for POST + and PUT requests. + """ + self._load_form_data() + return self.form + + @cached_property + def values(self) -> CombinedMultiDict[str, str]: + """A :class:`werkzeug.datastructures.CombinedMultiDict` that + combines :attr:`args` and :attr:`form`. + + For GET requests, only ``args`` are present, not ``form``. + + .. versionchanged:: 2.0 + For GET requests, only ``args`` are present, not ``form``. + """ + sources = [self.args] + + if self.method != "GET": + # GET requests can have a body, and some caching proxies + # might not treat that differently than a normal GET + # request, allowing form data to "invisibly" affect the + # cache without indication in the query string / URL. + sources.append(self.form) + + args = [] + + for d in sources: + if not isinstance(d, MultiDict): + d = MultiDict(d) + + args.append(d) + + return CombinedMultiDict(args) + + @cached_property + def files(self) -> ImmutableMultiDict[str, FileStorage]: + """:class:`~werkzeug.datastructures.MultiDict` object containing + all uploaded files. Each key in :attr:`files` is the name from the + ``<input type="file" name="">``. Each value in :attr:`files` is a + Werkzeug :class:`~werkzeug.datastructures.FileStorage` object. + + It basically behaves like a standard file object you know from Python, + with the difference that it also has a + :meth:`~werkzeug.datastructures.FileStorage.save` function that can + store the file on the filesystem. + + Note that :attr:`files` will only contain data if the request method was + POST, PUT or PATCH and the ``<form>`` that posted to the request had + ``enctype="multipart/form-data"``. It will be empty otherwise. + + See the :class:`~werkzeug.datastructures.MultiDict` / + :class:`~werkzeug.datastructures.FileStorage` documentation for + more details about the used data structure. + """ + self._load_form_data() + return self.files + + @property + def script_root(self) -> str: + """Alias for :attr:`self.root_path`. ``environ["SCRIPT_ROOT"]`` + without a trailing slash. + """ + return self.root_path + + @cached_property + def url_root(self) -> str: + """Alias for :attr:`root_url`. The URL with scheme, host, and + root path. For example, ``https://example.com/app/``. 
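+
+        (Editor's illustration) For a request to
+        ``https://example.com/app/page`` with the application mounted at
+        ``/app``, :attr:`script_root` is ``/app`` while ``url_root`` is
+        ``https://example.com/app/``.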
+ """ + return self.root_url + + remote_user = environ_property[str]( + "REMOTE_USER", + doc="""If the server supports user authentication, and the + script is protected, this attribute contains the username the + user has authenticated as.""", + ) + is_multithread = environ_property[bool]( + "wsgi.multithread", + doc="""boolean that is `True` if the application is served by a + multithreaded WSGI server.""", + ) + is_multiprocess = environ_property[bool]( + "wsgi.multiprocess", + doc="""boolean that is `True` if the application is served by a + WSGI server that spawns multiple processes.""", + ) + is_run_once = environ_property[bool]( + "wsgi.run_once", + doc="""boolean that is `True` if the application will be + executed only once in a process lifetime. This is the case for + CGI for example, but it's not guaranteed that the execution only + happens one time.""", + ) + + # JSON + + #: A module or other object that has ``dumps`` and ``loads`` + #: functions that match the API of the built-in :mod:`json` module. + json_module = json + + @property + def json(self) -> t.Any | None: + """The parsed JSON data if :attr:`mimetype` indicates JSON + (:mimetype:`application/json`, see :attr:`is_json`). + + Calls :meth:`get_json` with default arguments. + + If the request content type is not ``application/json``, this + will raise a 415 Unsupported Media Type error. + + .. versionchanged:: 2.3 + Raise a 415 error instead of 400. + + .. versionchanged:: 2.1 + Raise a 400 error if the content type is incorrect. + """ + return self.get_json() + + # Cached values for ``(silent=False, silent=True)``. Initialized + # with sentinel values. + _cached_json: tuple[t.Any, t.Any] = (Ellipsis, Ellipsis) + + @t.overload + def get_json( + self, force: bool = ..., silent: t.Literal[False] = ..., cache: bool = ... + ) -> t.Any: + ... + + @t.overload + def get_json( + self, force: bool = ..., silent: bool = ..., cache: bool = ... + ) -> t.Any | None: + ... + + def get_json( + self, force: bool = False, silent: bool = False, cache: bool = True + ) -> t.Any | None: + """Parse :attr:`data` as JSON. + + If the mimetype does not indicate JSON + (:mimetype:`application/json`, see :attr:`is_json`), or parsing + fails, :meth:`on_json_loading_failed` is called and + its return value is used as the return value. By default this + raises a 415 Unsupported Media Type resp. + + :param force: Ignore the mimetype and always try to parse JSON. + :param silent: Silence mimetype and parsing errors, and + return ``None`` instead. + :param cache: Store the parsed JSON to return for subsequent + calls. + + .. versionchanged:: 2.3 + Raise a 415 error instead of 400. + + .. versionchanged:: 2.1 + Raise a 400 error if the content type is incorrect. + """ + if cache and self._cached_json[silent] is not Ellipsis: + return self._cached_json[silent] + + if not (force or self.is_json): + if not silent: + return self.on_json_loading_failed(None) + else: + return None + + data = self.get_data(cache=cache) + + try: + rv = self.json_module.loads(data) + except ValueError as e: + if silent: + rv = None + + if cache: + normal_rv, _ = self._cached_json + self._cached_json = (normal_rv, rv) + else: + rv = self.on_json_loading_failed(e) + + if cache: + _, silent_rv = self._cached_json + self._cached_json = (rv, silent_rv) + else: + if cache: + self._cached_json = (rv, rv) + + return rv + + def on_json_loading_failed(self, e: ValueError | None) -> t.Any: + """Called if :meth:`get_json` fails and isn't silenced. 
+ + If this method returns a value, it is used as the return value + for :meth:`get_json`. The default implementation raises + :exc:`~werkzeug.exceptions.BadRequest`. + + :param e: If parsing failed, this is the exception. It will be + ``None`` if the content type wasn't ``application/json``. + + .. versionchanged:: 2.3 + Raise a 415 error instead of 400. + """ + if e is not None: + raise BadRequest(f"Failed to decode JSON object: {e}") + + raise UnsupportedMediaType( + "Did not attempt to load JSON data because the request" + " Content-Type was not 'application/json'." + ) diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/response.py b/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/response.py new file mode 100644 index 0000000000000000000000000000000000000000..c8488094e11afd5b9e785a27ca6a38b5a6ce8d77 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/wrappers/response.py @@ -0,0 +1,835 @@ +from __future__ import annotations + +import json +import typing as t +from http import HTTPStatus +from urllib.parse import urljoin + +from ..datastructures import Headers +from ..http import remove_entity_headers +from ..sansio.response import Response as _SansIOResponse +from ..urls import _invalid_iri_to_uri +from ..urls import iri_to_uri +from ..utils import cached_property +from ..wsgi import ClosingIterator +from ..wsgi import get_current_url +from werkzeug._internal import _get_environ +from werkzeug.http import generate_etag +from werkzeug.http import http_date +from werkzeug.http import is_resource_modified +from werkzeug.http import parse_etags +from werkzeug.http import parse_range_header +from werkzeug.wsgi import _RangeWrapper + +if t.TYPE_CHECKING: + from _typeshed.wsgi import StartResponse + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + from .request import Request + + +def _iter_encoded(iterable: t.Iterable[str | bytes], charset: str) -> t.Iterator[bytes]: + for item in iterable: + if isinstance(item, str): + yield item.encode(charset) + else: + yield item + + +class Response(_SansIOResponse): + """Represents an outgoing WSGI HTTP response with body, status, and + headers. Has properties and methods for using the functionality + defined by various HTTP specs. + + The response body is flexible to support different use cases. The + simple form is passing bytes, or a string which will be encoded as + UTF-8. Passing an iterable of bytes or strings makes this a + streaming response. A generator is particularly useful for building + a CSV file in memory or using SSE (Server Sent Events). A file-like + object is also iterable, although the + :func:`~werkzeug.utils.send_file` helper should be used in that + case. + + The response object is itself a WSGI application callable. When + called (:meth:`__call__`) with ``environ`` and ``start_response``, + it will pass its status and headers to ``start_response`` then + return its body as an iterable. + + .. code-block:: python + + from werkzeug.wrappers.response import Response + + def index(): + return Response("Hello, World!") + + def application(environ, start_response): + path = environ.get("PATH_INFO") or "/" + + if path == "/": + response = index() + else: + response = Response("Not Found", status=404) + + return response(environ, start_response) + + :param response: The data for the body of the response. 
A string or + bytes, or tuple or list of strings or bytes, for a fixed-length + response, or any other iterable of strings or bytes for a + streaming response. Defaults to an empty body. + :param status: The status code for the response. Either an int, in + which case the default status message is added, or a string in + the form ``{code} {message}``, like ``404 Not Found``. Defaults + to 200. + :param headers: A :class:`~werkzeug.datastructures.Headers` object, + or a list of ``(key, value)`` tuples that will be converted to a + ``Headers`` object. + :param mimetype: The mime type (content type without charset or + other parameters) of the response. If the value starts with + ``text/`` (or matches some other special cases), the charset + will be added to create the ``content_type``. + :param content_type: The full content type of the response. + Overrides building the value from ``mimetype``. + :param direct_passthrough: Pass the response body directly through + as the WSGI iterable. This can be used when the body is a binary + file or other iterator of bytes, to skip some unnecessary + checks. Use :func:`~werkzeug.utils.send_file` instead of setting + this manually. + + .. versionchanged:: 2.1 + Old ``BaseResponse`` and mixin classes were removed. + + .. versionchanged:: 2.0 + Combine ``BaseResponse`` and mixins into a single ``Response`` + class. + + .. versionchanged:: 0.5 + The ``direct_passthrough`` parameter was added. + """ + + #: if set to `False` accessing properties on the response object will + #: not try to consume the response iterator and convert it into a list. + #: + #: .. versionadded:: 0.6.2 + #: + #: That attribute was previously called `implicit_seqence_conversion`. + #: (Notice the typo). If you did use this feature, you have to adapt + #: your code to the name change. + implicit_sequence_conversion = True + + #: If a redirect ``Location`` header is a relative URL, make it an + #: absolute URL, including scheme and domain. + #: + #: .. versionchanged:: 2.1 + #: This is disabled by default, so responses will send relative + #: redirects. + #: + #: .. versionadded:: 0.8 + autocorrect_location_header = False + + #: Should this response object automatically set the content-length + #: header if possible? This is true by default. + #: + #: .. versionadded:: 0.8 + automatically_set_content_length = True + + #: The response body to send as the WSGI iterable. A list of strings + #: or bytes represents a fixed-length response, any other iterable + #: is a streaming response. Strings are encoded to bytes as UTF-8. + #: + #: Do not set to a plain string or bytes, that will cause sending + #: the response to be very inefficient as it will iterate one byte + #: at a time. + response: t.Iterable[str] | t.Iterable[bytes] + + def __init__( + self, + response: t.Iterable[bytes] | bytes | t.Iterable[str] | str | None = None, + status: int | str | HTTPStatus | None = None, + headers: t.Mapping[str, str | t.Iterable[str]] + | t.Iterable[tuple[str, str]] + | None = None, + mimetype: str | None = None, + content_type: str | None = None, + direct_passthrough: bool = False, + ) -> None: + super().__init__( + status=status, + headers=headers, + mimetype=mimetype, + content_type=content_type, + ) + + #: Pass the response body directly through as the WSGI iterable. + #: This can be used when the body is a binary file or other + #: iterator of bytes, to skip some unnecessary checks. Use + #: :func:`~werkzeug.utils.send_file` instead of setting this + #: manually. 
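+        #:
+        #: :func:`~werkzeug.utils.send_file` sets this flag itself when
+        #: it hands a wrapped file to the WSGI server.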
+ self.direct_passthrough = direct_passthrough + self._on_close: list[t.Callable[[], t.Any]] = [] + + # we set the response after the headers so that if a class changes + # the charset attribute, the data is set in the correct charset. + if response is None: + self.response = [] + elif isinstance(response, (str, bytes, bytearray)): + self.set_data(response) + else: + self.response = response + + def call_on_close(self, func: t.Callable[[], t.Any]) -> t.Callable[[], t.Any]: + """Adds a function to the internal list of functions that should + be called as part of closing down the response. Since 0.7 this + function also returns the function that was passed so that this + can be used as a decorator. + + .. versionadded:: 0.6 + """ + self._on_close.append(func) + return func + + def __repr__(self) -> str: + if self.is_sequence: + body_info = f"{sum(map(len, self.iter_encoded()))} bytes" + else: + body_info = "streamed" if self.is_streamed else "likely-streamed" + return f"<{type(self).__name__} {body_info} [{self.status}]>" + + @classmethod + def force_type( + cls, response: Response, environ: WSGIEnvironment | None = None + ) -> Response: + """Enforce that the WSGI response is a response object of the current + type. Werkzeug will use the :class:`Response` internally in many + situations like the exceptions. If you call :meth:`get_response` on an + exception you will get back a regular :class:`Response` object, even + if you are using a custom subclass. + + This method can enforce a given response type, and it will also + convert arbitrary WSGI callables into response objects if an environ + is provided:: + + # convert a Werkzeug response object into an instance of the + # MyResponseClass subclass. + response = MyResponseClass.force_type(response) + + # convert any WSGI application into a response object + response = MyResponseClass.force_type(response, environ) + + This is especially useful if you want to post-process responses in + the main dispatcher and use functionality provided by your subclass. + + Keep in mind that this will modify response objects in place if + possible! + + :param response: a response object or wsgi application. + :param environ: a WSGI environment object. + :return: a response object. + """ + if not isinstance(response, Response): + if environ is None: + raise TypeError( + "cannot convert WSGI application into response" + " objects without an environ" + ) + + from ..test import run_wsgi_app + + response = Response(*run_wsgi_app(response, environ)) + + response.__class__ = cls + return response + + @classmethod + def from_app( + cls, app: WSGIApplication, environ: WSGIEnvironment, buffered: bool = False + ) -> Response: + """Create a new response object from an application output. This + works best if you pass it an application that returns a generator all + the time. Sometimes applications may use the `write()` callable + returned by the `start_response` function. This tries to resolve such + edge cases automatically. But if you don't get the expected output + you should set `buffered` to `True` which enforces buffering. + + :param app: the WSGI application to execute. + :param environ: the WSGI environment to execute against. + :param buffered: set to `True` to enforce buffering. + :return: a response object. + """ + from ..test import run_wsgi_app + + return cls(*run_wsgi_app(app, environ, buffered)) + + @t.overload + def get_data(self, as_text: t.Literal[False] = False) -> bytes: + ... + + @t.overload + def get_data(self, as_text: t.Literal[True]) -> str: + ... 
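+
+    # Editor's sketch (not in the original source): get_data() flattens
+    # the body, so Response("hi").get_data() == b"hi" and
+    # Response("hi").get_data(as_text=True) == "hi".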
+
+    def get_data(self, as_text: bool = False) -> bytes | str:
+        """The string representation of the response body. Whenever you call
+        this method the response iterable is encoded and flattened. This
+        can lead to unwanted behavior if you stream big data.
+
+        This behavior can be disabled by setting
+        :attr:`implicit_sequence_conversion` to `False`.
+
+        If `as_text` is set to `True` the return value will be a decoded
+        string.
+
+        .. versionadded:: 0.9
+        """
+        self._ensure_sequence()
+        rv = b"".join(self.iter_encoded())
+
+        if as_text:
+            return rv.decode(self._charset)
+
+        return rv
+
+    def set_data(self, value: bytes | str) -> None:
+        """Sets a new string as the response body. The value must be a string
+        or bytes. If a string is set it's encoded to the charset of the
+        response (utf-8 by default).
+
+        .. versionadded:: 0.9
+        """
+        if isinstance(value, str):
+            value = value.encode(self._charset)
+        self.response = [value]
+        if self.automatically_set_content_length:
+            self.headers["Content-Length"] = str(len(value))
+
+    data = property(
+        get_data,
+        set_data,
+        doc="A descriptor that calls :meth:`get_data` and :meth:`set_data`.",
+    )
+
+    def calculate_content_length(self) -> int | None:
+        """Returns the content length if available or `None` otherwise."""
+        try:
+            self._ensure_sequence()
+        except RuntimeError:
+            return None
+        return sum(len(x) for x in self.iter_encoded())
+
+    def _ensure_sequence(self, mutable: bool = False) -> None:
+        """This method can be called by methods that need a sequence. If
+        `mutable` is true, it will also ensure that the response sequence
+        is a standard Python list.
+
+        .. versionadded:: 0.6
+        """
+        if self.is_sequence:
+            # if we need a mutable object, we ensure it's a list.
+            if mutable and not isinstance(self.response, list):
+                self.response = list(self.response)  # type: ignore
+            return
+        if self.direct_passthrough:
+            raise RuntimeError(
+                "Attempted implicit sequence conversion but the"
+                " response object is in direct passthrough mode."
+            )
+        if not self.implicit_sequence_conversion:
+            raise RuntimeError(
+                "The response object required the iterable to be a"
+                " sequence, but the implicit conversion was disabled."
+                " Call make_sequence() yourself."
+            )
+        self.make_sequence()
+
+    def make_sequence(self) -> None:
+        """Converts the response iterator into a list. By default this happens
+        automatically if required. If `implicit_sequence_conversion` is
+        disabled, this method is not automatically called and some properties
+        might raise exceptions. This also encodes all the items.
+
+        .. versionadded:: 0.6
+        """
+        if not self.is_sequence:
+            # if we consume an iterable we have to ensure that the close
+            # method of the iterable is called if available when we tear
+            # down the response
+            close = getattr(self.response, "close", None)
+            self.response = list(self.iter_encoded())
+            if close is not None:
+                self.call_on_close(close)
+
+    def iter_encoded(self) -> t.Iterator[bytes]:
+        """Iterate over the response, encoded with the response charset.
+        If the response object is invoked as a WSGI application the return
+        value of this method is used as the application iterator unless
+        :attr:`direct_passthrough` was activated.
+        """
+        # Encode in a separate function so that self.response is fetched
+        # early. This allows us to wrap the response with the return
+        # value from get_app_iter or iter_encoded.
+        return _iter_encoded(self.response, self._charset)
+
+    @property
+    def is_streamed(self) -> bool:
+        """If the response is streamed (the response is not an iterable with
+        length information) this property is `True`. In this case streamed
+        means that there is no information about the number of iterations.
+        This is usually `True` if a generator is passed to the response object.
+
+        This is useful for checking before applying some sort of post
+        filtering that should not take place for streamed responses.
+        """
+        try:
+            len(self.response)  # type: ignore
+        except (TypeError, AttributeError):
+            return True
+        return False
+
+    @property
+    def is_sequence(self) -> bool:
+        """If the iterator is buffered, this property will be `True`. A
+        response object will consider an iterator to be buffered if the
+        response attribute is a list or tuple.
+
+        .. versionadded:: 0.6
+        """
+        return isinstance(self.response, (tuple, list))
+
+    def close(self) -> None:
+        """Close the wrapped response if possible. You can also use the object
+        in a with statement which will automatically close it.
+
+        .. versionadded:: 0.9
+           Can now be used in a with statement.
+        """
+        if hasattr(self.response, "close"):
+            self.response.close()
+        for func in self._on_close:
+            func()
+
+    def __enter__(self) -> Response:
+        return self
+
+    def __exit__(self, exc_type, exc_value, tb):  # type: ignore
+        self.close()
+
+    def freeze(self) -> None:
+        """Make the response object ready to be pickled. Does the
+        following:
+
+        * Buffer the response into a list, ignoring
+          :attr:`implicit_sequence_conversion` and
+          :attr:`direct_passthrough`.
+        * Set the ``Content-Length`` header.
+        * Generate an ``ETag`` header if one is not already set.
+
+        .. versionchanged:: 2.1
+            Removed the ``no_etag`` parameter.
+
+        .. versionchanged:: 2.0
+            An ``ETag`` header is always added.
+
+        .. versionchanged:: 0.6
+            The ``Content-Length`` header is set.
+        """
+        # Always freeze the encoded response body, ignore
+        # implicit_sequence_conversion and direct_passthrough.
+        self.response = list(self.iter_encoded())
+        self.headers["Content-Length"] = str(sum(map(len, self.response)))
+        self.add_etag()
+
+    def get_wsgi_headers(self, environ: WSGIEnvironment) -> Headers:
+        """This is automatically called right before the response is started
+        and returns headers modified for the given environment. It returns a
+        copy of the headers from the response with some modifications applied
+        if necessary.
+
+        For example the location header (if present) is joined with the root
+        URL of the environment. Also the content length is automatically set
+        to zero here for certain status codes.
+
+        .. versionchanged:: 0.6
+           Previously that function was called `fix_headers` and modified
+           the response object in place. Also since 0.6, IRIs in location
+           and content-location headers are handled properly.
+
+           Also starting with 0.6, Werkzeug will attempt to set the content
+           length if it is able to figure it out on its own. This is the
+           case if all the strings in the response iterable are already
+           encoded and the iterable is buffered.
+
+        :param environ: the WSGI environment of the request.
+        :return: returns a new :class:`~werkzeug.datastructures.Headers`
+                 object.
+        """
+        headers = Headers(self.headers)
+        location: str | None = None
+        content_location: str | None = None
+        content_length: str | int | None = None
+        status = self.status_code
+
+        # iterate over the headers to find all values in one go. Because
+        # get_wsgi_headers is called for each response, this gives us a
+        # tiny speedup.
+ for key, value in headers: + ikey = key.lower() + if ikey == "location": + location = value + elif ikey == "content-location": + content_location = value + elif ikey == "content-length": + content_length = value + + if location is not None: + location = _invalid_iri_to_uri(location) + + if self.autocorrect_location_header: + # Make the location header an absolute URL. + current_url = get_current_url(environ, strip_querystring=True) + current_url = iri_to_uri(current_url) + location = urljoin(current_url, location) + + headers["Location"] = location + + # make sure the content location is a URL + if content_location is not None: + headers["Content-Location"] = iri_to_uri(content_location) + + if 100 <= status < 200 or status == 204: + # Per section 3.3.2 of RFC 7230, "a server MUST NOT send a + # Content-Length header field in any response with a status + # code of 1xx (Informational) or 204 (No Content)." + headers.remove("Content-Length") + elif status == 304: + remove_entity_headers(headers) + + # if we can determine the content length automatically, we + # should try to do that. But only if this does not involve + # flattening the iterator or encoding of strings in the + # response. We however should not do that if we have a 304 + # response. + if ( + self.automatically_set_content_length + and self.is_sequence + and content_length is None + and status not in (204, 304) + and not (100 <= status < 200) + ): + content_length = sum(len(x) for x in self.iter_encoded()) + headers["Content-Length"] = str(content_length) + + return headers + + def get_app_iter(self, environ: WSGIEnvironment) -> t.Iterable[bytes]: + """Returns the application iterator for the given environ. Depending + on the request method and the current status code the return value + might be an empty response rather than the one from the response. + + If the request method is `HEAD` or the status code is in a range + where the HTTP specification requires an empty response, an empty + iterable is returned. + + .. versionadded:: 0.6 + + :param environ: the WSGI environment of the request. + :return: a response iterable. + """ + status = self.status_code + if ( + environ["REQUEST_METHOD"] == "HEAD" + or 100 <= status < 200 + or status in (204, 304) + ): + iterable: t.Iterable[bytes] = () + elif self.direct_passthrough: + return self.response # type: ignore + else: + iterable = self.iter_encoded() + return ClosingIterator(iterable, self.close) + + def get_wsgi_response( + self, environ: WSGIEnvironment + ) -> tuple[t.Iterable[bytes], str, list[tuple[str, str]]]: + """Returns the final WSGI response as tuple. The first item in + the tuple is the application iterator, the second the status and + the third the list of headers. The response returned is created + specially for the given environment. For example if the request + method in the WSGI environment is ``'HEAD'`` the response will + be empty and only the headers and status code will be present. + + .. versionadded:: 0.6 + + :param environ: the WSGI environment of the request. + :return: an ``(app_iter, status, headers)`` tuple. + """ + headers = self.get_wsgi_headers(environ) + app_iter = self.get_app_iter(environ) + return app_iter, self.status, headers.to_wsgi_list() + + def __call__( + self, environ: WSGIEnvironment, start_response: StartResponse + ) -> t.Iterable[bytes]: + """Process this response as WSGI application. + + :param environ: the WSGI environment. + :param start_response: the response callable provided by the WSGI + server. 
+ :return: an application iterator + """ + app_iter, status, headers = self.get_wsgi_response(environ) + start_response(status, headers) + return app_iter + + # JSON + + #: A module or other object that has ``dumps`` and ``loads`` + #: functions that match the API of the built-in :mod:`json` module. + json_module = json + + @property + def json(self) -> t.Any | None: + """The parsed JSON data if :attr:`mimetype` indicates JSON + (:mimetype:`application/json`, see :attr:`is_json`). + + Calls :meth:`get_json` with default arguments. + """ + return self.get_json() + + @t.overload + def get_json(self, force: bool = ..., silent: t.Literal[False] = ...) -> t.Any: + ... + + @t.overload + def get_json(self, force: bool = ..., silent: bool = ...) -> t.Any | None: + ... + + def get_json(self, force: bool = False, silent: bool = False) -> t.Any | None: + """Parse :attr:`data` as JSON. Useful during testing. + + If the mimetype does not indicate JSON + (:mimetype:`application/json`, see :attr:`is_json`), this + returns ``None``. + + Unlike :meth:`Request.get_json`, the result is not cached. + + :param force: Ignore the mimetype and always try to parse JSON. + :param silent: Silence parsing errors and return ``None`` + instead. + """ + if not (force or self.is_json): + return None + + data = self.get_data() + + try: + return self.json_module.loads(data) + except ValueError: + if not silent: + raise + + return None + + # Stream + + @cached_property + def stream(self) -> ResponseStream: + """The response iterable as write-only stream.""" + return ResponseStream(self) + + def _wrap_range_response(self, start: int, length: int) -> None: + """Wrap existing Response in case of Range Request context.""" + if self.status_code == 206: + self.response = _RangeWrapper(self.response, start, length) # type: ignore + + def _is_range_request_processable(self, environ: WSGIEnvironment) -> bool: + """Return ``True`` if `Range` header is present and if underlying + resource is considered unchanged when compared with `If-Range` header. + """ + return ( + "HTTP_IF_RANGE" not in environ + or not is_resource_modified( + environ, + self.headers.get("etag"), + None, + self.headers.get("last-modified"), + ignore_if_range=False, + ) + ) and "HTTP_RANGE" in environ + + def _process_range_request( + self, + environ: WSGIEnvironment, + complete_length: int | None, + accept_ranges: bool | str, + ) -> bool: + """Handle Range Request related headers (RFC7233). If `Accept-Ranges` + header is valid, and Range Request is processable, we set the headers + as described by the RFC, and wrap the underlying response in a + RangeWrapper. + + Returns ``True`` if Range Request can be fulfilled, ``False`` otherwise. + + :raises: :class:`~werkzeug.exceptions.RequestedRangeNotSatisfiable` + if `Range` header could not be parsed or satisfied. + + .. versionchanged:: 2.0 + Returns ``False`` if the length is 0. 
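+
+        A rough sketch of a fulfilled request (hypothetical values,
+        ``Range: bytes=0-4`` against a resource of complete length 10)::
+
+            206 Partial Content
+            Content-Length: 5
+            Content-Range: bytes 0-4/10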
+ """ + from ..exceptions import RequestedRangeNotSatisfiable + + if ( + not accept_ranges + or complete_length is None + or complete_length == 0 + or not self._is_range_request_processable(environ) + ): + return False + + if accept_ranges is True: + accept_ranges = "bytes" + + parsed_range = parse_range_header(environ.get("HTTP_RANGE")) + + if parsed_range is None: + raise RequestedRangeNotSatisfiable(complete_length) + + range_tuple = parsed_range.range_for_length(complete_length) + content_range_header = parsed_range.to_content_range_header(complete_length) + + if range_tuple is None or content_range_header is None: + raise RequestedRangeNotSatisfiable(complete_length) + + content_length = range_tuple[1] - range_tuple[0] + self.headers["Content-Length"] = str(content_length) + self.headers["Accept-Ranges"] = accept_ranges + self.content_range = content_range_header # type: ignore + self.status_code = 206 + self._wrap_range_response(range_tuple[0], content_length) + return True + + def make_conditional( + self, + request_or_environ: WSGIEnvironment | Request, + accept_ranges: bool | str = False, + complete_length: int | None = None, + ) -> Response: + """Make the response conditional to the request. This method works + best if an etag was defined for the response already. The `add_etag` + method can be used to do that. If called without etag just the date + header is set. + + This does nothing if the request method in the request or environ is + anything but GET or HEAD. + + For optimal performance when handling range requests, it's recommended + that your response data object implements `seekable`, `seek` and `tell` + methods as described by :py:class:`io.IOBase`. Objects returned by + :meth:`~werkzeug.wsgi.wrap_file` automatically implement those methods. + + It does not remove the body of the response because that's something + the :meth:`__call__` function does for us automatically. + + Returns self so that you can do ``return resp.make_conditional(req)`` + but modifies the object in-place. + + :param request_or_environ: a request object or WSGI environment to be + used to make the response conditional + against. + :param accept_ranges: This parameter dictates the value of + `Accept-Ranges` header. If ``False`` (default), + the header is not set. If ``True``, it will be set + to ``"bytes"``. If it's a string, it will use this + value. + :param complete_length: Will be used only in valid Range Requests. + It will set `Content-Range` complete length + value and compute `Content-Length` real value. + This parameter is mandatory for successful + Range Requests completion. + :raises: :class:`~werkzeug.exceptions.RequestedRangeNotSatisfiable` + if `Range` header could not be parsed or satisfied. + + .. versionchanged:: 2.0 + Range processing is skipped if length is 0 instead of + raising a 416 Range Not Satisfiable error. + """ + environ = _get_environ(request_or_environ) + if environ["REQUEST_METHOD"] in ("GET", "HEAD"): + # if the date is not in the headers, add it now. We however + # will not override an already existing header. Unfortunately + # this header will be overridden by many WSGI servers including + # wsgiref. 
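+            # A typical caller pattern (hypothetical application code, not
+            # part of Werkzeug itself):
+            #
+            #     resp.add_etag()
+            #     return resp.make_conditional(request)  # may become a 304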
+ if "date" not in self.headers: + self.headers["Date"] = http_date() + is206 = self._process_range_request(environ, complete_length, accept_ranges) + if not is206 and not is_resource_modified( + environ, + self.headers.get("etag"), + None, + self.headers.get("last-modified"), + ): + if parse_etags(environ.get("HTTP_IF_MATCH")): + self.status_code = 412 + else: + self.status_code = 304 + if ( + self.automatically_set_content_length + and "content-length" not in self.headers + ): + length = self.calculate_content_length() + if length is not None: + self.headers["Content-Length"] = str(length) + return self + + def add_etag(self, overwrite: bool = False, weak: bool = False) -> None: + """Add an etag for the current response if there is none yet. + + .. versionchanged:: 2.0 + SHA-1 is used to generate the value. MD5 may not be + available in some environments. + """ + if overwrite or "etag" not in self.headers: + self.set_etag(generate_etag(self.get_data()), weak) + + +class ResponseStream: + """A file descriptor like object used by :meth:`Response.stream` to + represent the body of the stream. It directly pushes into the + response iterable of the response object. + """ + + mode = "wb+" + + def __init__(self, response: Response): + self.response = response + self.closed = False + + def write(self, value: bytes) -> int: + if self.closed: + raise ValueError("I/O operation on closed file") + self.response._ensure_sequence(mutable=True) + self.response.response.append(value) # type: ignore + self.response.headers.pop("Content-Length", None) + return len(value) + + def writelines(self, seq: t.Iterable[bytes]) -> None: + for item in seq: + self.write(item) + + def close(self) -> None: + self.closed = True + + def flush(self) -> None: + if self.closed: + raise ValueError("I/O operation on closed file") + + def isatty(self) -> bool: + if self.closed: + raise ValueError("I/O operation on closed file") + return False + + def tell(self) -> int: + self.response._ensure_sequence() + return sum(map(len, self.response.response)) + + @property + def encoding(self) -> str: + return self.response._charset diff --git a/backend/test/lib/python3.8/site-packages/werkzeug/wsgi.py b/backend/test/lib/python3.8/site-packages/werkzeug/wsgi.py new file mode 100644 index 0000000000000000000000000000000000000000..6061e114114b8e38f2b2784274913fd3b512eae2 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/werkzeug/wsgi.py @@ -0,0 +1,847 @@ +from __future__ import annotations + +import io +import re +import typing as t +import warnings +from functools import partial +from functools import update_wrapper +from itertools import chain + +from ._internal import _make_encode_wrapper +from ._internal import _to_bytes +from ._internal import _to_str +from .exceptions import ClientDisconnected +from .exceptions import RequestEntityTooLarge +from .sansio import utils as _sansio_utils +from .sansio.utils import host_is_trusted # noqa: F401 # Imported as part of API + +if t.TYPE_CHECKING: + from _typeshed.wsgi import WSGIApplication + from _typeshed.wsgi import WSGIEnvironment + + +def responder(f: t.Callable[..., WSGIApplication]) -> WSGIApplication: + """Marks a function as responder. Decorate a function with it and it + will automatically call the return value as WSGI application. 
+ + Example:: + + @responder + def application(environ, start_response): + return Response('Hello World!') + """ + return update_wrapper(lambda *a: f(*a)(*a[-2:]), f) + + +def get_current_url( + environ: WSGIEnvironment, + root_only: bool = False, + strip_querystring: bool = False, + host_only: bool = False, + trusted_hosts: t.Iterable[str] | None = None, +) -> str: + """Recreate the URL for a request from the parts in a WSGI + environment. + + The URL is an IRI, not a URI, so it may contain Unicode characters. + Use :func:`~werkzeug.urls.iri_to_uri` to convert it to ASCII. + + :param environ: The WSGI environment to get the URL parts from. + :param root_only: Only build the root path, don't include the + remaining path or query string. + :param strip_querystring: Don't include the query string. + :param host_only: Only build the scheme and host. + :param trusted_hosts: A list of trusted host names to validate the + host against. + """ + parts = { + "scheme": environ["wsgi.url_scheme"], + "host": get_host(environ, trusted_hosts), + } + + if not host_only: + parts["root_path"] = environ.get("SCRIPT_NAME", "") + + if not root_only: + parts["path"] = environ.get("PATH_INFO", "") + + if not strip_querystring: + parts["query_string"] = environ.get("QUERY_STRING", "").encode("latin1") + + return _sansio_utils.get_current_url(**parts) + + +def _get_server( + environ: WSGIEnvironment, +) -> tuple[str, int | None] | None: + name = environ.get("SERVER_NAME") + + if name is None: + return None + + try: + port: int | None = int(environ.get("SERVER_PORT", None)) + except (TypeError, ValueError): + # unix socket + port = None + + return name, port + + +def get_host( + environ: WSGIEnvironment, trusted_hosts: t.Iterable[str] | None = None +) -> str: + """Return the host for the given WSGI environment. + + The ``Host`` header is preferred, then ``SERVER_NAME`` if it's not + set. The returned host will only contain the port if it is different + than the standard port for the protocol. + + Optionally, verify that the host is trusted using + :func:`host_is_trusted` and raise a + :exc:`~werkzeug.exceptions.SecurityError` if it is not. + + :param environ: A WSGI environment dict. + :param trusted_hosts: A list of trusted host names. + + :return: Host, with port if necessary. + :raise ~werkzeug.exceptions.SecurityError: If the host is not + trusted. + """ + return _sansio_utils.get_host( + environ["wsgi.url_scheme"], + environ.get("HTTP_HOST"), + _get_server(environ), + trusted_hosts, + ) + + +def get_content_length(environ: WSGIEnvironment) -> int | None: + """Return the ``Content-Length`` header value as an int. If the header is not given + or the ``Transfer-Encoding`` header is ``chunked``, ``None`` is returned to indicate + a streaming request. If the value is not an integer, or negative, 0 is returned. + + :param environ: The WSGI environ to get the content length from. + + .. versionadded:: 0.9 + """ + return _sansio_utils.get_content_length( + http_content_length=environ.get("CONTENT_LENGTH"), + http_transfer_encoding=environ.get("HTTP_TRANSFER_ENCODING"), + ) + + +def get_input_stream( + environ: WSGIEnvironment, + safe_fallback: bool = True, + max_content_length: int | None = None, +) -> t.IO[bytes]: + """Return the WSGI input stream, wrapped so that it may be read safely without going + past the ``Content-Length`` header value or ``max_content_length``. + + If ``Content-Length`` exceeds ``max_content_length``, a + :exc:`RequestEntityTooLarge`` ``413 Content Too Large`` error is raised. 
+ + If the WSGI server sets ``environ["wsgi.input_terminated"]``, it indicates that the + server handles terminating the stream, so it is safe to read directly. For example, + a server that knows how to handle chunked requests safely would set this. + + If ``max_content_length`` is set, it can be enforced on streams if + ``wsgi.input_terminated`` is set. Otherwise, an empty stream is returned unless the + user explicitly disables this safe fallback. + + If the limit is reached before the underlying stream is exhausted (such as a file + that is too large, or an infinite stream), the remaining contents of the stream + cannot be read safely. Depending on how the server handles this, clients may show a + "connection reset" failure instead of seeing the 413 response. + + :param environ: The WSGI environ containing the stream. + :param safe_fallback: Return an empty stream when ``Content-Length`` is not set. + Disabling this allows infinite streams, which can be a denial-of-service risk. + :param max_content_length: The maximum length that content-length or streaming + requests may not exceed. + + .. versionchanged:: 2.3.2 + ``max_content_length`` is only applied to streaming requests if the server sets + ``wsgi.input_terminated``. + + .. versionchanged:: 2.3 + Check ``max_content_length`` and raise an error if it is exceeded. + + .. versionadded:: 0.9 + """ + stream = t.cast(t.IO[bytes], environ["wsgi.input"]) + content_length = get_content_length(environ) + + if content_length is not None and max_content_length is not None: + if content_length > max_content_length: + raise RequestEntityTooLarge() + + # A WSGI server can set this to indicate that it terminates the input stream. In + # that case the stream is safe without wrapping, or can enforce a max length. + if "wsgi.input_terminated" in environ: + if max_content_length is not None: + # If this is moved above, it can cause the stream to hang if a read attempt + # is made when the client sends no data. For example, the development server + # does not handle buffering except for chunked encoding. + return t.cast( + t.IO[bytes], LimitedStream(stream, max_content_length, is_max=True) + ) + + return stream + + # No limit given, return an empty stream unless the user explicitly allows the + # potentially infinite stream. An infinite stream is dangerous if it's not expected, + # as it can tie up a worker indefinitely. + if content_length is None: + return io.BytesIO() if safe_fallback else stream + + return t.cast(t.IO[bytes], LimitedStream(stream, content_length)) + + +def get_path_info( + environ: WSGIEnvironment, + charset: t.Any = ..., + errors: str | None = None, +) -> str: + """Return ``PATH_INFO`` from the WSGI environment. + + :param environ: WSGI environment to get the path from. + + .. versionchanged:: 2.3 + The ``charset`` and ``errors`` parameters are deprecated and will be removed in + Werkzeug 3.0. + + .. 
versionadded:: 0.9
+    """
+    if charset is not ...:
+        warnings.warn(
+            "The 'charset' parameter is deprecated and will be removed"
+            " in Werkzeug 3.0.",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+
+        if charset is None:
+            charset = "utf-8"
+    else:
+        charset = "utf-8"
+
+    if errors is not None:
+        warnings.warn(
+            "The 'errors' parameter is deprecated and will be removed in Werkzeug 3.0",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+    else:
+        errors = "replace"
+
+    path = environ.get("PATH_INFO", "").encode("latin1")
+    return path.decode(charset, errors)  # type: ignore[no-any-return]
+
+
+class ClosingIterator:
+    """The WSGI specification requires that all middlewares and gateways
+    respect the `close` callback of the iterable returned by the application.
+    Because it is useful to add another close action to a returned iterable,
+    and writing a custom iterable for that is tedious, this class can be
+    used instead::
+
+        return ClosingIterator(app(environ, start_response), [cleanup_session,
+                                                              cleanup_locals])
+
+    If there is just one close function it can be passed instead of the list.
+
+    A closing iterator is not needed if the application uses response objects
+    and finishes processing once the response has started::
+
+        try:
+            return response(environ, start_response)
+        finally:
+            cleanup_session()
+            cleanup_locals()
+    """
+
+    def __init__(
+        self,
+        iterable: t.Iterable[bytes],
+        callbacks: None
+        | (t.Callable[[], None] | t.Iterable[t.Callable[[], None]]) = None,
+    ) -> None:
+        iterator = iter(iterable)
+        self._next = t.cast(t.Callable[[], bytes], partial(next, iterator))
+        if callbacks is None:
+            callbacks = []
+        elif callable(callbacks):
+            callbacks = [callbacks]
+        else:
+            callbacks = list(callbacks)
+        iterable_close = getattr(iterable, "close", None)
+        if iterable_close:
+            callbacks.insert(0, iterable_close)
+        self._callbacks = callbacks
+
+    def __iter__(self) -> ClosingIterator:
+        return self
+
+    def __next__(self) -> bytes:
+        return self._next()
+
+    def close(self) -> None:
+        for callback in self._callbacks:
+            callback()
+
+
+def wrap_file(
+    environ: WSGIEnvironment, file: t.IO[bytes], buffer_size: int = 8192
+) -> t.Iterable[bytes]:
+    """Wraps a file. This uses the WSGI server's file wrapper if available
+    or otherwise the generic :class:`FileWrapper`.
+
+    .. versionadded:: 0.5
+
+    If the file wrapper from the WSGI server is used it's important to not
+    iterate over it from inside the application but to pass it through
+    unchanged. If you want to pass out a file wrapper inside a response
+    object you have to set :attr:`Response.direct_passthrough` to `True`.
+
+    More information about file wrappers is available in :pep:`333`.
+
+    :param file: a :class:`file`-like object with a :meth:`~file.read` method.
+    :param buffer_size: number of bytes for one iteration.
+    """
+    return environ.get("wsgi.file_wrapper", FileWrapper)(  # type: ignore
+        file, buffer_size
+    )
+
+
+class FileWrapper:
+    """This class can be used to convert a :class:`file`-like object into
+    an iterable. It yields `buffer_size` blocks until the file is fully
+    read.
+
+    You should not use this class directly but rather use the
+    :func:`wrap_file` function that uses the WSGI server's file wrapper
+    support if it's available.
+
+    .. versionadded:: 0.5
+
+    If you're using this object together with a :class:`Response` you have
+    to use the `direct_passthrough` mode.
+
+    :param file: a :class:`file`-like object with a :meth:`~file.read` method.
+    :param buffer_size: number of bytes for one iteration.
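+
+    A minimal usage sketch (assuming ``f`` is an already-open binary
+    file)::
+
+        for chunk in FileWrapper(f, buffer_size=4096):
+            ...  # each chunk is at most 4096 bytes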
+ """ + + def __init__(self, file: t.IO[bytes], buffer_size: int = 8192) -> None: + self.file = file + self.buffer_size = buffer_size + + def close(self) -> None: + if hasattr(self.file, "close"): + self.file.close() + + def seekable(self) -> bool: + if hasattr(self.file, "seekable"): + return self.file.seekable() + if hasattr(self.file, "seek"): + return True + return False + + def seek(self, *args: t.Any) -> None: + if hasattr(self.file, "seek"): + self.file.seek(*args) + + def tell(self) -> int | None: + if hasattr(self.file, "tell"): + return self.file.tell() + return None + + def __iter__(self) -> FileWrapper: + return self + + def __next__(self) -> bytes: + data = self.file.read(self.buffer_size) + if data: + return data + raise StopIteration() + + +class _RangeWrapper: + # private for now, but should we make it public in the future ? + + """This class can be used to convert an iterable object into + an iterable that will only yield a piece of the underlying content. + It yields blocks until the underlying stream range is fully read. + The yielded blocks will have a size that can't exceed the original + iterator defined block size, but that can be smaller. + + If you're using this object together with a :class:`Response` you have + to use the `direct_passthrough` mode. + + :param iterable: an iterable object with a :meth:`__next__` method. + :param start_byte: byte from which read will start. + :param byte_range: how many bytes to read. + """ + + def __init__( + self, + iterable: t.Iterable[bytes] | t.IO[bytes], + start_byte: int = 0, + byte_range: int | None = None, + ): + self.iterable = iter(iterable) + self.byte_range = byte_range + self.start_byte = start_byte + self.end_byte = None + + if byte_range is not None: + self.end_byte = start_byte + byte_range + + self.read_length = 0 + self.seekable = hasattr(iterable, "seekable") and iterable.seekable() + self.end_reached = False + + def __iter__(self) -> _RangeWrapper: + return self + + def _next_chunk(self) -> bytes: + try: + chunk = next(self.iterable) + self.read_length += len(chunk) + return chunk + except StopIteration: + self.end_reached = True + raise + + def _first_iteration(self) -> tuple[bytes | None, int]: + chunk = None + if self.seekable: + self.iterable.seek(self.start_byte) # type: ignore + self.read_length = self.iterable.tell() # type: ignore + contextual_read_length = self.read_length + else: + while self.read_length <= self.start_byte: + chunk = self._next_chunk() + if chunk is not None: + chunk = chunk[self.start_byte - self.read_length :] + contextual_read_length = self.start_byte + return chunk, contextual_read_length + + def _next(self) -> bytes: + if self.end_reached: + raise StopIteration() + chunk = None + contextual_read_length = self.read_length + if self.read_length == 0: + chunk, contextual_read_length = self._first_iteration() + if chunk is None: + chunk = self._next_chunk() + if self.end_byte is not None and self.read_length >= self.end_byte: + self.end_reached = True + return chunk[: self.end_byte - contextual_read_length] + return chunk + + def __next__(self) -> bytes: + chunk = self._next() + if chunk: + return chunk + self.end_reached = True + raise StopIteration() + + def close(self) -> None: + if hasattr(self.iterable, "close"): + self.iterable.close() + + +def _make_chunk_iter( + stream: t.Iterable[bytes] | t.IO[bytes], + limit: int | None, + buffer_size: int, +) -> t.Iterator[bytes]: + """Helper for the line and chunk iter functions.""" + warnings.warn( + "'_make_chunk_iter' is deprecated and 
will be removed in Werkzeug 3.0.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+
+    if isinstance(stream, (bytes, bytearray, str)):
+        raise TypeError(
+            "Passed a string or byte object instead of a true iterator or stream."
+        )
+    if not hasattr(stream, "read"):
+        for item in stream:
+            if item:
+                yield item
+        return
+    stream = t.cast(t.IO[bytes], stream)
+    if not isinstance(stream, LimitedStream) and limit is not None:
+        stream = t.cast(t.IO[bytes], LimitedStream(stream, limit))
+    _read = stream.read
+    while True:
+        item = _read(buffer_size)
+        if not item:
+            break
+        yield item
+
+
+def make_line_iter(
+    stream: t.Iterable[bytes] | t.IO[bytes],
+    limit: int | None = None,
+    buffer_size: int = 10 * 1024,
+    cap_at_buffer: bool = False,
+) -> t.Iterator[bytes]:
+    """Safely iterate line by line over an input stream. If the input
+    stream is not a :class:`LimitedStream` the `limit` parameter is
+    mandatory.
+
+    This uses the stream's :meth:`~file.read` method internally, as opposed
+    to the :meth:`~file.readline` method, which is unsafe and can only be
+    used in violation of the WSGI specification. The same problem applies
+    to the `__iter__` method of the input stream, which calls
+    :meth:`~file.readline` without arguments.
+
+    If you need line-by-line processing it's strongly recommended to iterate
+    over the input stream using this helper function.
+
+    .. deprecated:: 2.3
+        Will be removed in Werkzeug 3.0.
+
+    .. versionadded:: 0.11
+       added support for the `cap_at_buffer` parameter.
+
+    .. versionadded:: 0.9
+       added support for iterators as input stream.
+
+    .. versionchanged:: 0.8
+       This function now ensures that the limit was reached.
+
+    :param stream: the stream or iterable to iterate over.
+    :param limit: the limit in bytes for the stream. (Usually the
+                  content length. Not necessary if the `stream` is
+                  a :class:`LimitedStream`.)
+    :param buffer_size: The optional buffer size.
+    :param cap_at_buffer: if this is set, chunks are split if they are longer
+                          than the buffer size. Internally this is implemented
+                          in such a way that the buffer size may be exceeded
+                          by a factor of two.
+    """
+    warnings.warn(
+        "'make_line_iter' is deprecated and will be removed in Werkzeug 3.0.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    _iter = _make_chunk_iter(stream, limit, buffer_size)
+
+    first_item = next(_iter, "")
+
+    if not first_item:
+        return
+
+    s = _make_encode_wrapper(first_item)
+    empty = t.cast(bytes, s(""))
+    cr = t.cast(bytes, s("\r"))
+    lf = t.cast(bytes, s("\n"))
+    crlf = t.cast(bytes, s("\r\n"))
+
+    _iter = t.cast(t.Iterator[bytes], chain((first_item,), _iter))
+
+    def _iter_basic_lines() -> t.Iterator[bytes]:
+        _join = empty.join
+        buffer: list[bytes] = []
+        while True:
+            new_data = next(_iter, "")
+            if not new_data:
+                break
+            new_buf: list[bytes] = []
+            buf_size = 0
+            for item in t.cast(
+                t.Iterator[bytes], chain(buffer, new_data.splitlines(True))
+            ):
+                new_buf.append(item)
+                buf_size += len(item)
+                if item and item[-1:] in crlf:
+                    yield _join(new_buf)
+                    new_buf = []
+                elif cap_at_buffer and buf_size >= buffer_size:
+                    rv = _join(new_buf)
+                    while len(rv) >= buffer_size:
+                        yield rv[:buffer_size]
+                        rv = rv[buffer_size:]
+                    new_buf = [rv]
+            buffer = new_buf
+        if buffer:
+            yield _join(buffer)
+
+    # This hackery is necessary to merge 'foo\r' and '\n' into one item
+    # of 'foo\r\n' if we were unlucky and we hit a chunk boundary.
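+    # For example, one chunk may end with b"foo\r" while the next begins
+    # with b"\n"; the loop below holds each line back until it has seen
+    # the start of the following one.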
+    previous = empty
+    for item in _iter_basic_lines():
+        if item == lf and previous[-1:] == cr:
+            previous += item
+            item = empty
+        if previous:
+            yield previous
+        previous = item
+    if previous:
+        yield previous
+
+
+def make_chunk_iter(
+    stream: t.Iterable[bytes] | t.IO[bytes],
+    separator: bytes,
+    limit: int | None = None,
+    buffer_size: int = 10 * 1024,
+    cap_at_buffer: bool = False,
+) -> t.Iterator[bytes]:
+    """Works like :func:`make_line_iter` but accepts a separator
+    which divides chunks. If you want newline based processing
+    you should use :func:`make_line_iter` instead as it
+    supports arbitrary newline markers.
+
+    .. deprecated:: 2.3
+        Will be removed in Werkzeug 3.0.
+
+    .. versionchanged:: 0.11
+       added support for the `cap_at_buffer` parameter.
+
+    .. versionchanged:: 0.9
+       added support for iterators as input stream.
+
+    .. versionadded:: 0.8
+
+    :param stream: the stream or iterable to iterate over.
+    :param separator: the separator that divides chunks.
+    :param limit: the limit in bytes for the stream. (Usually the
+                  content length. Not necessary if the `stream`
+                  is otherwise already limited.)
+    :param buffer_size: The optional buffer size.
+    :param cap_at_buffer: if this is set, chunks are split if they are longer
+                          than the buffer size. Internally this is implemented
+                          in such a way that the buffer size may be exceeded
+                          by a factor of two.
+    """
+    warnings.warn(
+        "'make_chunk_iter' is deprecated and will be removed in Werkzeug 3.0.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    _iter = _make_chunk_iter(stream, limit, buffer_size)
+
+    first_item = next(_iter, b"")
+
+    if not first_item:
+        return
+
+    _iter = t.cast(t.Iterator[bytes], chain((first_item,), _iter))
+    if isinstance(first_item, str):
+        separator = _to_str(separator)
+        _split = re.compile(f"({re.escape(separator)})").split
+        _join = "".join
+    else:
+        separator = _to_bytes(separator)
+        _split = re.compile(b"(" + re.escape(separator) + b")").split
+        _join = b"".join
+
+    buffer: list[bytes] = []
+    while True:
+        new_data = next(_iter, b"")
+        if not new_data:
+            break
+        chunks = _split(new_data)
+        new_buf: list[bytes] = []
+        buf_size = 0
+        for item in chain(buffer, chunks):
+            if item == separator:
+                yield _join(new_buf)
+                new_buf = []
+                buf_size = 0
+            else:
+                buf_size += len(item)
+                new_buf.append(item)
+
+                if cap_at_buffer and buf_size >= buffer_size:
+                    rv = _join(new_buf)
+                    while len(rv) >= buffer_size:
+                        yield rv[:buffer_size]
+                        rv = rv[buffer_size:]
+                    new_buf = [rv]
+                    buf_size = len(rv)
+
+        buffer = new_buf
+    if buffer:
+        yield _join(buffer)
+
+
+class LimitedStream(io.RawIOBase):
+    """Wrap a stream so that it doesn't read more than a given limit. This is used to
+    limit ``wsgi.input`` to the ``Content-Length`` header value or
+    :attr:`.Request.max_content_length`.
+
+    When attempting to read after the limit has been reached, :meth:`on_exhausted` is
+    called. When the limit is a maximum, this raises :exc:`.RequestEntityTooLarge`.
+
+    If reading from the stream returns zero bytes or raises an error,
+    :meth:`on_disconnect` is called, which raises :exc:`.ClientDisconnected`. When the
+    limit is a maximum and zero bytes were read, no error is raised, since it may be the
+    end of the stream.
+
+    If the limit is reached before the underlying stream is exhausted (such as a file
+    that is too large, or an infinite stream), the remaining contents of the stream
+    cannot be read safely. Depending on how the server handles this, clients may show a
+    "connection reset" failure instead of seeing the 413 response.
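+
+    A minimal usage sketch (assuming ``raw`` is a readable binary stream
+    and ``content_length`` was taken from the request)::
+
+        body = LimitedStream(raw, content_length).readall()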
+ + :param stream: The stream to read from. Must be a readable binary IO object. + :param limit: The limit in bytes to not read past. Should be either the + ``Content-Length`` header value or ``request.max_content_length``. + :param is_max: Whether the given ``limit`` is ``request.max_content_length`` instead + of the ``Content-Length`` header value. This changes how exhausted and + disconnect events are handled. + + .. versionchanged:: 2.3 + Handle ``max_content_length`` differently than ``Content-Length``. + + .. versionchanged:: 2.3 + Implements ``io.RawIOBase`` rather than ``io.IOBase``. + """ + + def __init__(self, stream: t.IO[bytes], limit: int, is_max: bool = False) -> None: + self._stream = stream + self._pos = 0 + self.limit = limit + self._limit_is_max = is_max + + @property + def is_exhausted(self) -> bool: + """Whether the current stream position has reached the limit.""" + return self._pos >= self.limit + + def on_exhausted(self) -> None: + """Called when attempting to read after the limit has been reached. + + The default behavior is to do nothing, unless the limit is a maximum, in which + case it raises :exc:`.RequestEntityTooLarge`. + + .. versionchanged:: 2.3 + Raises ``RequestEntityTooLarge`` if the limit is a maximum. + + .. versionchanged:: 2.3 + Any return value is ignored. + """ + if self._limit_is_max: + raise RequestEntityTooLarge() + + def on_disconnect(self, error: Exception | None = None) -> None: + """Called when an attempted read receives zero bytes before the limit was + reached. This indicates that the client disconnected before sending the full + request body. + + The default behavior is to raise :exc:`.ClientDisconnected`, unless the limit is + a maximum and no error was raised. + + .. versionchanged:: 2.3 + Added the ``error`` parameter. Do nothing if the limit is a maximum and no + error was raised. + + .. versionchanged:: 2.3 + Any return value is ignored. + """ + if not self._limit_is_max or error is not None: + raise ClientDisconnected() + + # If the limit is a maximum, then we may have read zero bytes because the + # streaming body is complete. There's no way to distinguish that from the + # client disconnecting early. + + def exhaust(self) -> bytes: + """Exhaust the stream by reading until the limit is reached or the client + disconnects, returning the remaining data. + + .. versionchanged:: 2.3 + Return the remaining data. + + .. versionchanged:: 2.2.3 + Handle case where wrapped stream returns fewer bytes than requested. + """ + if not self.is_exhausted: + return self.readall() + + return b"" + + def readinto(self, b: bytearray) -> int | None: # type: ignore[override] + size = len(b) + remaining = self.limit - self._pos + + if remaining <= 0: + self.on_exhausted() + return 0 + + if hasattr(self._stream, "readinto"): + # Use stream.readinto if it's available. + if size <= remaining: + # The size fits in the remaining limit, use the buffer directly. + try: + out_size: int | None = self._stream.readinto(b) + except (OSError, ValueError) as e: + self.on_disconnect(error=e) + return 0 + else: + # Use a temp buffer with the remaining limit as the size. + temp_b = bytearray(remaining) + + try: + out_size = self._stream.readinto(temp_b) + except (OSError, ValueError) as e: + self.on_disconnect(error=e) + return 0 + + if out_size: + b[:out_size] = temp_b + else: + # WSGI requires that stream.read is available. 
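+            # (PEP 3333 guarantees only ``read(size)`` on ``wsgi.input``;
+            # the ``readinto`` branch above is an optional optimization.)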
+ try: + data = self._stream.read(min(size, remaining)) + except (OSError, ValueError) as e: + self.on_disconnect(error=e) + return 0 + + out_size = len(data) + b[:out_size] = data + + if not out_size: + # Read zero bytes from the stream. + self.on_disconnect() + return 0 + + self._pos += out_size + return out_size + + def readall(self) -> bytes: + if self.is_exhausted: + self.on_exhausted() + return b"" + + out = bytearray() + + # The parent implementation uses "while True", which results in an extra read. + while not self.is_exhausted: + data = self.read(1024 * 64) + + # Stream may return empty before a max limit is reached. + if not data: + break + + out.extend(data) + + return bytes(out) + + def tell(self) -> int: + """Return the current stream position. + + .. versionadded:: 0.9 + """ + return self._pos + + def readable(self) -> bool: + return True diff --git a/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/INSTALLER b/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/LICENSE b/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..1bb5a44356f00884a71ceeefd24ded6caaba2418 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/LICENSE @@ -0,0 +1,17 @@ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/METADATA b/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..9bf865c9e8e26c5242cb6a9bbb8b7a7d6eb44f71 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/METADATA @@ -0,0 +1,103 @@ +Metadata-Version: 2.1 +Name: zipp +Version: 3.16.2 +Summary: Backport of pathlib-compatible object wrapper for zip files +Home-page: https://github.com/jaraco/zipp +Author: Jason R. 
Coombs +Author-email: jaraco@jaraco.com +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Requires-Python: >=3.8 +License-File: LICENSE +Provides-Extra: docs +Requires-Dist: sphinx (>=3.5) ; extra == 'docs' +Requires-Dist: jaraco.packaging (>=9.3) ; extra == 'docs' +Requires-Dist: rst.linker (>=1.9) ; extra == 'docs' +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx-lint ; extra == 'docs' +Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs' +Provides-Extra: testing +Requires-Dist: pytest (>=6) ; extra == 'testing' +Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing' +Requires-Dist: pytest-cov ; extra == 'testing' +Requires-Dist: pytest-enabler (>=2.2) ; extra == 'testing' +Requires-Dist: pytest-ruff ; extra == 'testing' +Requires-Dist: jaraco.itertools ; extra == 'testing' +Requires-Dist: jaraco.functools ; extra == 'testing' +Requires-Dist: more-itertools ; extra == 'testing' +Requires-Dist: big-O ; extra == 'testing' +Requires-Dist: pytest-ignore-flaky ; extra == 'testing' +Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing' +Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing' + +.. image:: https://img.shields.io/pypi/v/zipp.svg + :target: https://pypi.org/project/zipp + +.. image:: https://img.shields.io/pypi/pyversions/zipp.svg + +.. image:: https://github.com/jaraco/zipp/workflows/tests/badge.svg + :target: https://github.com/jaraco/zipp/actions?query=workflow%3A%22tests%22 + :alt: tests + +.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json + :target: https://github.com/astral-sh/ruff + :alt: Ruff + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Code style: Black + +.. .. image:: https://readthedocs.org/projects/PROJECT_RTD/badge/?version=latest +.. :target: https://PROJECT_RTD.readthedocs.io/en/latest/?badge=latest + +.. image:: https://img.shields.io/badge/skeleton-2023-informational + :target: https://blog.jaraco.com/skeleton + +.. image:: https://tidelift.com/badges/package/pypi/zipp + :target: https://tidelift.com/subscription/pkg/pypi-zipp?utm_source=pypi-zipp&utm_medium=readme + + +A pathlib-compatible Zipfile object wrapper. Official backport of the standard library +`Path object <https://docs.python.org/3.8/library/zipfile.html#path-objects>`_. + + +Compatibility +============= + +New features are introduced in this third-party library and later merged +into CPython. The following table indicates which versions of this library +were contributed to different versions in the standard library: + +.. list-table:: + :header-rows: 1 + + * - zipp + - stdlib + * - 3.15 + - 3.12 + * - 3.5 + - 3.11 + * - 3.2 + - 3.10 + * - 3.3 ?? + - 3.9 + * - 1.0 + - 3.8 + + +Usage +===== + +Use ``zipp.Path`` in place of ``zipfile.Path`` on any Python. + +For Enterprise +============== + +Available as part of the Tidelift Subscription. + +This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use. 
+ +`Learn more <https://tidelift.com/subscription/pkg/pypi-zipp?utm_source=pypi-zipp&utm_medium=referral&utm_campaign=github>`_. diff --git a/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/RECORD b/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..cc1253b0bdd939ef363a907de2ddbc21fef3e984 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/RECORD @@ -0,0 +1,12 @@ +zipp-3.16.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +zipp-3.16.2.dist-info/LICENSE,sha256=htoPAa6uRjSKPD1GUZXcHOzN55956HdppkuNoEsqR0E,1023 +zipp-3.16.2.dist-info/METADATA,sha256=XcRAj0l4G0DNhBVVNhBeKh6RUA_T1h71cXVwSEri6H8,3711 +zipp-3.16.2.dist-info/RECORD,, +zipp-3.16.2.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92 +zipp-3.16.2.dist-info/top_level.txt,sha256=iAbdoSHfaGqBfVb2XuR9JqSQHCoOsOtG6y9C_LSpqFw,5 +zipp/__init__.py,sha256=gSWnY3jpdldbD1Gaio8iJWLxlJm58EBDieTkx2UZnds,10453 +zipp/__pycache__/__init__.cpython-38.pyc,, +zipp/__pycache__/glob.cpython-38.pyc,, +zipp/__pycache__/py310compat.cpython-38.pyc,, +zipp/glob.py,sha256=cCDTdWacJXh5tbEnjnZJ71HL_hbprvln5aylHMoR-JM,893 +zipp/py310compat.py,sha256=eZpkW0zRtunkhEh8jjX3gCGe22emoKCBJw72Zt4RkhA,219 diff --git a/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/WHEEL b/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..1f37c02f2eb2e26b306202feaccb31e522b8b169 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.40.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/top_level.txt b/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..e82f676f82a3381fa909d1e6578c7a22044fafca --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/zipp-3.16.2.dist-info/top_level.txt @@ -0,0 +1 @@ +zipp diff --git a/backend/test/lib/python3.8/site-packages/zipp/__init__.py b/backend/test/lib/python3.8/site-packages/zipp/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3354c2bb9d715129918b9998cc0785ec95f9d472 --- /dev/null +++ b/backend/test/lib/python3.8/site-packages/zipp/__init__.py @@ -0,0 +1,396 @@ +import io +import posixpath +import zipfile +import itertools +import contextlib +import pathlib +import re + +from .py310compat import text_encoding +from .glob import translate + + +__all__ = ['Path'] + + +def _parents(path): + """ + Given a path with elements separated by + posixpath.sep, generate all parents of that path. 
+ + >>> list(_parents('b/d')) + ['b'] + >>> list(_parents('/b/d/')) + ['/b'] + >>> list(_parents('b/d/f/')) + ['b/d', 'b'] + >>> list(_parents('b')) + [] + >>> list(_parents('')) + [] + """ + return itertools.islice(_ancestry(path), 1, None) + + +def _ancestry(path): + """ + Given a path with elements separated by + posixpath.sep, generate all elements of that path + + >>> list(_ancestry('b/d')) + ['b/d', 'b'] + >>> list(_ancestry('/b/d/')) + ['/b/d', '/b'] + >>> list(_ancestry('b/d/f/')) + ['b/d/f', 'b/d', 'b'] + >>> list(_ancestry('b')) + ['b'] + >>> list(_ancestry('')) + [] + """ + path = path.rstrip(posixpath.sep) + while path and path != posixpath.sep: + yield path + path, tail = posixpath.split(path) + + +_dedupe = dict.fromkeys +"""Deduplicate an iterable in original order""" + + +def _difference(minuend, subtrahend): + """ + Return items in minuend not in subtrahend, retaining order + with O(1) lookup. + """ + return itertools.filterfalse(set(subtrahend).__contains__, minuend) + + +class InitializedState: + """ + Mix-in to save the initialization state for pickling. + """ + + def __init__(self, *args, **kwargs): + self.__args = args + self.__kwargs = kwargs + super().__init__(*args, **kwargs) + + def __getstate__(self): + return self.__args, self.__kwargs + + def __setstate__(self, state): + args, kwargs = state + super().__init__(*args, **kwargs) + + +class CompleteDirs(InitializedState, zipfile.ZipFile): + """ + A ZipFile subclass that ensures that implied directories + are always included in the namelist. + + >>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt'])) + ['foo/', 'foo/bar/'] + >>> list(CompleteDirs._implied_dirs(['foo/bar.txt', 'foo/bar/baz.txt', 'foo/bar/'])) + ['foo/'] + """ + + @staticmethod + def _implied_dirs(names): + parents = itertools.chain.from_iterable(map(_parents, names)) + as_dirs = (p + posixpath.sep for p in parents) + return _dedupe(_difference(as_dirs, names)) + + def namelist(self): + names = super().namelist() + return names + list(self._implied_dirs(names)) + + def _name_set(self): + return set(self.namelist()) + + def resolve_dir(self, name): + """ + If the name represents a directory, return that name + as a directory (with the trailing slash). + """ + names = self._name_set() + dirname = name + '/' + dir_match = name not in names and dirname in names + return dirname if dir_match else name + + def getinfo(self, name): + """ + Supplement getinfo for implied dirs. + """ + try: + return super().getinfo(name) + except KeyError: + if not name.endswith('/') or name not in self._name_set(): + raise + return zipfile.ZipInfo(filename=name) + + @classmethod + def make(cls, source): + """ + Given a source (filename or zipfile), return an + appropriate CompleteDirs subclass. + """ + if isinstance(source, CompleteDirs): + return source + + if not isinstance(source, zipfile.ZipFile): + return cls(source) + + # Only allow for FastLookup when supplied zipfile is read-only + if 'r' not in source.mode: + cls = CompleteDirs + + source.__class__ = cls + return source + + +class FastLookup(CompleteDirs): + """ + ZipFile subclass to ensure implicit + dirs exist and are resolved rapidly. 
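+
+    The computed name list is cached after the first call, so this class
+    assumes the underlying zip file is not modified once lookups begin.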
+ """ + + def namelist(self): + with contextlib.suppress(AttributeError): + return self.__names + self.__names = super().namelist() + return self.__names + + def _name_set(self): + with contextlib.suppress(AttributeError): + return self.__lookup + self.__lookup = super()._name_set() + return self.__lookup + + +def _extract_text_encoding(encoding=None, *args, **kwargs): + # stacklevel=3 so that the caller of the caller see any warning. + return text_encoding(encoding, 3), args, kwargs + + +class Path: + """ + A pathlib-compatible interface for zip files. + + Consider a zip file with this structure:: + + . + ├── a.txt + └── b + ├── c.txt + └── d + └── e.txt + + >>> data = io.BytesIO() + >>> zf = zipfile.ZipFile(data, 'w') + >>> zf.writestr('a.txt', 'content of a') + >>> zf.writestr('b/c.txt', 'content of c') + >>> zf.writestr('b/d/e.txt', 'content of e') + >>> zf.filename = 'mem/abcde.zip' + + Path accepts the zipfile object itself or a filename + + >>> root = Path(zf) + + From there, several path operations are available. + + Directory iteration (including the zip file itself): + + >>> a, b = root.iterdir() + >>> a + Path('mem/abcde.zip', 'a.txt') + >>> b + Path('mem/abcde.zip', 'b/') + + name property: + + >>> b.name + 'b' + + join with divide operator: + + >>> c = b / 'c.txt' + >>> c + Path('mem/abcde.zip', 'b/c.txt') + >>> c.name + 'c.txt' + + Read text: + + >>> c.read_text(encoding='utf-8') + 'content of c' + + existence: + + >>> c.exists() + True + >>> (b / 'missing.txt').exists() + False + + Coercion to string: + + >>> import os + >>> str(c).replace(os.sep, posixpath.sep) + 'mem/abcde.zip/b/c.txt' + + At the root, ``name``, ``filename``, and ``parent`` + resolve to the zipfile. Note these attributes are not + valid and will raise a ``ValueError`` if the zipfile + has no filename. + + >>> root.name + 'abcde.zip' + >>> str(root.filename).replace(os.sep, posixpath.sep) + 'mem/abcde.zip' + >>> str(root.parent) + 'mem' + """ + + __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})" + + def __init__(self, root, at=""): + """ + Construct a Path from a ZipFile or filename. + + Note: When the source is an existing ZipFile object, + its type (__class__) will be mutated to a + specialized type. If the caller wishes to retain the + original type, the caller should either create a + separate ZipFile object or pass a filename. + """ + self.root = FastLookup.make(root) + self.at = at + + def __eq__(self, other): + """ + >>> Path(zipfile.ZipFile(io.BytesIO(), 'w')) == 'foo' + False + """ + if self.__class__ is not other.__class__: + return NotImplemented + return (self.root, self.at) == (other.root, other.at) + + def __hash__(self): + return hash((self.root, self.at)) + + def open(self, mode='r', *args, pwd=None, **kwargs): + """ + Open this entry as text or binary following the semantics + of ``pathlib.Path.open()`` by passing arguments through + to io.TextIOWrapper(). 
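+
+        For example (reusing ``root`` from the class docstring)::
+
+            with (root / 'b' / 'c.txt').open(encoding='utf-8') as strm:
+                strm.read()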
+
+
+class Path:
+    """
+    A pathlib-compatible interface for zip files.
+
+    Consider a zip file with this structure::
+
+        .
+        ├── a.txt
+        └── b
+            ├── c.txt
+            └── d
+                └── e.txt
+
+    >>> data = io.BytesIO()
+    >>> zf = zipfile.ZipFile(data, 'w')
+    >>> zf.writestr('a.txt', 'content of a')
+    >>> zf.writestr('b/c.txt', 'content of c')
+    >>> zf.writestr('b/d/e.txt', 'content of e')
+    >>> zf.filename = 'mem/abcde.zip'
+
+    Path accepts the zipfile object itself or a filename
+
+    >>> root = Path(zf)
+
+    From there, several path operations are available.
+
+    Directory iteration (including the zip file itself):
+
+    >>> a, b = root.iterdir()
+    >>> a
+    Path('mem/abcde.zip', 'a.txt')
+    >>> b
+    Path('mem/abcde.zip', 'b/')
+
+    name property:
+
+    >>> b.name
+    'b'
+
+    join with divide operator:
+
+    >>> c = b / 'c.txt'
+    >>> c
+    Path('mem/abcde.zip', 'b/c.txt')
+    >>> c.name
+    'c.txt'
+
+    Read text:
+
+    >>> c.read_text(encoding='utf-8')
+    'content of c'
+
+    existence:
+
+    >>> c.exists()
+    True
+    >>> (b / 'missing.txt').exists()
+    False
+
+    Coercion to string:
+
+    >>> import os
+    >>> str(c).replace(os.sep, posixpath.sep)
+    'mem/abcde.zip/b/c.txt'
+
+    At the root, ``name``, ``filename``, and ``parent``
+    resolve to the zipfile. Note these attributes are not
+    valid and will raise a ``ValueError`` if the zipfile
+    has no filename.
+
+    >>> root.name
+    'abcde.zip'
+    >>> str(root.filename).replace(os.sep, posixpath.sep)
+    'mem/abcde.zip'
+    >>> str(root.parent)
+    'mem'
+    """
+
+    __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
+
+    def __init__(self, root, at=""):
+        """
+        Construct a Path from a ZipFile or filename.
+
+        Note: When the source is an existing ZipFile object,
+        its type (__class__) will be mutated to a
+        specialized type. If the caller wishes to retain the
+        original type, the caller should either create a
+        separate ZipFile object or pass a filename.
+        """
+        self.root = FastLookup.make(root)
+        self.at = at
+
+    def __eq__(self, other):
+        """
+        >>> Path(zipfile.ZipFile(io.BytesIO(), 'w')) == 'foo'
+        False
+        """
+        if self.__class__ is not other.__class__:
+            return NotImplemented
+        return (self.root, self.at) == (other.root, other.at)
+
+    def __hash__(self):
+        return hash((self.root, self.at))
+
+    def open(self, mode='r', *args, pwd=None, **kwargs):
+        """
+        Open this entry as text or binary following the semantics
+        of ``pathlib.Path.open()`` by passing arguments through
+        to io.TextIOWrapper().
+        """
+        if self.is_dir():
+            raise IsADirectoryError(self)
+        zip_mode = mode[0]
+        if not self.exists() and zip_mode == 'r':
+            raise FileNotFoundError(self)
+        stream = self.root.open(self.at, zip_mode, pwd=pwd)
+        if 'b' in mode:
+            if args or kwargs:
+                raise ValueError("encoding args invalid for binary operation")
+            return stream
+        # Text mode:
+        encoding, args, kwargs = _extract_text_encoding(*args, **kwargs)
+        return io.TextIOWrapper(stream, encoding, *args, **kwargs)
+
+    def _base(self):
+        return pathlib.PurePosixPath(self.at or self.root.filename)
+
+    @property
+    def name(self):
+        return self._base().name
+
+    @property
+    def suffix(self):
+        return self._base().suffix
+
+    @property
+    def suffixes(self):
+        return self._base().suffixes
+
+    @property
+    def stem(self):
+        return self._base().stem
+
+    @property
+    def filename(self):
+        return pathlib.Path(self.root.filename).joinpath(self.at)
+
+    def read_text(self, *args, **kwargs):
+        encoding, args, kwargs = _extract_text_encoding(*args, **kwargs)
+        with self.open('r', encoding, *args, **kwargs) as strm:
+            return strm.read()
+
+    def read_bytes(self):
+        with self.open('rb') as strm:
+            return strm.read()
+
+    def _is_child(self, path):
+        return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
+
+    def _next(self, at):
+        return self.__class__(self.root, at)
+
+    def is_dir(self):
+        return not self.at or self.at.endswith("/")
+
+    def is_file(self):
+        return self.exists() and not self.is_dir()
+
+    def exists(self):
+        return self.at in self.root._name_set()
+
+    def iterdir(self):
+        if not self.is_dir():
+            raise ValueError("Can't listdir a file")
+        subs = map(self._next, self.root.namelist())
+        return filter(self._is_child, subs)
+
+    def match(self, path_pattern):
+        return pathlib.PurePosixPath(self.at).match(path_pattern)
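+
+    # A minimal sketch (not from upstream zipp) of recursive traversal built
+    # on iterdir()/is_dir() above; `path` is any Path over an archive.
+    #
+    #     def walk(path):
+    #         for child in path.iterdir():
+    #             yield child
+    #             if child.is_dir():
+    #                 yield from walk(child)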
+
+    def is_symlink(self):
+        """
+        Return whether this path is a symlink. Always false (python/cpython#82102).
+        """
+        return False
+
+    def glob(self, pattern):
+        if not pattern:
+            raise ValueError(f"Unacceptable pattern: {pattern!r}")
+
+        prefix = re.escape(self.at)
+        matches = re.compile(prefix + translate(pattern)).fullmatch
+        return map(self._next, filter(matches, self.root.namelist()))
+
+    def rglob(self, pattern):
+        return self.glob(f'**/{pattern}')
+
+    def relative_to(self, other, *extra):
+        return posixpath.relpath(str(self), str(other.joinpath(*extra)))
+
+    def __str__(self):
+        return posixpath.join(self.root.filename, self.at)
+
+    def __repr__(self):
+        return self.__repr.format(self=self)
+
+    def joinpath(self, *other):
+        next = posixpath.join(self.at, *other)
+        return self._next(self.root.resolve_dir(next))
+
+    __truediv__ = joinpath
+
+    @property
+    def parent(self):
+        if not self.at:
+            return self.filename.parent
+        parent_at = posixpath.dirname(self.at.rstrip('/'))
+        if parent_at:
+            parent_at += '/'
+        return self._next(parent_at)
diff --git a/backend/test/lib/python3.8/site-packages/zipp/__pycache__/__init__.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/zipp/__pycache__/__init__.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c65f04fb26b966d198382866fd3133f3b6d940f7
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/zipp/__pycache__/__init__.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/zipp/__pycache__/glob.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/zipp/__pycache__/glob.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cfb7e00780a35f018a536ff1a08ea4e03a6dbc4f
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/zipp/__pycache__/glob.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/zipp/__pycache__/py310compat.cpython-38.pyc b/backend/test/lib/python3.8/site-packages/zipp/__pycache__/py310compat.cpython-38.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ac783377f7acec5629a26703dcf59d3162fa18db
Binary files /dev/null and b/backend/test/lib/python3.8/site-packages/zipp/__pycache__/py310compat.cpython-38.pyc differ
diff --git a/backend/test/lib/python3.8/site-packages/zipp/glob.py b/backend/test/lib/python3.8/site-packages/zipp/glob.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a2e665e27078aad32e2be2a43f02532cb99725d
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/zipp/glob.py
@@ -0,0 +1,40 @@
+import re
+
+
+def translate(pattern):
+    r"""
+    Given a glob pattern, produce a regex that matches it.
+
+    >>> translate('*.txt')
+    '[^/]*\\.txt'
+    >>> translate('a?txt')
+    'a.txt'
+    >>> translate('**/*')
+    '.*/[^/]*'
+    """
+    return ''.join(map(replace, separate(pattern)))
+
+
+def separate(pattern):
+    """
+    Separate out character sets to avoid translating their contents.
+
+    >>> [m.group(0) for m in separate('*.txt')]
+    ['*.txt']
+    >>> [m.group(0) for m in separate('a[?]txt')]
+    ['a', '[?]', 'txt']
+    """
+    return re.finditer(r'([^\[]+)|(?P<set>[\[].*?[\]])|([\[][^\]]*$)', pattern)
+
+
+def replace(match):
+    """
+    Perform the replacements for a match from :func:`separate`.
+    """
+
+    return match.group('set') or (
+        re.escape(match.group(0))
+        .replace('\\*\\*', r'.*')
+        .replace('\\*', r'[^/]*')
+        .replace('\\?', r'.')
+    )
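+
+
+# A minimal sketch (not from upstream zipp) of how Path.glob applies these
+# helpers: translate() builds a regex, which is then matched in full against
+# each archive member name. The member names below are made up.
+#
+#     pat = re.compile(translate('**/*.txt')).fullmatch
+#     names = ['a.txt', 'b/c.txt', 'b/d/e.txt']
+#     assert [n for n in names if pat(n)] == ['b/c.txt', 'b/d/e.txt']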
diff --git a/backend/test/lib/python3.8/site-packages/zipp/py310compat.py b/backend/test/lib/python3.8/site-packages/zipp/py310compat.py
new file mode 100644
index 0000000000000000000000000000000000000000..d5ca53e037b267ff1b196b3b208dea0acdebe4c5
--- /dev/null
+++ b/backend/test/lib/python3.8/site-packages/zipp/py310compat.py
@@ -0,0 +1,11 @@
+import sys
+import io
+
+
+def _text_encoding(encoding, stacklevel=2, /):  # pragma: no cover
+    return encoding
+
+
+text_encoding = (
+    io.text_encoding if sys.version_info > (3, 10) else _text_encoding  # type: ignore
+)
diff --git a/backend/test/lib64 b/backend/test/lib64
new file mode 120000
index 0000000000000000000000000000000000000000..7951405f85a569efbacc12fccfee529ef1866602
--- /dev/null
+++ b/backend/test/lib64
@@ -0,0 +1 @@
+lib
\ No newline at end of file
diff --git a/backend/test/pyvenv.cfg b/backend/test/pyvenv.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..853404e23c0366b53610217d8f603a2f9dd1feeb
--- /dev/null
+++ b/backend/test/pyvenv.cfg
@@ -0,0 +1,3 @@
+home = /usr/bin
+include-system-site-packages = false
+version = 3.8.10
diff --git a/backend/test/share/python-wheels/CacheControl-0.12.6-py2.py3-none-any.whl b/backend/test/share/python-wheels/CacheControl-0.12.6-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..3f3a89504d8c216ed142ca8b3f0e511f3d12cc90
Binary files /dev/null and b/backend/test/share/python-wheels/CacheControl-0.12.6-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/appdirs-1.4.3-py2.py3-none-any.whl b/backend/test/share/python-wheels/appdirs-1.4.3-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..629bdcc5cb9380a9a0a1dbe562d2b8d3e353d2c6
Binary files /dev/null and b/backend/test/share/python-wheels/appdirs-1.4.3-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/certifi-2019.11.28-py2.py3-none-any.whl b/backend/test/share/python-wheels/certifi-2019.11.28-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..cc1145339fdd7ba331b762076af38c07eb77bf08
Binary files /dev/null and b/backend/test/share/python-wheels/certifi-2019.11.28-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/chardet-3.0.4-py2.py3-none-any.whl b/backend/test/share/python-wheels/chardet-3.0.4-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..3d27d991b93657fcb06519ec295710b126da84cf
Binary files /dev/null and b/backend/test/share/python-wheels/chardet-3.0.4-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/colorama-0.4.3-py2.py3-none-any.whl b/backend/test/share/python-wheels/colorama-0.4.3-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..96b3fb126da2a87897ed71abd54d383587cf181e
Binary files /dev/null and b/backend/test/share/python-wheels/colorama-0.4.3-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/contextlib2-0.6.0-py2.py3-none-any.whl b/backend/test/share/python-wheels/contextlib2-0.6.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..d10bcdeb0cee2eaed9cb1b92f42ab34bc48af346
Binary files /dev/null and b/backend/test/share/python-wheels/contextlib2-0.6.0-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/distlib-0.3.0-py2.py3-none-any.whl b/backend/test/share/python-wheels/distlib-0.3.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..193b6ac4028cfdba42146b225db0d952ffc05104
Binary files /dev/null and b/backend/test/share/python-wheels/distlib-0.3.0-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/distro-1.4.0-py2.py3-none-any.whl b/backend/test/share/python-wheels/distro-1.4.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..9c7ccf1eb245b1c0e0d8a31f4821c781ae5ad908
Binary files /dev/null and b/backend/test/share/python-wheels/distro-1.4.0-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/html5lib-1.0.1-py2.py3-none-any.whl b/backend/test/share/python-wheels/html5lib-1.0.1-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..92806672ef623bb4117f012785c62b4ebbdbdf9f
Binary files /dev/null and b/backend/test/share/python-wheels/html5lib-1.0.1-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/idna-2.8-py2.py3-none-any.whl b/backend/test/share/python-wheels/idna-2.8-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..925157560ee235e866b2539b37d30b570089e84b
Binary files /dev/null and b/backend/test/share/python-wheels/idna-2.8-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/ipaddr-2.2.0-py2.py3-none-any.whl b/backend/test/share/python-wheels/ipaddr-2.2.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..dd676316cd0045c70a9dd510f8b3d875684e4a22
Binary files /dev/null and b/backend/test/share/python-wheels/ipaddr-2.2.0-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/lockfile-0.12.2-py2.py3-none-any.whl b/backend/test/share/python-wheels/lockfile-0.12.2-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..d9efa3f1be68c5604028b009974d0a4ab5e178ea
Binary files /dev/null and b/backend/test/share/python-wheels/lockfile-0.12.2-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/msgpack-0.6.2-py2.py3-none-any.whl b/backend/test/share/python-wheels/msgpack-0.6.2-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..638f215f21e81da9b71402c04d716025456e5dfb
Binary files /dev/null and b/backend/test/share/python-wheels/msgpack-0.6.2-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/packaging-20.3-py2.py3-none-any.whl b/backend/test/share/python-wheels/packaging-20.3-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..7285070132a20da3e9a6a14b490c1077e10ec5d1
Binary files /dev/null and b/backend/test/share/python-wheels/packaging-20.3-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/pep517-0.8.2-py2.py3-none-any.whl b/backend/test/share/python-wheels/pep517-0.8.2-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..2d3bc05015c5732c3820afd79aa06b6f20c6700a
Binary files /dev/null and b/backend/test/share/python-wheels/pep517-0.8.2-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/pip-20.0.2-py2.py3-none-any.whl b/backend/test/share/python-wheels/pip-20.0.2-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..6f88203f5f1f7c910596fb76e518e42bfe1aeca7
Binary files /dev/null and b/backend/test/share/python-wheels/pip-20.0.2-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/pkg_resources-0.0.0-py2.py3-none-any.whl b/backend/test/share/python-wheels/pkg_resources-0.0.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..22081cd9d5236168edf809c232474a32d8a60dfe
Binary files /dev/null and b/backend/test/share/python-wheels/pkg_resources-0.0.0-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/progress-1.5-py2.py3-none-any.whl b/backend/test/share/python-wheels/progress-1.5-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..d4ce42d50c24180578cbaf895e9f6ec6b0172494
Binary files /dev/null and b/backend/test/share/python-wheels/progress-1.5-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/pyparsing-2.4.6-py2.py3-none-any.whl b/backend/test/share/python-wheels/pyparsing-2.4.6-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..97c72f19db3921e09757cac22dace4f64a6efb15
Binary files /dev/null and b/backend/test/share/python-wheels/pyparsing-2.4.6-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/requests-2.22.0-py2.py3-none-any.whl b/backend/test/share/python-wheels/requests-2.22.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..0936e56b1c7ff22834f5da57e4d7104cb960a69f
Binary files /dev/null and b/backend/test/share/python-wheels/requests-2.22.0-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/retrying-1.3.3-py2.py3-none-any.whl b/backend/test/share/python-wheels/retrying-1.3.3-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..7c98727cf8f23b3b8e3ddc0b1cb401effc454a8d
Binary files /dev/null and b/backend/test/share/python-wheels/retrying-1.3.3-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/setuptools-44.0.0-py2.py3-none-any.whl b/backend/test/share/python-wheels/setuptools-44.0.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..adc27761239ac8d526a6d1d132d1e6b876d704f9
Binary files /dev/null and b/backend/test/share/python-wheels/setuptools-44.0.0-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/six-1.14.0-py2.py3-none-any.whl b/backend/test/share/python-wheels/six-1.14.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..f66dca602f0cb5719a6642cf8d98af17c0625c74
Binary files /dev/null and b/backend/test/share/python-wheels/six-1.14.0-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/toml-0.10.0-py2.py3-none-any.whl b/backend/test/share/python-wheels/toml-0.10.0-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..4c505473165a2703fbd597654777ac2cd676d72c
Binary files /dev/null and b/backend/test/share/python-wheels/toml-0.10.0-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/urllib3-1.25.8-py2.py3-none-any.whl b/backend/test/share/python-wheels/urllib3-1.25.8-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..df00d69e2ac2a9eab7a437f4cdf6dfe90f15117e
Binary files /dev/null and b/backend/test/share/python-wheels/urllib3-1.25.8-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/webencodings-0.5.1-py2.py3-none-any.whl b/backend/test/share/python-wheels/webencodings-0.5.1-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..a82791f816cf16dcd3e3fc7278ab4982b2126b5f
Binary files /dev/null and b/backend/test/share/python-wheels/webencodings-0.5.1-py2.py3-none-any.whl differ
diff --git a/backend/test/share/python-wheels/wheel-0.34.2-py2.py3-none-any.whl b/backend/test/share/python-wheels/wheel-0.34.2-py2.py3-none-any.whl
new file mode 100644
index 0000000000000000000000000000000000000000..4d476be01c858425b85740dd1aac3aaf4b965bf1
Binary files /dev/null and b/backend/test/share/python-wheels/wheel-0.34.2-py2.py3-none-any.whl differ
diff --git a/frontend b/frontend
new file mode 160000
index 0000000000000000000000000000000000000000..6c0c458d82d78be23a5ebf29e3257964553274e3
--- /dev/null
+++ b/frontend
@@ -0,0 +1 @@
+Subproject commit 6c0c458d82d78be23a5ebf29e3257964553274e3